commit
stringlengths 40
40
| old_file
stringlengths 4
234
| new_file
stringlengths 4
234
| old_contents
stringlengths 10
3.01k
| new_contents
stringlengths 19
3.38k
| subject
stringlengths 16
736
| message
stringlengths 17
2.63k
| lang
stringclasses 4
values | license
stringclasses 13
values | repos
stringlengths 5
82.6k
| config
stringclasses 4
values | content
stringlengths 134
4.41k
| fuzzy_diff
stringlengths 29
3.44k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|
63f7489066aeb23dbefc6f8de534ad05144431ad
|
boardinghouse/tests/test_sql.py
|
boardinghouse/tests/test_sql.py
|
from django.conf import settings
from django.test import TestCase
from django.db.models import connection
from boardinghouse.models import Schema
class TestRejectSchemaColumnChange(TestCase):
def test_exception_is_raised(self):
Schema.objects.mass_create('a')
cursor = connection.cursor()
UPDATE = "UPDATE boardinghouse_schema SET schema='foo' WHERE schema='a'"
self.assertRaises(Exception, cursor.execute, UPDATE)
|
from django.conf import settings
from django.test import TestCase
from django.db import connection
from boardinghouse.models import Schema
class TestRejectSchemaColumnChange(TestCase):
def test_exception_is_raised(self):
Schema.objects.mass_create('a')
cursor = connection.cursor()
UPDATE = "UPDATE boardinghouse_schema SET schema='foo' WHERE schema='a'"
self.assertRaises(Exception, cursor.execute, UPDATE)
|
Make test work with 1.7
|
Make test work with 1.7
|
Python
|
bsd-3-clause
|
luzfcb/django-boardinghouse,luzfcb/django-boardinghouse,luzfcb/django-boardinghouse
|
python
|
## Code Before:
from django.conf import settings
from django.test import TestCase
from django.db.models import connection
from boardinghouse.models import Schema
class TestRejectSchemaColumnChange(TestCase):
def test_exception_is_raised(self):
Schema.objects.mass_create('a')
cursor = connection.cursor()
UPDATE = "UPDATE boardinghouse_schema SET schema='foo' WHERE schema='a'"
self.assertRaises(Exception, cursor.execute, UPDATE)
## Instruction:
Make test work with 1.7
## Code After:
from django.conf import settings
from django.test import TestCase
from django.db import connection
from boardinghouse.models import Schema
class TestRejectSchemaColumnChange(TestCase):
def test_exception_is_raised(self):
Schema.objects.mass_create('a')
cursor = connection.cursor()
UPDATE = "UPDATE boardinghouse_schema SET schema='foo' WHERE schema='a'"
self.assertRaises(Exception, cursor.execute, UPDATE)
|
...
from django.conf import settings
from django.test import TestCase
from django.db import connection
from boardinghouse.models import Schema
...
Schema.objects.mass_create('a')
cursor = connection.cursor()
UPDATE = "UPDATE boardinghouse_schema SET schema='foo' WHERE schema='a'"
self.assertRaises(Exception, cursor.execute, UPDATE)
...
|
7bcd0f1a731b9808cecf8a4f0e67696e0eb319ea
|
src/main/java/se/kits/gakusei/content/model/Lesson.java
|
src/main/java/se/kits/gakusei/content/model/Lesson.java
|
package se.kits.gakusei.content.model;
import com.fasterxml.jackson.annotation.JsonManagedReference;
import javax.persistence.*;
import java.io.Serializable;
import java.util.List;
@Entity
@Table(name = "lessons", schema = "contentschema")
public class Lesson implements Serializable {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
private Long id;
@Column(nullable = false)
private String name;
private String description;
@ManyToMany
@JsonManagedReference
@JoinTable(
name = "lessons_nuggets",
joinColumns = @JoinColumn(name = "lesson_id", referencedColumnName = "id"),
inverseJoinColumns = @JoinColumn(name = "nugget_id", referencedColumnName = "id"))
private List<Nugget> nuggets;
public Lesson() {
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public List<Nugget> getNuggets() {
return nuggets;
}
public void setNuggets(List<Nugget> nuggets) {
this.nuggets = nuggets;
}
}
|
package se.kits.gakusei.content.model;
import com.fasterxml.jackson.annotation.JsonManagedReference;
import javax.persistence.*;
import java.io.Serializable;
import java.util.List;
@Entity
@Table(name = "lessons", schema = "contentschema")
public class Lesson implements Serializable {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
private Long id;
@Column(nullable = false)
private String name;
private String description;
@ManyToMany
@JsonManagedReference
@JoinTable(
name = "lessons_nuggets",
schema = "contentschema",
joinColumns = @JoinColumn(name = "lesson_id", referencedColumnName = "id"),
inverseJoinColumns = @JoinColumn(name = "nugget_id", referencedColumnName = "id"))
private List<Nugget> nuggets;
public Lesson() {
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public List<Nugget> getNuggets() {
return nuggets;
}
public void setNuggets(List<Nugget> nuggets) {
this.nuggets = nuggets;
}
}
|
Add missing schema declaration to lesson/nugget relation
|
Add missing schema declaration to lesson/nugget relation
|
Java
|
mit
|
kits-ab/gakusei,kits-ab/gakusei,kits-ab/gakusei
|
java
|
## Code Before:
package se.kits.gakusei.content.model;
import com.fasterxml.jackson.annotation.JsonManagedReference;
import javax.persistence.*;
import java.io.Serializable;
import java.util.List;
@Entity
@Table(name = "lessons", schema = "contentschema")
public class Lesson implements Serializable {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
private Long id;
@Column(nullable = false)
private String name;
private String description;
@ManyToMany
@JsonManagedReference
@JoinTable(
name = "lessons_nuggets",
joinColumns = @JoinColumn(name = "lesson_id", referencedColumnName = "id"),
inverseJoinColumns = @JoinColumn(name = "nugget_id", referencedColumnName = "id"))
private List<Nugget> nuggets;
public Lesson() {
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public List<Nugget> getNuggets() {
return nuggets;
}
public void setNuggets(List<Nugget> nuggets) {
this.nuggets = nuggets;
}
}
## Instruction:
Add missing schema declaration to lesson/nugget relation
## Code After:
package se.kits.gakusei.content.model;
import com.fasterxml.jackson.annotation.JsonManagedReference;
import javax.persistence.*;
import java.io.Serializable;
import java.util.List;
@Entity
@Table(name = "lessons", schema = "contentschema")
public class Lesson implements Serializable {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
private Long id;
@Column(nullable = false)
private String name;
private String description;
@ManyToMany
@JsonManagedReference
@JoinTable(
name = "lessons_nuggets",
schema = "contentschema",
joinColumns = @JoinColumn(name = "lesson_id", referencedColumnName = "id"),
inverseJoinColumns = @JoinColumn(name = "nugget_id", referencedColumnName = "id"))
private List<Nugget> nuggets;
public Lesson() {
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public List<Nugget> getNuggets() {
return nuggets;
}
public void setNuggets(List<Nugget> nuggets) {
this.nuggets = nuggets;
}
}
|
// ... existing code ...
@JsonManagedReference
@JoinTable(
name = "lessons_nuggets",
schema = "contentschema",
joinColumns = @JoinColumn(name = "lesson_id", referencedColumnName = "id"),
inverseJoinColumns = @JoinColumn(name = "nugget_id", referencedColumnName = "id"))
private List<Nugget> nuggets;
// ... rest of the code ...
|
745568d54b705cf767142911556c7d87a0397919
|
lfs/shipping/migrations/0002_auto_20170216_0739.py
|
lfs/shipping/migrations/0002_auto_20170216_0739.py
|
from __future__ import unicode_literals
from django.db import migrations
def update_price_calculator(apps, schema_editor):
ShippingMethod = apps.get_model("shipping", "ShippingMethod")
for shipping_method in ShippingMethod.objects.filter(price_calculator="lfs.shipping.NetShippingMethodPriceCalculator"):
shipping_method.price_calculator = "lfs.shipping.calculator.NetPriceCalculator"
shipping_method.save()
for shipping_method in ShippingMethod.objects.filter(price_calculator="lfs.shipping.GrossShippingMethodPriceCalculator"):
shipping_method.price_calculator = "lfs.shipping.calculator.GrossPriceCalculator"
shipping_method.save()
class Migration(migrations.Migration):
dependencies = [
('shipping', '0001_initial'),
]
operations = [
migrations.RunPython(update_price_calculator),
]
|
from __future__ import unicode_literals
from django.db import migrations
def update_price_calculator(apps, schema_editor):
ShippingMethod = apps.get_model("shipping", "ShippingMethod")
for shipping_method in ShippingMethod.objects.filter(price_calculator="lfs.shipping.NetShippingMethodPriceCalculator"):
shipping_method.price_calculator = "lfs.shipping.calculator.NetShippingMethodPriceCalculator"
shipping_method.save()
for shipping_method in ShippingMethod.objects.filter(price_calculator="lfs.shipping.GrossShippingMethodPriceCalculator"):
shipping_method.price_calculator = "lfs.shipping.calculator.GrossShippingMethodPriceCalculator"
shipping_method.save()
class Migration(migrations.Migration):
dependencies = [
('shipping', '0001_initial'),
]
operations = [
migrations.RunPython(update_price_calculator),
]
|
Fix price calculator class names
|
Fix price calculator class names
|
Python
|
bsd-3-clause
|
diefenbach/django-lfs,diefenbach/django-lfs,diefenbach/django-lfs
|
python
|
## Code Before:
from __future__ import unicode_literals
from django.db import migrations
def update_price_calculator(apps, schema_editor):
ShippingMethod = apps.get_model("shipping", "ShippingMethod")
for shipping_method in ShippingMethod.objects.filter(price_calculator="lfs.shipping.NetShippingMethodPriceCalculator"):
shipping_method.price_calculator = "lfs.shipping.calculator.NetPriceCalculator"
shipping_method.save()
for shipping_method in ShippingMethod.objects.filter(price_calculator="lfs.shipping.GrossShippingMethodPriceCalculator"):
shipping_method.price_calculator = "lfs.shipping.calculator.GrossPriceCalculator"
shipping_method.save()
class Migration(migrations.Migration):
dependencies = [
('shipping', '0001_initial'),
]
operations = [
migrations.RunPython(update_price_calculator),
]
## Instruction:
Fix price calculator class names
## Code After:
from __future__ import unicode_literals
from django.db import migrations
def update_price_calculator(apps, schema_editor):
ShippingMethod = apps.get_model("shipping", "ShippingMethod")
for shipping_method in ShippingMethod.objects.filter(price_calculator="lfs.shipping.NetShippingMethodPriceCalculator"):
shipping_method.price_calculator = "lfs.shipping.calculator.NetShippingMethodPriceCalculator"
shipping_method.save()
for shipping_method in ShippingMethod.objects.filter(price_calculator="lfs.shipping.GrossShippingMethodPriceCalculator"):
shipping_method.price_calculator = "lfs.shipping.calculator.GrossShippingMethodPriceCalculator"
shipping_method.save()
class Migration(migrations.Migration):
dependencies = [
('shipping', '0001_initial'),
]
operations = [
migrations.RunPython(update_price_calculator),
]
|
# ... existing code ...
def update_price_calculator(apps, schema_editor):
ShippingMethod = apps.get_model("shipping", "ShippingMethod")
for shipping_method in ShippingMethod.objects.filter(price_calculator="lfs.shipping.NetShippingMethodPriceCalculator"):
shipping_method.price_calculator = "lfs.shipping.calculator.NetShippingMethodPriceCalculator"
shipping_method.save()
for shipping_method in ShippingMethod.objects.filter(price_calculator="lfs.shipping.GrossShippingMethodPriceCalculator"):
shipping_method.price_calculator = "lfs.shipping.calculator.GrossShippingMethodPriceCalculator"
shipping_method.save()
# ... rest of the code ...
|
417415283d87654b066c11d807516d3cd5b5bf3d
|
tests/test_probabilistic_interleave_speed.py
|
tests/test_probabilistic_interleave_speed.py
|
import interleaving as il
import numpy as np
import pytest
np.random.seed(0)
from .test_methods import TestMethods
class TestProbabilisticInterleaveSpeed(TestMethods):
def test_interleave(self):
r1 = list(range(100))
r2 = list(range(100, 200))
for i in range(1000):
method = il.Probabilistic([r1, r2])
ranking = method.interleave()
print(list(ranking))
|
import interleaving as il
import numpy as np
import pytest
np.random.seed(0)
from .test_methods import TestMethods
class TestProbabilisticInterleaveSpeed(TestMethods):
def test_interleave(self):
r1 = list(range(100))
r2 = list(range(50, 150))
r3 = list(range(100, 200))
r4 = list(range(150, 250))
for i in range(1000):
method = il.Probabilistic([r1, r2, r3, r4])
ranking = method.interleave()
method.evaluate(ranking, [0, 1, 2])
|
Add tests for measuring the speed of probabilistic interleaving
|
Add tests for measuring the speed of probabilistic interleaving
|
Python
|
mit
|
mpkato/interleaving
|
python
|
## Code Before:
import interleaving as il
import numpy as np
import pytest
np.random.seed(0)
from .test_methods import TestMethods
class TestProbabilisticInterleaveSpeed(TestMethods):
def test_interleave(self):
r1 = list(range(100))
r2 = list(range(100, 200))
for i in range(1000):
method = il.Probabilistic([r1, r2])
ranking = method.interleave()
print(list(ranking))
## Instruction:
Add tests for measuring the speed of probabilistic interleaving
## Code After:
import interleaving as il
import numpy as np
import pytest
np.random.seed(0)
from .test_methods import TestMethods
class TestProbabilisticInterleaveSpeed(TestMethods):
def test_interleave(self):
r1 = list(range(100))
r2 = list(range(50, 150))
r3 = list(range(100, 200))
r4 = list(range(150, 250))
for i in range(1000):
method = il.Probabilistic([r1, r2, r3, r4])
ranking = method.interleave()
method.evaluate(ranking, [0, 1, 2])
|
# ... existing code ...
def test_interleave(self):
r1 = list(range(100))
r2 = list(range(50, 150))
r3 = list(range(100, 200))
r4 = list(range(150, 250))
for i in range(1000):
method = il.Probabilistic([r1, r2, r3, r4])
ranking = method.interleave()
method.evaluate(ranking, [0, 1, 2])
# ... rest of the code ...
|
84f6cc46e7ba7e2e3c046e957545687ce6802278
|
cegui/src/ScriptingModules/PythonScriptModule/bindings/distutils/PyCEGUI/__init__.py
|
cegui/src/ScriptingModules/PythonScriptModule/bindings/distutils/PyCEGUI/__init__.py
|
import os
import os.path
# atrocious and unholy!
def get_my_path():
import fake
return os.path.dirname(str(fake).split()[3][1:])
libpath = os.path.abspath(get_my_path())
#print "libpath =", libpath
os.environ['PATH'] = libpath + ";" + os.environ['PATH']
from PyCEGUI import *
|
import os
import os.path
# atrocious and unholy!
def get_my_path():
import fake
return os.path.dirname(os.path.abspath(fake.__file__))
libpath = get_my_path()
#print "libpath =", libpath
os.environ['PATH'] = libpath + ";" + os.environ['PATH']
from PyCEGUI import *
|
Use a less pathetic method to retrieve the PyCEGUI dirname
|
MOD: Use a less pathetic method to retrieve the PyCEGUI dirname
|
Python
|
mit
|
cbeck88/cegui-mirror-two,cbeck88/cegui-mirror-two,cbeck88/cegui-mirror-two,cbeck88/cegui-mirror-two,cbeck88/cegui-mirror-two
|
python
|
## Code Before:
import os
import os.path
# atrocious and unholy!
def get_my_path():
import fake
return os.path.dirname(str(fake).split()[3][1:])
libpath = os.path.abspath(get_my_path())
#print "libpath =", libpath
os.environ['PATH'] = libpath + ";" + os.environ['PATH']
from PyCEGUI import *
## Instruction:
MOD: Use a less pathetic method to retrieve the PyCEGUI dirname
## Code After:
import os
import os.path
# atrocious and unholy!
def get_my_path():
import fake
return os.path.dirname(os.path.abspath(fake.__file__))
libpath = get_my_path()
#print "libpath =", libpath
os.environ['PATH'] = libpath + ";" + os.environ['PATH']
from PyCEGUI import *
|
// ... existing code ...
# atrocious and unholy!
def get_my_path():
import fake
return os.path.dirname(os.path.abspath(fake.__file__))
libpath = get_my_path()
#print "libpath =", libpath
os.environ['PATH'] = libpath + ";" + os.environ['PATH']
// ... rest of the code ...
|
638dda46a63f1c98f674febe170df55fe36cea5e
|
tests/test_timestepping.py
|
tests/test_timestepping.py
|
import numpy as np
from sympy import Eq
import pytest
from devito.interfaces import TimeData
from devito.stencilkernel import StencilKernel
@pytest.fixture
def a(shape=(11, 11)):
"""Forward time data object, unrolled (save=True)"""
return TimeData(name='a', shape=shape, time_order=1,
time_dim=6, save=True)
def test_forward(a, nt=5):
a.data[0, :] = 1.
eqn = Eq(a.forward, a + 1.)
StencilKernel(eqn, dle=None, dse=None)()
for i in range(nt):
assert np.allclose(a.data[i, :], 1. + i, rtol=1.e-12)
|
import numpy as np
from sympy import Eq
import pytest
from devito.interfaces import Backward, Forward, TimeData
from devito.stencilkernel import StencilKernel
@pytest.fixture
def a(shape=(11, 11)):
"""Forward time data object, unrolled (save=True)"""
return TimeData(name='a', shape=shape, time_order=1,
time_dim=6, save=True)
@pytest.fixture
def b(shape=(11, 11)):
"""Backward time data object, unrolled (save=True)"""
return TimeData(name='b', shape=shape, time_order=1,
time_dim=6, save=True)
def test_forward(a, nt=5):
a.data[0, :] = 1.
eqn = Eq(a.forward, a + 1.)
StencilKernel(eqn, dle=None, dse=None)()
for i in range(nt):
assert np.allclose(a.data[i, :], 1. + i, rtol=1.e-12)
def test_backward(b, nt=5):
b.data[nt, :] = 6.
eqn = Eq(b.backward, b - 1.)
StencilKernel(eqn, dle=None, dse=None, time_axis=Backward)(time=nt)
for i in range(nt + 1):
assert np.allclose(b.data[i, :], 1. + i, rtol=1.e-12)
|
Add explicit test for reverse timestepping
|
TimeData: Add explicit test for reverse timestepping
|
Python
|
mit
|
opesci/devito,opesci/devito
|
python
|
## Code Before:
import numpy as np
from sympy import Eq
import pytest
from devito.interfaces import TimeData
from devito.stencilkernel import StencilKernel
@pytest.fixture
def a(shape=(11, 11)):
"""Forward time data object, unrolled (save=True)"""
return TimeData(name='a', shape=shape, time_order=1,
time_dim=6, save=True)
def test_forward(a, nt=5):
a.data[0, :] = 1.
eqn = Eq(a.forward, a + 1.)
StencilKernel(eqn, dle=None, dse=None)()
for i in range(nt):
assert np.allclose(a.data[i, :], 1. + i, rtol=1.e-12)
## Instruction:
TimeData: Add explicit test for reverse timestepping
## Code After:
import numpy as np
from sympy import Eq
import pytest
from devito.interfaces import Backward, Forward, TimeData
from devito.stencilkernel import StencilKernel
@pytest.fixture
def a(shape=(11, 11)):
"""Forward time data object, unrolled (save=True)"""
return TimeData(name='a', shape=shape, time_order=1,
time_dim=6, save=True)
@pytest.fixture
def b(shape=(11, 11)):
"""Backward time data object, unrolled (save=True)"""
return TimeData(name='b', shape=shape, time_order=1,
time_dim=6, save=True)
def test_forward(a, nt=5):
a.data[0, :] = 1.
eqn = Eq(a.forward, a + 1.)
StencilKernel(eqn, dle=None, dse=None)()
for i in range(nt):
assert np.allclose(a.data[i, :], 1. + i, rtol=1.e-12)
def test_backward(b, nt=5):
b.data[nt, :] = 6.
eqn = Eq(b.backward, b - 1.)
StencilKernel(eqn, dle=None, dse=None, time_axis=Backward)(time=nt)
for i in range(nt + 1):
assert np.allclose(b.data[i, :], 1. + i, rtol=1.e-12)
|
# ... existing code ...
import pytest
from devito.interfaces import Backward, Forward, TimeData
from devito.stencilkernel import StencilKernel
# ... modified code ...
time_dim=6, save=True)
@pytest.fixture
def b(shape=(11, 11)):
"""Backward time data object, unrolled (save=True)"""
return TimeData(name='b', shape=shape, time_order=1,
time_dim=6, save=True)
def test_forward(a, nt=5):
a.data[0, :] = 1.
eqn = Eq(a.forward, a + 1.)
...
StencilKernel(eqn, dle=None, dse=None)()
for i in range(nt):
assert np.allclose(a.data[i, :], 1. + i, rtol=1.e-12)
def test_backward(b, nt=5):
b.data[nt, :] = 6.
eqn = Eq(b.backward, b - 1.)
StencilKernel(eqn, dle=None, dse=None, time_axis=Backward)(time=nt)
for i in range(nt + 1):
assert np.allclose(b.data[i, :], 1. + i, rtol=1.e-12)
# ... rest of the code ...
|
ae4af32bf5ca21b2c7d80e2034560ed23f6a2ea7
|
src/main-rpython.py
|
src/main-rpython.py
|
import sys
from som.compiler.parse_error import ParseError
from som.interp_type import is_ast_interpreter, is_bytecode_interpreter
from som.vm.universe import main, Exit
import os
# __________ Entry points __________
def entry_point(argv):
try:
main(argv)
except Exit as e:
return e.code
except ParseError as e:
os.write(2, str(e))
return 1
except Exception as e:
os.write(2, "ERROR: %s thrown during execution.\n" % e)
return 1
return 1
# _____ Define and setup target ___
def target(driver, args):
exe_name = 'som-'
if is_ast_interpreter():
exe_name += 'ast-'
elif is_bytecode_interpreter():
exe_name += 'bc-'
if driver.config.translation.jit:
exe_name += 'jit'
else:
exe_name += 'interp'
driver.exe_name = exe_name
return entry_point, None
def jitpolicy(driver):
from rpython.jit.codewriter.policy import JitPolicy
return JitPolicy()
if __name__ == '__main__':
from rpython.translator.driver import TranslationDriver
f, _ = target(TranslationDriver(), sys.argv)
sys.exit(f(sys.argv))
|
import sys
from som.compiler.parse_error import ParseError
from som.interp_type import is_ast_interpreter, is_bytecode_interpreter
from som.vm.universe import main, Exit
import os
try:
import rpython.rlib
except ImportError:
print("Failed to load RPython library. Please make sure it is on PYTHONPATH")
sys.exit(1)
# __________ Entry points __________
def entry_point(argv):
try:
main(argv)
except Exit as e:
return e.code
except ParseError as e:
os.write(2, str(e))
return 1
except Exception as e:
os.write(2, "ERROR: %s thrown during execution.\n" % e)
return 1
return 1
# _____ Define and setup target ___
def target(driver, args):
exe_name = 'som-'
if is_ast_interpreter():
exe_name += 'ast-'
elif is_bytecode_interpreter():
exe_name += 'bc-'
if driver.config.translation.jit:
exe_name += 'jit'
else:
exe_name += 'interp'
driver.exe_name = exe_name
return entry_point, None
def jitpolicy(driver):
from rpython.jit.codewriter.policy import JitPolicy
return JitPolicy()
if __name__ == '__main__':
from rpython.translator.driver import TranslationDriver
f, _ = target(TranslationDriver(), sys.argv)
sys.exit(f(sys.argv))
|
Add error to make sure we have RPython when using the RPython main
|
Add error to make sure we have RPython when using the RPython main
Signed-off-by: Stefan Marr <[email protected]>
|
Python
|
mit
|
SOM-st/RPySOM,smarr/PySOM,smarr/PySOM,SOM-st/PySOM,SOM-st/RPySOM,SOM-st/PySOM
|
python
|
## Code Before:
import sys
from som.compiler.parse_error import ParseError
from som.interp_type import is_ast_interpreter, is_bytecode_interpreter
from som.vm.universe import main, Exit
import os
# __________ Entry points __________
def entry_point(argv):
try:
main(argv)
except Exit as e:
return e.code
except ParseError as e:
os.write(2, str(e))
return 1
except Exception as e:
os.write(2, "ERROR: %s thrown during execution.\n" % e)
return 1
return 1
# _____ Define and setup target ___
def target(driver, args):
exe_name = 'som-'
if is_ast_interpreter():
exe_name += 'ast-'
elif is_bytecode_interpreter():
exe_name += 'bc-'
if driver.config.translation.jit:
exe_name += 'jit'
else:
exe_name += 'interp'
driver.exe_name = exe_name
return entry_point, None
def jitpolicy(driver):
from rpython.jit.codewriter.policy import JitPolicy
return JitPolicy()
if __name__ == '__main__':
from rpython.translator.driver import TranslationDriver
f, _ = target(TranslationDriver(), sys.argv)
sys.exit(f(sys.argv))
## Instruction:
Add error to make sure we have RPython when using the RPython main
Signed-off-by: Stefan Marr <[email protected]>
## Code After:
import sys
from som.compiler.parse_error import ParseError
from som.interp_type import is_ast_interpreter, is_bytecode_interpreter
from som.vm.universe import main, Exit
import os
try:
import rpython.rlib
except ImportError:
print("Failed to load RPython library. Please make sure it is on PYTHONPATH")
sys.exit(1)
# __________ Entry points __________
def entry_point(argv):
try:
main(argv)
except Exit as e:
return e.code
except ParseError as e:
os.write(2, str(e))
return 1
except Exception as e:
os.write(2, "ERROR: %s thrown during execution.\n" % e)
return 1
return 1
# _____ Define and setup target ___
def target(driver, args):
exe_name = 'som-'
if is_ast_interpreter():
exe_name += 'ast-'
elif is_bytecode_interpreter():
exe_name += 'bc-'
if driver.config.translation.jit:
exe_name += 'jit'
else:
exe_name += 'interp'
driver.exe_name = exe_name
return entry_point, None
def jitpolicy(driver):
from rpython.jit.codewriter.policy import JitPolicy
return JitPolicy()
if __name__ == '__main__':
from rpython.translator.driver import TranslationDriver
f, _ = target(TranslationDriver(), sys.argv)
sys.exit(f(sys.argv))
|
# ... existing code ...
from som.vm.universe import main, Exit
import os
try:
import rpython.rlib
except ImportError:
print("Failed to load RPython library. Please make sure it is on PYTHONPATH")
sys.exit(1)
# __________ Entry points __________
# ... rest of the code ...
|
95e1f9517d79fb48bb9601e2d94419c6e2c984ca
|
tools/data2c.py
|
tools/data2c.py
|
import sys
import os.path
import string
def path2varname(path):
path = os.path.basename(path)
s = ''
for c in path:
if c in string.ascii_letters or c in string.digits:
s += c
else:
s += '_'
return s
def main():
for path in sys.argv[1:]:
varname = path2varname(path)
with open(path, 'rb') as f:
sys.stdout.write('static const char %s[] = {' % varname)
data = f.read()
i = 0
for c in data:
if i % 16 == 0:
sys.stdout.write('\n\t')
i += 1
sys.stdout.write('0x%02x, ' % ord(c))
sys.stdout.write('\n};\n\n')
if __name__ == '__main__':
main()
|
import sys
import os.path
import string
import getopt
cflag = 0 # clean output: just the hexdump
def path2varname(path):
path = os.path.basename(path)
s = ''
for c in path:
if c in string.ascii_letters or c in string.digits:
s += c
else:
s += '_'
return s
def main():
global cflag
opts, args = getopt.getopt(sys.argv[1:], "c")
for (x, y) in opts:
if x == "-c":
cflag += 1
for path in args:
varname = path2varname(path)
with open(path, 'rb') as f:
if not cflag:
sys.stdout.write('static const char %s[] = {' % varname)
data = f.read()
i = 0
for c in data:
if i % 16 == 0:
sys.stdout.write('\n')
if not cflag:
sys.stdout.write('\t')
i += 1
sys.stdout.write('0x%02x, ' % ord(c))
if not cflag:
sys.stdout.write('\n};')
sys.stdout.write('\n')
if __name__ == '__main__':
main()
|
Add -c option: output only hexdump, without C variable wrapper.
|
Add -c option: output only hexdump, without C variable wrapper.
|
Python
|
isc
|
S010/misc,S010/misc,S010/misc,S010/misc,S010/misc,S010/misc,S010/misc,S010/misc,S010/misc
|
python
|
## Code Before:
import sys
import os.path
import string
def path2varname(path):
path = os.path.basename(path)
s = ''
for c in path:
if c in string.ascii_letters or c in string.digits:
s += c
else:
s += '_'
return s
def main():
for path in sys.argv[1:]:
varname = path2varname(path)
with open(path, 'rb') as f:
sys.stdout.write('static const char %s[] = {' % varname)
data = f.read()
i = 0
for c in data:
if i % 16 == 0:
sys.stdout.write('\n\t')
i += 1
sys.stdout.write('0x%02x, ' % ord(c))
sys.stdout.write('\n};\n\n')
if __name__ == '__main__':
main()
## Instruction:
Add -c option: output only hexdump, without C variable wrapper.
## Code After:
import sys
import os.path
import string
import getopt
cflag = 0 # clean output: just the hexdump
def path2varname(path):
path = os.path.basename(path)
s = ''
for c in path:
if c in string.ascii_letters or c in string.digits:
s += c
else:
s += '_'
return s
def main():
global cflag
opts, args = getopt.getopt(sys.argv[1:], "c")
for (x, y) in opts:
if x == "-c":
cflag += 1
for path in args:
varname = path2varname(path)
with open(path, 'rb') as f:
if not cflag:
sys.stdout.write('static const char %s[] = {' % varname)
data = f.read()
i = 0
for c in data:
if i % 16 == 0:
sys.stdout.write('\n')
if not cflag:
sys.stdout.write('\t')
i += 1
sys.stdout.write('0x%02x, ' % ord(c))
if not cflag:
sys.stdout.write('\n};')
sys.stdout.write('\n')
if __name__ == '__main__':
main()
|
// ... existing code ...
import sys
import os.path
import string
import getopt
cflag = 0 # clean output: just the hexdump
def path2varname(path):
path = os.path.basename(path)
// ... modified code ...
return s
def main():
global cflag
opts, args = getopt.getopt(sys.argv[1:], "c")
for (x, y) in opts:
if x == "-c":
cflag += 1
for path in args:
varname = path2varname(path)
with open(path, 'rb') as f:
if not cflag:
sys.stdout.write('static const char %s[] = {' % varname)
data = f.read()
i = 0
for c in data:
if i % 16 == 0:
sys.stdout.write('\n')
if not cflag:
sys.stdout.write('\t')
i += 1
sys.stdout.write('0x%02x, ' % ord(c))
if not cflag:
sys.stdout.write('\n};')
sys.stdout.write('\n')
if __name__ == '__main__':
main()
// ... rest of the code ...
|
53c6a9d6cd9c674794b28d08f36cd7dfc19b317d
|
ReferenceLibrary/src/test/java/referencelibrary/AppTest.java
|
ReferenceLibrary/src/test/java/referencelibrary/AppTest.java
|
package referencelibrary;
import org.junit.Before;
import org.junit.Test;
import referencelibrary.data.StubDao;
import referencelibrary.reference.BookReference;
import referencelibrary.reference.Reference;
import java.util.List;
import static org.junit.Assert.*;
import referencelibrary.util.FileUtil;
/**
* Created by petri on 18.4.2016.
*/
public class AppTest {
private App app;
@Before
public void setUp() {
app = new App(new StubDao());
}
@Test
public void newReference() {
app.newReference(new BookReference("REF1"));
List<Reference> reflist = app.listReferences();
assertEquals(2, reflist.size());
assertEquals("REF1", reflist.get(1).getReferenceName());
}
@Test
public void listReferences() {
List<Reference> reflist = app.listReferences();
assertEquals("REF", reflist.get(0).getReferenceName());
}
@Test
public void generateBixTexFile() {
String filename = "test_bibtex.bib";
app.generateBixTexFile(filename);
String result = FileUtil.Read(filename);
assertEquals("@Book{REF,\n}\n\n", result);
}
}
|
package referencelibrary;
import java.io.File;
import org.junit.Before;
import org.junit.Test;
import referencelibrary.data.StubDao;
import referencelibrary.reference.BookReference;
import referencelibrary.reference.Reference;
import java.util.List;
import org.junit.After;
import static org.junit.Assert.*;
import referencelibrary.util.FileUtil;
/**
* Created by petri on 18.4.2016.
*/
public class AppTest {
private final String filename = "test_bibtex.bib";
private App app;
@Before
public void setUp() {
app = new App(new StubDao());
File testFile = new File(filename);
testFile.delete();
}
@After
public void tearDown() {
File testFile = new File(filename);
testFile.delete();
}
@Test
public void newReference() {
app.newReference(new BookReference("REF1"));
List<Reference> reflist = app.listReferences();
assertEquals(2, reflist.size());
assertEquals("REF1", reflist.get(1).getReferenceName());
}
@Test
public void listReferences() {
List<Reference> reflist = app.listReferences();
assertEquals("REF", reflist.get(0).getReferenceName());
}
@Test
public void generateBixTexFile() {
app.generateBixTexFile(filename);
String result = FileUtil.Read(filename);
assertEquals("@Book{REF,\n}\n\n", result);
}
}
|
Make BookReference test cleanup test file
|
Make BookReference test cleanup test file
|
Java
|
mit
|
Mahtimursut/ohtu
|
java
|
## Code Before:
package referencelibrary;
import org.junit.Before;
import org.junit.Test;
import referencelibrary.data.StubDao;
import referencelibrary.reference.BookReference;
import referencelibrary.reference.Reference;
import java.util.List;
import static org.junit.Assert.*;
import referencelibrary.util.FileUtil;
/**
* Created by petri on 18.4.2016.
*/
public class AppTest {
private App app;
@Before
public void setUp() {
app = new App(new StubDao());
}
@Test
public void newReference() {
app.newReference(new BookReference("REF1"));
List<Reference> reflist = app.listReferences();
assertEquals(2, reflist.size());
assertEquals("REF1", reflist.get(1).getReferenceName());
}
@Test
public void listReferences() {
List<Reference> reflist = app.listReferences();
assertEquals("REF", reflist.get(0).getReferenceName());
}
@Test
public void generateBixTexFile() {
String filename = "test_bibtex.bib";
app.generateBixTexFile(filename);
String result = FileUtil.Read(filename);
assertEquals("@Book{REF,\n}\n\n", result);
}
}
## Instruction:
Make BookReference test cleanup test file
## Code After:
package referencelibrary;
import java.io.File;
import org.junit.Before;
import org.junit.Test;
import referencelibrary.data.StubDao;
import referencelibrary.reference.BookReference;
import referencelibrary.reference.Reference;
import java.util.List;
import org.junit.After;
import static org.junit.Assert.*;
import referencelibrary.util.FileUtil;
/**
* Created by petri on 18.4.2016.
*/
public class AppTest {
private final String filename = "test_bibtex.bib";
private App app;
@Before
public void setUp() {
app = new App(new StubDao());
File testFile = new File(filename);
testFile.delete();
}
@After
public void tearDown() {
File testFile = new File(filename);
testFile.delete();
}
@Test
public void newReference() {
app.newReference(new BookReference("REF1"));
List<Reference> reflist = app.listReferences();
assertEquals(2, reflist.size());
assertEquals("REF1", reflist.get(1).getReferenceName());
}
@Test
public void listReferences() {
List<Reference> reflist = app.listReferences();
assertEquals("REF", reflist.get(0).getReferenceName());
}
@Test
public void generateBixTexFile() {
app.generateBixTexFile(filename);
String result = FileUtil.Read(filename);
assertEquals("@Book{REF,\n}\n\n", result);
}
}
|
# ... existing code ...
package referencelibrary;
import java.io.File;
import org.junit.Before;
import org.junit.Test;
import referencelibrary.data.StubDao;
# ... modified code ...
import referencelibrary.reference.Reference;
import java.util.List;
import org.junit.After;
import static org.junit.Assert.*;
import referencelibrary.util.FileUtil;
...
* Created by petri on 18.4.2016.
*/
public class AppTest {
private final String filename = "test_bibtex.bib";
private App app;
@Before
public void setUp() {
app = new App(new StubDao());
File testFile = new File(filename);
testFile.delete();
}
@After
public void tearDown() {
File testFile = new File(filename);
testFile.delete();
}
@Test
...
@Test
public void generateBixTexFile() {
app.generateBixTexFile(filename);
String result = FileUtil.Read(filename);
assertEquals("@Book{REF,\n}\n\n", result);
# ... rest of the code ...
|
2dac0f9825b58c5c9af9958d6f0cb0337649cf76
|
wsgi_general.py
|
wsgi_general.py
|
import DQXUtils
import DQXDbTools
def application(environ, start_response):
#For the root we do a relative redirect to index.html, hoping the app has one
if environ['PATH_INFO'] == '/':
start_response('301 Moved Permanently', [('Location', 'index.html'),])
return
with DQXDbTools.DBCursor() as cur:
cur.execute('select id from datasetindex')
datasets = [d[0] for d in cur.fetchall()]
#Redirect to specific dataset
path = environ['PATH_INFO'].split('/')
if len(path) >= 2 and path[-2] in datasets:
start_response('301 Moved Permanently', [('Location', '../index.html?dataset='+path[-2]),])
return
if path[-1] in datasets:
start_response('301 Moved Permanently', [('Location', '../index.html?dataset='+path[-1]),])
return
#Everything else is 404
DQXUtils.LogServer('404:' + environ['PATH_INFO'])
start_response('404 Not Found', [])
try:
with open('static/404.html') as page:
yield page.read()
except IOError:
yield '404 Page Not Found'
return
|
import DQXUtils
import DQXDbTools
def application(environ, start_response):
#For the root we do a relative redirect to index.html, hoping the app has one
if environ['PATH_INFO'] == '/':
start_response('301 Moved Permanently', [('Location', 'index.html'),])
return
with DQXDbTools.DBCursor() as cur:
cur.execute('select id from datasetindex')
datasets = [d[0] for d in cur.fetchall()]
#Redirect to specific dataset
path = environ['PATH_INFO'].split('/')
if len(path) >= 2 and path[-2] in datasets and not (len(path) >= 3 and path[-3] == "Docs"):
start_response('301 Moved Permanently', [('Location', '../index.html?dataset='+path[-2]),])
return
if path[-1] in datasets:
start_response('301 Moved Permanently', [('Location', '../index.html?dataset='+path[-1]),])
return
#Everything else is 404
DQXUtils.LogServer('404:' + environ['PATH_INFO'])
start_response('404 Not Found', [])
try:
with open('static/404.html') as page:
yield page.read()
except IOError:
yield '404 Page Not Found'
return
|
Add routing exception for docs
|
Add routing exception for docs
|
Python
|
agpl-3.0
|
cggh/DQXServer
|
python
|
## Code Before:
import DQXUtils
import DQXDbTools
def application(environ, start_response):
#For the root we do a relative redirect to index.html, hoping the app has one
if environ['PATH_INFO'] == '/':
start_response('301 Moved Permanently', [('Location', 'index.html'),])
return
with DQXDbTools.DBCursor() as cur:
cur.execute('select id from datasetindex')
datasets = [d[0] for d in cur.fetchall()]
#Redirect to specific dataset
path = environ['PATH_INFO'].split('/')
if len(path) >= 2 and path[-2] in datasets:
start_response('301 Moved Permanently', [('Location', '../index.html?dataset='+path[-2]),])
return
if path[-1] in datasets:
start_response('301 Moved Permanently', [('Location', '../index.html?dataset='+path[-1]),])
return
#Everything else is 404
DQXUtils.LogServer('404:' + environ['PATH_INFO'])
start_response('404 Not Found', [])
try:
with open('static/404.html') as page:
yield page.read()
except IOError:
yield '404 Page Not Found'
return
## Instruction:
Add routing exception for docs
## Code After:
import DQXUtils
import DQXDbTools
def application(environ, start_response):
#For the root we do a relative redirect to index.html, hoping the app has one
if environ['PATH_INFO'] == '/':
start_response('301 Moved Permanently', [('Location', 'index.html'),])
return
with DQXDbTools.DBCursor() as cur:
cur.execute('select id from datasetindex')
datasets = [d[0] for d in cur.fetchall()]
#Redirect to specific dataset
path = environ['PATH_INFO'].split('/')
if len(path) >= 2 and path[-2] in datasets and not (len(path) >= 3 and path[-3] == "Docs"):
start_response('301 Moved Permanently', [('Location', '../index.html?dataset='+path[-2]),])
return
if path[-1] in datasets:
start_response('301 Moved Permanently', [('Location', '../index.html?dataset='+path[-1]),])
return
#Everything else is 404
DQXUtils.LogServer('404:' + environ['PATH_INFO'])
start_response('404 Not Found', [])
try:
with open('static/404.html') as page:
yield page.read()
except IOError:
yield '404 Page Not Found'
return
|
...
#Redirect to specific dataset
path = environ['PATH_INFO'].split('/')
if len(path) >= 2 and path[-2] in datasets and not (len(path) >= 3 and path[-3] == "Docs"):
start_response('301 Moved Permanently', [('Location', '../index.html?dataset='+path[-2]),])
return
if path[-1] in datasets:
...
|
341725556ca0818d93f4fad7c6929340c218c972
|
src/main/java/io/github/aquerr/eaglefactions/commands/VersionCommand.java
|
src/main/java/io/github/aquerr/eaglefactions/commands/VersionCommand.java
|
package io.github.aquerr.eaglefactions.commands;
import io.github.aquerr.eaglefactions.PluginInfo;
import org.spongepowered.api.command.CommandException;
import org.spongepowered.api.command.CommandResult;
import org.spongepowered.api.command.CommandSource;
import org.spongepowered.api.command.args.CommandContext;
import org.spongepowered.api.command.spec.CommandExecutor;
import org.spongepowered.api.text.Text;
import org.spongepowered.api.text.format.TextColors;
public class VersionCommand implements CommandExecutor
{
@Override
public CommandResult execute(CommandSource source, CommandContext context) throws CommandException
{
source.sendMessage (Text.of (TextColors.AQUA, PluginInfo.Name + " - ", TextColors.WHITE, "version ", PluginInfo.Version));
return CommandResult.success ();
}
}
|
package io.github.aquerr.eaglefactions.commands;
import io.github.aquerr.eaglefactions.PluginInfo;
import org.spongepowered.api.command.CommandException;
import org.spongepowered.api.command.CommandResult;
import org.spongepowered.api.command.CommandSource;
import org.spongepowered.api.command.args.CommandContext;
import org.spongepowered.api.command.spec.CommandExecutor;
import org.spongepowered.api.text.Text;
import org.spongepowered.api.text.format.TextColors;
public class VersionCommand implements CommandExecutor
{
@Override
public CommandResult execute(CommandSource source, CommandContext context) throws CommandException
{
source.sendMessage (Text.of (TextColors.AQUA, PluginInfo.Name, TextColors.WHITE, " - Version ", PluginInfo.Version));
return CommandResult.success ();
}
}
|
Change display of /f version
|
Change display of /f version
|
Java
|
mit
|
Aquerr/EagleFactions,Aquerr/EagleFactions
|
java
|
## Code Before:
package io.github.aquerr.eaglefactions.commands;
import io.github.aquerr.eaglefactions.PluginInfo;
import org.spongepowered.api.command.CommandException;
import org.spongepowered.api.command.CommandResult;
import org.spongepowered.api.command.CommandSource;
import org.spongepowered.api.command.args.CommandContext;
import org.spongepowered.api.command.spec.CommandExecutor;
import org.spongepowered.api.text.Text;
import org.spongepowered.api.text.format.TextColors;
public class VersionCommand implements CommandExecutor
{
@Override
public CommandResult execute(CommandSource source, CommandContext context) throws CommandException
{
source.sendMessage (Text.of (TextColors.AQUA, PluginInfo.Name + " - ", TextColors.WHITE, "version ", PluginInfo.Version));
return CommandResult.success ();
}
}
## Instruction:
Change display of /f version
## Code After:
package io.github.aquerr.eaglefactions.commands;
import io.github.aquerr.eaglefactions.PluginInfo;
import org.spongepowered.api.command.CommandException;
import org.spongepowered.api.command.CommandResult;
import org.spongepowered.api.command.CommandSource;
import org.spongepowered.api.command.args.CommandContext;
import org.spongepowered.api.command.spec.CommandExecutor;
import org.spongepowered.api.text.Text;
import org.spongepowered.api.text.format.TextColors;
public class VersionCommand implements CommandExecutor
{
@Override
public CommandResult execute(CommandSource source, CommandContext context) throws CommandException
{
source.sendMessage (Text.of (TextColors.AQUA, PluginInfo.Name, TextColors.WHITE, " - Version ", PluginInfo.Version));
return CommandResult.success ();
}
}
|
// ... existing code ...
@Override
public CommandResult execute(CommandSource source, CommandContext context) throws CommandException
{
source.sendMessage (Text.of (TextColors.AQUA, PluginInfo.Name, TextColors.WHITE, " - Version ", PluginInfo.Version));
return CommandResult.success ();
}
// ... rest of the code ...
|
4783f7047500865da06202a7d6d777801cf49c71
|
Box.py
|
Box.py
|
class Box:
def __init__(self, length, width, height):
self.length = length
self.width = width
self.height = height
self.plist = list()
def main():
N = input()
box = list()
for i in range(N):
x = input()
x = x.split('')
b = Box(x[0], x[1], x[2])
box.append(b)
if __name__ == "__main__":
main()
|
class Box:
def __init__(self, length, width, height):
self.length = length
self.width = width
self.height = height
self.plist = list()
self.plength = 0
def __lt__(self, other):
return (self.length < other.length
and self.width < other.width
and self.height < other.height)
def link_to(self, box):
self.plist.append(box)
self.plength += 1
"""
Test for input:
def print_content(self):
print(self.length, self.width, self.height)
"""
def main():
N = int(input())
box = list()
for i in range(N):
x = input()
x = x.split(' ')
x = Box(x[0], x[1], x[2])
box.append(x)
# Test:
# for i, x in enumerate(box):
# x.print_content()
if __name__ == "__main__":
main()
|
Redefine function __lt__ and define a function to set link with others.
|
Redefine function __lt__ and define a function to set link with others.
|
Python
|
mit
|
hane1818/Algorithm_HW4_box_problem
|
python
|
## Code Before:
class Box:
def __init__(self, length, width, height):
self.length = length
self.width = width
self.height = height
self.plist = list()
def main():
N = input()
box = list()
for i in range(N):
x = input()
x = x.split('')
b = Box(x[0], x[1], x[2])
box.append(b)
if __name__ == "__main__":
main()
## Instruction:
Redefine function __lt__ and define a function to set link with others.
## Code After:
class Box:
def __init__(self, length, width, height):
self.length = length
self.width = width
self.height = height
self.plist = list()
self.plength = 0
def __lt__(self, other):
return (self.length < other.length
and self.width < other.width
and self.height < other.height)
def link_to(self, box):
self.plist.append(box)
self.plength += 1
"""
Test for input:
def print_content(self):
print(self.length, self.width, self.height)
"""
def main():
N = int(input())
box = list()
for i in range(N):
x = input()
x = x.split(' ')
x = Box(x[0], x[1], x[2])
box.append(x)
# Test:
# for i, x in enumerate(box):
# x.print_content()
if __name__ == "__main__":
main()
|
// ... existing code ...
self.width = width
self.height = height
self.plist = list()
self.plength = 0
def __lt__(self, other):
return (self.length < other.length
and self.width < other.width
and self.height < other.height)
def link_to(self, box):
self.plist.append(box)
self.plength += 1
"""
Test for input:
def print_content(self):
print(self.length, self.width, self.height)
"""
def main():
N = int(input())
box = list()
for i in range(N):
x = input()
x = x.split(' ')
x = Box(x[0], x[1], x[2])
box.append(x)
# Test:
# for i, x in enumerate(box):
# x.print_content()
if __name__ == "__main__":
main()
// ... rest of the code ...
|
e0aab62f2a693ca20a81c9e55c4220f379ac9eb1
|
socialregistration/templatetags/socialregistration_tags.py
|
socialregistration/templatetags/socialregistration_tags.py
|
from django import template
register = template.Library()
@register.tag
def social_csrf_token():
"""
Wrapper around the ``{% csrf_token %}`` template tag to make socialregistration
work with both Django v1.2 and Django < v1.2
"""
return CsrfNode()
class CsrfNode(template.Node):
def render(self, context):
try:
from django.template.defaulttags import CsrfTokenNode
return CsrfTokenNode().render(context)
except ImportError:
return u''
|
from django import template
register = template.Library()
@register.tag
def social_csrf_token(parser, token):
"""
Wrapper around the ``{% csrf_token %}`` template tag to make socialregistration
work with both Django v1.2 and Django < v1.2
"""
return CsrfNode()
class CsrfNode(template.Node):
def render(self, context):
try:
from django.template.defaulttags import CsrfTokenNode
return CsrfTokenNode().render(context)
except ImportError:
return u''
|
Add necessary arguments to the social_csrf_token tag.
|
Add necessary arguments to the social_csrf_token tag.
|
Python
|
mit
|
praekelt/django-socialregistration,aditweb/django-socialregistration,minlex/django-socialregistration,kapt/django-socialregistration,coxmediagroup/django-socialregistration,aditweb/django-socialregistration,flashingpumpkin/django-socialregistration,mark-adams/django-socialregistration,mark-adams/django-socialregistration,minlex/django-socialregistration,brodie/django-socialregistration,minlex/django-socialregistration,lgapontes/django-socialregistration,bopo/django-socialregistration,0101/django-socialregistration,aditweb/django-socialregistration,lgapontes/django-socialregistration,amakhnach/django-socialregistration,bopo/django-socialregistration,flashingpumpkin/django-socialregistration,coxmediagroup/django-socialregistration,itmustbejj/django-socialregistration,lgapontes/django-socialregistration,Soovox/django-socialregistration,bopo/django-socialregistration,mark-adams/django-socialregistration,kapt/django-socialregistration,brodie/django-socialregistration
|
python
|
## Code Before:
from django import template
register = template.Library()
@register.tag
def social_csrf_token():
"""
Wrapper around the ``{% csrf_token %}`` template tag to make socialregistration
work with both Django v1.2 and Django < v1.2
"""
return CsrfNode()
class CsrfNode(template.Node):
def render(self, context):
try:
from django.template.defaulttags import CsrfTokenNode
return CsrfTokenNode().render(context)
except ImportError:
return u''
## Instruction:
Add necessary arguments to the social_csrf_token tag.
## Code After:
from django import template
register = template.Library()
@register.tag
def social_csrf_token(parser, token):
"""
Wrapper around the ``{% csrf_token %}`` template tag to make socialregistration
work with both Django v1.2 and Django < v1.2
"""
return CsrfNode()
class CsrfNode(template.Node):
def render(self, context):
try:
from django.template.defaulttags import CsrfTokenNode
return CsrfTokenNode().render(context)
except ImportError:
return u''
|
# ... existing code ...
register = template.Library()
@register.tag
def social_csrf_token(parser, token):
"""
Wrapper around the ``{% csrf_token %}`` template tag to make socialregistration
work with both Django v1.2 and Django < v1.2
# ... rest of the code ...
|
ecc2a444294bffd8295f7cfe92f9b6612205019d
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
# move version string out of setup so it is readily available to others
from inception import __version__
setup(
name='inception',
version=__version__,
description="Inception: Towards a Nested Cloud Architecture",
license="Apache 2.0",
classifiers=["Programming Language :: Python"],
url='https://github.com/stackforge/inception',
packages=find_packages(),
install_requires=[
"oslo.config>=1.1.1",
"python-novaclient>=2.13.0",
"IPython>=0.13.2",
],
)
|
from setuptools import find_packages
from setuptools import setup
# move version string out of setup so it is readily available to others
from inception import __version__
setup(
name='inception',
version=__version__,
description="Inception: Towards a Nested Cloud Architecture",
license="Apache 2.0",
classifiers=["Programming Language :: Python"],
url='https://github.com/stackforge/inception',
packages=find_packages(),
install_requires=[
"oslo.config>=1.1.1",
"python-novaclient>=2.13.0",
"IPython>=0.13.2",
],
)
|
Break multiple imports to multiple lines
|
Break multiple imports to multiple lines
Change-Id: I62ba21f4447fada5bf1b86c261d0f7a65681ba76
|
Python
|
apache-2.0
|
stackforge/inception,stackforge/inception
|
python
|
## Code Before:
from setuptools import setup, find_packages
# move version string out of setup so it is readily available to others
from inception import __version__
setup(
name='inception',
version=__version__,
description="Inception: Towards a Nested Cloud Architecture",
license="Apache 2.0",
classifiers=["Programming Language :: Python"],
url='https://github.com/stackforge/inception',
packages=find_packages(),
install_requires=[
"oslo.config>=1.1.1",
"python-novaclient>=2.13.0",
"IPython>=0.13.2",
],
)
## Instruction:
Break multiple imports to multiple lines
Change-Id: I62ba21f4447fada5bf1b86c261d0f7a65681ba76
## Code After:
from setuptools import find_packages
from setuptools import setup
# move version string out of setup so it is readily available to others
from inception import __version__
setup(
name='inception',
version=__version__,
description="Inception: Towards a Nested Cloud Architecture",
license="Apache 2.0",
classifiers=["Programming Language :: Python"],
url='https://github.com/stackforge/inception',
packages=find_packages(),
install_requires=[
"oslo.config>=1.1.1",
"python-novaclient>=2.13.0",
"IPython>=0.13.2",
],
)
|
// ... existing code ...
from setuptools import find_packages
from setuptools import setup
# move version string out of setup so it is readily available to others
from inception import __version__
// ... rest of the code ...
|
0d441ece80c6e8dbb7b9045a9e2a48d2af0ea992
|
Snooper/src/main/java/com/prateekj/snooper/infra/CustomBindings.java
|
Snooper/src/main/java/com/prateekj/snooper/infra/CustomBindings.java
|
package com.prateekj.snooper.infra;
import android.databinding.BindingAdapter;
import android.support.v4.view.ViewCompat;
import android.support.v7.widget.RecyclerView;
import android.widget.ListView;
import se.emilsjolander.stickylistheaders.StickyListHeadersAdapter;
import se.emilsjolander.stickylistheaders.StickyListHeadersListView;
public class CustomBindings {
@BindingAdapter("adapter")
public static void setAdapter(RecyclerView recyclerView, RecyclerView.Adapter adapter) {
recyclerView.setAdapter(adapter);
}
@BindingAdapter("stickyAdapter")
public static void setStickyAdapter(StickyListHeadersListView stickyHeadersListView, StickyListHeadersAdapter adapter) {
stickyHeadersListView.setAdapter(adapter);
}
@BindingAdapter("nestingScrollingEnabled")
public static void setNestingScrollingEnabled(StickyListHeadersListView stickyHeadersListView, boolean enabled) {
ListView listView = stickyHeadersListView.getWrappedList();
ViewCompat.setNestedScrollingEnabled(listView, enabled);
}
@BindingAdapter("itemDecoration")
public static void setItemDecoration(RecyclerView recyclerView, RecyclerView.ItemDecoration itemDecoration) {
recyclerView.addItemDecoration(itemDecoration);
}
}
|
package com.prateekj.snooper.infra;
import android.databinding.BindingAdapter;
import android.support.v4.view.ViewCompat;
import android.support.v7.widget.RecyclerView;
import android.widget.ListView;
import se.emilsjolander.stickylistheaders.StickyListHeadersAdapter;
import se.emilsjolander.stickylistheaders.StickyListHeadersListView;
public class CustomBindings {
@BindingAdapter("app:adapter")
public static void setAdapter(RecyclerView recyclerView, RecyclerView.Adapter adapter) {
recyclerView.setAdapter(adapter);
}
@BindingAdapter("app:stickyAdapter")
public static void setStickyAdapter(StickyListHeadersListView stickyHeadersListView, StickyListHeadersAdapter adapter) {
stickyHeadersListView.setAdapter(adapter);
}
@BindingAdapter("app:nestingScrollingEnabled")
public static void setNestingScrollingEnabled(StickyListHeadersListView stickyHeadersListView, boolean enabled) {
ListView listView = stickyHeadersListView.getWrappedList();
ViewCompat.setNestedScrollingEnabled(listView, enabled);
}
@BindingAdapter("app:itemDecoration")
public static void setItemDecoration(RecyclerView recyclerView, RecyclerView.ItemDecoration itemDecoration) {
recyclerView.addItemDecoration(itemDecoration);
}
}
|
Revert "removing data binding warnings"
|
Revert "removing data binding warnings"
This reverts commit df1e2ca5d05839d788c73df62681e060de9760fd.
|
Java
|
apache-2.0
|
jainsahab/AndroidSnooper,jainsahab/AndroidSnooper,jainsahab/AndroidSnooper
|
java
|
## Code Before:
package com.prateekj.snooper.infra;
import android.databinding.BindingAdapter;
import android.support.v4.view.ViewCompat;
import android.support.v7.widget.RecyclerView;
import android.widget.ListView;
import se.emilsjolander.stickylistheaders.StickyListHeadersAdapter;
import se.emilsjolander.stickylistheaders.StickyListHeadersListView;
public class CustomBindings {
@BindingAdapter("adapter")
public static void setAdapter(RecyclerView recyclerView, RecyclerView.Adapter adapter) {
recyclerView.setAdapter(adapter);
}
@BindingAdapter("stickyAdapter")
public static void setStickyAdapter(StickyListHeadersListView stickyHeadersListView, StickyListHeadersAdapter adapter) {
stickyHeadersListView.setAdapter(adapter);
}
@BindingAdapter("nestingScrollingEnabled")
public static void setNestingScrollingEnabled(StickyListHeadersListView stickyHeadersListView, boolean enabled) {
ListView listView = stickyHeadersListView.getWrappedList();
ViewCompat.setNestedScrollingEnabled(listView, enabled);
}
@BindingAdapter("itemDecoration")
public static void setItemDecoration(RecyclerView recyclerView, RecyclerView.ItemDecoration itemDecoration) {
recyclerView.addItemDecoration(itemDecoration);
}
}
## Instruction:
Revert "removing data binding warnings"
This reverts commit df1e2ca5d05839d788c73df62681e060de9760fd.
## Code After:
package com.prateekj.snooper.infra;
import android.databinding.BindingAdapter;
import android.support.v4.view.ViewCompat;
import android.support.v7.widget.RecyclerView;
import android.widget.ListView;
import se.emilsjolander.stickylistheaders.StickyListHeadersAdapter;
import se.emilsjolander.stickylistheaders.StickyListHeadersListView;
public class CustomBindings {
@BindingAdapter("app:adapter")
public static void setAdapter(RecyclerView recyclerView, RecyclerView.Adapter adapter) {
recyclerView.setAdapter(adapter);
}
@BindingAdapter("app:stickyAdapter")
public static void setStickyAdapter(StickyListHeadersListView stickyHeadersListView, StickyListHeadersAdapter adapter) {
stickyHeadersListView.setAdapter(adapter);
}
@BindingAdapter("app:nestingScrollingEnabled")
public static void setNestingScrollingEnabled(StickyListHeadersListView stickyHeadersListView, boolean enabled) {
ListView listView = stickyHeadersListView.getWrappedList();
ViewCompat.setNestedScrollingEnabled(listView, enabled);
}
@BindingAdapter("app:itemDecoration")
public static void setItemDecoration(RecyclerView recyclerView, RecyclerView.ItemDecoration itemDecoration) {
recyclerView.addItemDecoration(itemDecoration);
}
}
|
# ... existing code ...
public class CustomBindings {
@BindingAdapter("app:adapter")
public static void setAdapter(RecyclerView recyclerView, RecyclerView.Adapter adapter) {
recyclerView.setAdapter(adapter);
}
@BindingAdapter("app:stickyAdapter")
public static void setStickyAdapter(StickyListHeadersListView stickyHeadersListView, StickyListHeadersAdapter adapter) {
stickyHeadersListView.setAdapter(adapter);
}
@BindingAdapter("app:nestingScrollingEnabled")
public static void setNestingScrollingEnabled(StickyListHeadersListView stickyHeadersListView, boolean enabled) {
ListView listView = stickyHeadersListView.getWrappedList();
ViewCompat.setNestedScrollingEnabled(listView, enabled);
}
@BindingAdapter("app:itemDecoration")
public static void setItemDecoration(RecyclerView recyclerView, RecyclerView.ItemDecoration itemDecoration) {
recyclerView.addItemDecoration(itemDecoration);
}
# ... rest of the code ...
|
ba1ca23964080e4b6c4fa5b3295a65ce1787f291
|
setup.py
|
setup.py
|
import os
from setuptools import setup
setup(
name = "ld35",
version = "0.0.1.dev",
url = "https://github.com/seventhroot/ld35",
package_dir = {'ld35': 'src/ld35'},
packages = ['ld35'],
package_data = {'ld35': [
'assets/*.ogg',
'assets/*.wav',
'examples/*.png',
'examples/*.tmx',
]},
install_requires = [
'pygame==1.9.1',
'Pyganim==0.9.2',
'pyscroll==2.16.6',
'PyTMX==3.20.14',
'six==1.10.0',
],
scripts = ['scripts/ld35game.py'],
# this is to compensate for pytmx.
# better solution may be to give it a suitable resource loader
zip_safe = False,
)
|
import os
from setuptools import setup
setup(
name = "ld35",
version = "0.0.1.dev",
url = "https://github.com/seventhroot/ld35",
description = 'The SeventhRoot entry for Ludum Dare 35',
long_description_markdown_filename='README.md',
packages = ['ld35'],
package_data = {'ld35': [
'assets/*.ogg',
'assets/*.wav',
'examples/*.png',
'examples/*.tmx',
]},
setup_requires=['setuptools-markdown'],
install_requires = [
'pygame==1.9.1',
'Pyganim==0.9.2',
'pyscroll==2.16.6',
'PyTMX==3.20.14',
'six==1.10.0',
],
scripts = ['scripts/ld35game.py'],
# this is to compensate for pytmx.
# better solution may be to give it a suitable resource loader
zip_safe = False,
)
|
Set package description and removed package dir setting.
|
Set package description and removed package dir setting.
|
Python
|
mit
|
seventhroot/ld35
|
python
|
## Code Before:
import os
from setuptools import setup
setup(
name = "ld35",
version = "0.0.1.dev",
url = "https://github.com/seventhroot/ld35",
package_dir = {'ld35': 'src/ld35'},
packages = ['ld35'],
package_data = {'ld35': [
'assets/*.ogg',
'assets/*.wav',
'examples/*.png',
'examples/*.tmx',
]},
install_requires = [
'pygame==1.9.1',
'Pyganim==0.9.2',
'pyscroll==2.16.6',
'PyTMX==3.20.14',
'six==1.10.0',
],
scripts = ['scripts/ld35game.py'],
# this is to compensate for pytmx.
# better solution may be to give it a suitable resource loader
zip_safe = False,
)
## Instruction:
Set package description and removed package dir setting.
## Code After:
import os
from setuptools import setup
setup(
name = "ld35",
version = "0.0.1.dev",
url = "https://github.com/seventhroot/ld35",
description = 'The SeventhRoot entry for Ludum Dare 35',
long_description_markdown_filename='README.md',
packages = ['ld35'],
package_data = {'ld35': [
'assets/*.ogg',
'assets/*.wav',
'examples/*.png',
'examples/*.tmx',
]},
setup_requires=['setuptools-markdown'],
install_requires = [
'pygame==1.9.1',
'Pyganim==0.9.2',
'pyscroll==2.16.6',
'PyTMX==3.20.14',
'six==1.10.0',
],
scripts = ['scripts/ld35game.py'],
# this is to compensate for pytmx.
# better solution may be to give it a suitable resource loader
zip_safe = False,
)
|
// ... existing code ...
name = "ld35",
version = "0.0.1.dev",
url = "https://github.com/seventhroot/ld35",
description = 'The SeventhRoot entry for Ludum Dare 35',
long_description_markdown_filename='README.md',
packages = ['ld35'],
package_data = {'ld35': [
'assets/*.ogg',
// ... modified code ...
'examples/*.png',
'examples/*.tmx',
]},
setup_requires=['setuptools-markdown'],
install_requires = [
'pygame==1.9.1',
'Pyganim==0.9.2',
// ... rest of the code ...
|
63349f4a73c8cb99f495cc15621ecf946e9e652e
|
setup.py
|
setup.py
|
VERSION = '0.2'
from setuptools import setup
setup(
name='nutshell',
packages=["nutshell"],
version=VERSION,
description='A minimal python library to access Nutshell CRM:s JSON-RPC API.',
author=u'Emil Stenström',
author_email='[email protected]',
url='https://github.com/EmilStenstrom/python-nutshell',
download_url='https://github.com/EmilStenstrom/python-nutshell/tarball/' + VERSION,
install_requires=["requests>=2.9.1", "six>=1.10.0"],
tests_require=["mock>=1.0.1", "nose>=1.3.7"],
test_suite="nose.collector",
keywords=['nutshell', 'nutshell-crm', 'json-rpc'],
classifiers=[],
)
|
VERSION = '0.2'
from setuptools import setup
setup(
name='nutshell',
packages=["nutshell"],
version=VERSION,
description='A minimal python library to access Nutshell CRM:s JSON-RPC API.',
author=u'Emil Stenström',
author_email='[email protected]',
url='https://github.com/EmilStenstrom/python-nutshell',
download_url='https://github.com/EmilStenstrom/python-nutshell/tarball/' + VERSION,
install_requires=["requests>=2.9.1", "six>=1.10.0"],
tests_require=["mock>=1.0.1", "nose>=1.3.7"],
test_suite="nose.collector",
keywords=['nutshell', 'nutshell-crm', 'json-rpc'],
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Operating System :: OS Independent",
])
|
Add proper Python version classifiers.
|
Add proper Python version classifiers.
|
Python
|
mit
|
EmilStenstrom/python-nutshell
|
python
|
## Code Before:
VERSION = '0.2'
from setuptools import setup
setup(
name='nutshell',
packages=["nutshell"],
version=VERSION,
description='A minimal python library to access Nutshell CRM:s JSON-RPC API.',
author=u'Emil Stenström',
author_email='[email protected]',
url='https://github.com/EmilStenstrom/python-nutshell',
download_url='https://github.com/EmilStenstrom/python-nutshell/tarball/' + VERSION,
install_requires=["requests>=2.9.1", "six>=1.10.0"],
tests_require=["mock>=1.0.1", "nose>=1.3.7"],
test_suite="nose.collector",
keywords=['nutshell', 'nutshell-crm', 'json-rpc'],
classifiers=[],
)
## Instruction:
Add proper Python version classifiers.
## Code After:
VERSION = '0.2'
from setuptools import setup
setup(
name='nutshell',
packages=["nutshell"],
version=VERSION,
description='A minimal python library to access Nutshell CRM:s JSON-RPC API.',
author=u'Emil Stenström',
author_email='[email protected]',
url='https://github.com/EmilStenstrom/python-nutshell',
download_url='https://github.com/EmilStenstrom/python-nutshell/tarball/' + VERSION,
install_requires=["requests>=2.9.1", "six>=1.10.0"],
tests_require=["mock>=1.0.1", "nose>=1.3.7"],
test_suite="nose.collector",
keywords=['nutshell', 'nutshell-crm', 'json-rpc'],
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Operating System :: OS Independent",
])
|
...
tests_require=["mock>=1.0.1", "nose>=1.3.7"],
test_suite="nose.collector",
keywords=['nutshell', 'nutshell-crm', 'json-rpc'],
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Operating System :: OS Independent",
])
...
|
c345e73ac22be8dde7e0230121e0e02b581d1209
|
uncertainty/lib/nlp/summarizer.py
|
uncertainty/lib/nlp/summarizer.py
|
from . import chunktagger, lemmatizer, postagger, stemmer, tokenizer
class Summarizer(object):
def __init__(self, text):
self.text = text
def execute(self):
tokens = tokenizer.NLTKTokenizer(self.text).execute()
stems = stemmer.Stemmer(tokens).execute()
pos = postagger.PosTagger(tokens).execute()
chunk = chunktagger.ChunkTagger().parse(pos)
summary = zip(tokens, stems, pos, chunk)
return summary
|
from . import chunktagger, lemmatizer, postagger, stemmer, tokenizer
class Summarizer(object):
def __init__(self, text):
self.text = text
def execute(self):
tokens = tokenizer.NLTKTokenizer(self.text).execute()
stems = stemmer.Stemmer(tokens).execute()
pos = postagger.PosTagger(tokens).execute()
chunk = chunktagger.ChunkTagger().parse(pos)
summary = zip(tokens, stems, pos, chunk)
return [(t, s, p[1], c[1]) for (t, s, p, c) in summary]
|
Fix bug that returned a zip object instead of list
|
Fix bug that returned a zip object instead of list
|
Python
|
mit
|
meyersbs/uncertainty
|
python
|
## Code Before:
from . import chunktagger, lemmatizer, postagger, stemmer, tokenizer
class Summarizer(object):
def __init__(self, text):
self.text = text
def execute(self):
tokens = tokenizer.NLTKTokenizer(self.text).execute()
stems = stemmer.Stemmer(tokens).execute()
pos = postagger.PosTagger(tokens).execute()
chunk = chunktagger.ChunkTagger().parse(pos)
summary = zip(tokens, stems, pos, chunk)
return summary
## Instruction:
Fix bug that returned a zip object instead of list
## Code After:
from . import chunktagger, lemmatizer, postagger, stemmer, tokenizer
class Summarizer(object):
def __init__(self, text):
self.text = text
def execute(self):
tokens = tokenizer.NLTKTokenizer(self.text).execute()
stems = stemmer.Stemmer(tokens).execute()
pos = postagger.PosTagger(tokens).execute()
chunk = chunktagger.ChunkTagger().parse(pos)
summary = zip(tokens, stems, pos, chunk)
return [(t, s, p[1], c[1]) for (t, s, p, c) in summary]
|
...
chunk = chunktagger.ChunkTagger().parse(pos)
summary = zip(tokens, stems, pos, chunk)
return [(t, s, p[1], c[1]) for (t, s, p, c) in summary]
...
|
2f9a4029e909f71539f3b7326b867e27386c3378
|
tests/interface_test.py
|
tests/interface_test.py
|
import unittest
import aiozmq
class ZmqTransportTests(unittest.TestCase):
def test_interface(self):
tr = aiozmq.ZmqTransport()
self.assertRaises(NotImplementedError, tr.write, [b'data'])
self.assertRaises(NotImplementedError, tr.abort)
self.assertRaises(NotImplementedError, tr.getsockopt, 1)
self.assertRaises(NotImplementedError, tr.setsockopt, 1, 2)
self.assertRaises(NotImplementedError, tr.set_write_buffer_limits)
self.assertRaises(NotImplementedError, tr.get_write_buffer_size)
self.assertRaises(NotImplementedError, tr.bind, 'endpoint')
self.assertRaises(NotImplementedError, tr.unbind, 'endpoint')
self.assertRaises(NotImplementedError, tr.bindings)
self.assertRaises(NotImplementedError, tr.connect, 'endpoint')
self.assertRaises(NotImplementedError, tr.disconnect, 'endpoint')
self.assertRaises(NotImplementedError, tr.connections)
self.assertRaises(NotImplementedError, tr.subscribe, b'filter')
self.assertRaises(NotImplementedError, tr.unsubscribe, b'filter')
self.assertRaises(NotImplementedError, tr.subscriptions)
class ZmqProtocolTests(unittest.TestCase):
def test_interface(self):
pr = aiozmq.ZmqProtocol()
self.assertIsNone(pr.msg_received((b'data',)))
|
import unittest
import aiozmq
class ZmqTransportTests(unittest.TestCase):
def test_interface(self):
tr = aiozmq.ZmqTransport()
self.assertRaises(NotImplementedError, tr.write, [b'data'])
self.assertRaises(NotImplementedError, tr.abort)
self.assertRaises(NotImplementedError, tr.getsockopt, 1)
self.assertRaises(NotImplementedError, tr.setsockopt, 1, 2)
self.assertRaises(NotImplementedError, tr.set_write_buffer_limits)
self.assertRaises(NotImplementedError, tr.get_write_buffer_size)
self.assertRaises(NotImplementedError, tr.pause_reading)
self.assertRaises(NotImplementedError, tr.resume_reading)
self.assertRaises(NotImplementedError, tr.bind, 'endpoint')
self.assertRaises(NotImplementedError, tr.unbind, 'endpoint')
self.assertRaises(NotImplementedError, tr.bindings)
self.assertRaises(NotImplementedError, tr.connect, 'endpoint')
self.assertRaises(NotImplementedError, tr.disconnect, 'endpoint')
self.assertRaises(NotImplementedError, tr.connections)
self.assertRaises(NotImplementedError, tr.subscribe, b'filter')
self.assertRaises(NotImplementedError, tr.unsubscribe, b'filter')
self.assertRaises(NotImplementedError, tr.subscriptions)
class ZmqProtocolTests(unittest.TestCase):
def test_interface(self):
pr = aiozmq.ZmqProtocol()
self.assertIsNone(pr.msg_received((b'data',)))
|
Add missing tests for interfaces
|
Add missing tests for interfaces
|
Python
|
bsd-2-clause
|
MetaMemoryT/aiozmq,claws/aiozmq,asteven/aiozmq,aio-libs/aiozmq
|
python
|
## Code Before:
import unittest
import aiozmq
class ZmqTransportTests(unittest.TestCase):
def test_interface(self):
tr = aiozmq.ZmqTransport()
self.assertRaises(NotImplementedError, tr.write, [b'data'])
self.assertRaises(NotImplementedError, tr.abort)
self.assertRaises(NotImplementedError, tr.getsockopt, 1)
self.assertRaises(NotImplementedError, tr.setsockopt, 1, 2)
self.assertRaises(NotImplementedError, tr.set_write_buffer_limits)
self.assertRaises(NotImplementedError, tr.get_write_buffer_size)
self.assertRaises(NotImplementedError, tr.bind, 'endpoint')
self.assertRaises(NotImplementedError, tr.unbind, 'endpoint')
self.assertRaises(NotImplementedError, tr.bindings)
self.assertRaises(NotImplementedError, tr.connect, 'endpoint')
self.assertRaises(NotImplementedError, tr.disconnect, 'endpoint')
self.assertRaises(NotImplementedError, tr.connections)
self.assertRaises(NotImplementedError, tr.subscribe, b'filter')
self.assertRaises(NotImplementedError, tr.unsubscribe, b'filter')
self.assertRaises(NotImplementedError, tr.subscriptions)
class ZmqProtocolTests(unittest.TestCase):
def test_interface(self):
pr = aiozmq.ZmqProtocol()
self.assertIsNone(pr.msg_received((b'data',)))
## Instruction:
Add missing tests for interfaces
## Code After:
import unittest
import aiozmq
class ZmqTransportTests(unittest.TestCase):
def test_interface(self):
tr = aiozmq.ZmqTransport()
self.assertRaises(NotImplementedError, tr.write, [b'data'])
self.assertRaises(NotImplementedError, tr.abort)
self.assertRaises(NotImplementedError, tr.getsockopt, 1)
self.assertRaises(NotImplementedError, tr.setsockopt, 1, 2)
self.assertRaises(NotImplementedError, tr.set_write_buffer_limits)
self.assertRaises(NotImplementedError, tr.get_write_buffer_size)
self.assertRaises(NotImplementedError, tr.pause_reading)
self.assertRaises(NotImplementedError, tr.resume_reading)
self.assertRaises(NotImplementedError, tr.bind, 'endpoint')
self.assertRaises(NotImplementedError, tr.unbind, 'endpoint')
self.assertRaises(NotImplementedError, tr.bindings)
self.assertRaises(NotImplementedError, tr.connect, 'endpoint')
self.assertRaises(NotImplementedError, tr.disconnect, 'endpoint')
self.assertRaises(NotImplementedError, tr.connections)
self.assertRaises(NotImplementedError, tr.subscribe, b'filter')
self.assertRaises(NotImplementedError, tr.unsubscribe, b'filter')
self.assertRaises(NotImplementedError, tr.subscriptions)
class ZmqProtocolTests(unittest.TestCase):
def test_interface(self):
pr = aiozmq.ZmqProtocol()
self.assertIsNone(pr.msg_received((b'data',)))
|
# ... existing code ...
self.assertRaises(NotImplementedError, tr.setsockopt, 1, 2)
self.assertRaises(NotImplementedError, tr.set_write_buffer_limits)
self.assertRaises(NotImplementedError, tr.get_write_buffer_size)
self.assertRaises(NotImplementedError, tr.pause_reading)
self.assertRaises(NotImplementedError, tr.resume_reading)
self.assertRaises(NotImplementedError, tr.bind, 'endpoint')
self.assertRaises(NotImplementedError, tr.unbind, 'endpoint')
self.assertRaises(NotImplementedError, tr.bindings)
# ... rest of the code ...
|
2949d485a0fc6c91957dd430931ae4e6c173cc2e
|
src/main/java/org/libvirt/jna/virSchedParameterValue.java
|
src/main/java/org/libvirt/jna/virSchedParameterValue.java
|
package org.libvirt.jna;
import com.sun.jna.Structure;
/**
* JNA mapping for the virSchedParameterValue structure
*/
public class virSchedParameterValue extends Structure {
public int i; /* data for integer case */
public int ui; /* data for unsigned integer case */
public long l; /* data for long long integer case */
public long ul; /* data for unsigned long long integer case */
public double d; /* data for double case */
public byte b; /* data for char case */
}
|
package org.libvirt.jna;
import com.sun.jna.Union;
/**
* JNA mapping for the virSchedParameterValue structure
*/
public class virSchedParameterValue extends Union {
public int i; /* data for integer case */
public int ui; /* data for unsigned integer case */
public long l; /* data for long long integer case */
public long ul; /* data for unsigned long long integer case */
public double d; /* data for double case */
public byte b; /* data for char case */
}
|
Make the parameters a Union based on the comments from Frederic Ddangtran
|
Make the parameters a Union based on the comments from Frederic Ddangtran
|
Java
|
lgpl-2.1
|
lstoll/libvirt-java,lstoll/libvirt-java
|
java
|
## Code Before:
package org.libvirt.jna;
import com.sun.jna.Structure;
/**
* JNA mapping for the virSchedParameterValue structure
*/
public class virSchedParameterValue extends Structure {
public int i; /* data for integer case */
public int ui; /* data for unsigned integer case */
public long l; /* data for long long integer case */
public long ul; /* data for unsigned long long integer case */
public double d; /* data for double case */
public byte b; /* data for char case */
}
## Instruction:
Make the parameters a Union based on the comments from Frederic Ddangtran
## Code After:
package org.libvirt.jna;
import com.sun.jna.Union;
/**
* JNA mapping for the virSchedParameterValue structure
*/
public class virSchedParameterValue extends Union {
public int i; /* data for integer case */
public int ui; /* data for unsigned integer case */
public long l; /* data for long long integer case */
public long ul; /* data for unsigned long long integer case */
public double d; /* data for double case */
public byte b; /* data for char case */
}
|
// ... existing code ...
package org.libvirt.jna;
import com.sun.jna.Union;
/**
* JNA mapping for the virSchedParameterValue structure
*/
public class virSchedParameterValue extends Union {
public int i; /* data for integer case */
public int ui; /* data for unsigned integer case */
public long l; /* data for long long integer case */
// ... rest of the code ...
|
ba008f405a89d07e170d1b4c893246fb25ccba04
|
benchmarks/benchmarks/bench_lib.py
|
benchmarks/benchmarks/bench_lib.py
|
"""Benchmarks for `numpy.lib`."""
from __future__ import absolute_import, division, print_function
from .common import Benchmark
import numpy as np
class Pad(Benchmark):
"""Benchmarks for `numpy.pad`."""
param_names = ["shape", "pad_width", "mode"]
params = [
[(1000,), (10, 100), (10, 10, 10)],
[1, 3, (0, 5)],
["constant", "edge", "linear_ramp", "mean", "reflect", "wrap"],
]
def setup(self, shape, pad_width, mode):
# avoid np.zeros or np.empty's lazy allocation.
# np.full causes pagefaults to occur during setup
# instead of during the benchmark
self.array = np.full(shape, 0)
def time_pad(self, shape, pad_width, mode):
np.pad(self.array, pad_width, mode)
|
"""Benchmarks for `numpy.lib`."""
from __future__ import absolute_import, division, print_function
from .common import Benchmark
import numpy as np
class Pad(Benchmark):
"""Benchmarks for `numpy.pad`."""
param_names = ["shape", "pad_width", "mode"]
params = [
[(1000,),
(10, 100),
(10, 10, 10),
# 50 * 512 * 512 = 13 million points = 46 MB. should be a good
# out of cache describing a typical usecase
(50, 512, 512)],
[1,
3,
(0, 5)],
["constant",
"edge", "linear_ramp",
# mean/median/minimum/maximum all use the same code path
"mean",
# reflect/symmetric share alot of the code path
"reflect",
"wrap"],
]
def setup(self, shape, pad_width, mode):
# Make sure to fill the array to make the OS page fault
# in the setup phase and not the timed phase
self.array = np.full(shape, fill_value=1)
def time_pad(self, shape, pad_width, mode):
np.pad(self.array, pad_width, mode)
|
Make the pad benchmark pagefault in setup
|
BENCH: Make the pad benchmark pagefault in setup
|
Python
|
bsd-3-clause
|
shoyer/numpy,grlee77/numpy,mhvk/numpy,pbrod/numpy,endolith/numpy,WarrenWeckesser/numpy,mhvk/numpy,pbrod/numpy,jakirkham/numpy,mattip/numpy,anntzer/numpy,abalkin/numpy,shoyer/numpy,WarrenWeckesser/numpy,madphysicist/numpy,MSeifert04/numpy,madphysicist/numpy,endolith/numpy,mhvk/numpy,mattip/numpy,pizzathief/numpy,seberg/numpy,grlee77/numpy,jakirkham/numpy,pbrod/numpy,jakirkham/numpy,rgommers/numpy,mattip/numpy,shoyer/numpy,MSeifert04/numpy,jorisvandenbossche/numpy,numpy/numpy,ahaldane/numpy,pdebuyl/numpy,numpy/numpy,ahaldane/numpy,charris/numpy,endolith/numpy,ahaldane/numpy,simongibbons/numpy,pdebuyl/numpy,pdebuyl/numpy,shoyer/numpy,mhvk/numpy,charris/numpy,pbrod/numpy,WarrenWeckesser/numpy,jorisvandenbossche/numpy,simongibbons/numpy,MSeifert04/numpy,pizzathief/numpy,grlee77/numpy,rgommers/numpy,MSeifert04/numpy,madphysicist/numpy,abalkin/numpy,charris/numpy,abalkin/numpy,anntzer/numpy,seberg/numpy,pdebuyl/numpy,numpy/numpy,pizzathief/numpy,jorisvandenbossche/numpy,pizzathief/numpy,simongibbons/numpy,MSeifert04/numpy,jakirkham/numpy,pbrod/numpy,madphysicist/numpy,anntzer/numpy,WarrenWeckesser/numpy,charris/numpy,jorisvandenbossche/numpy,jorisvandenbossche/numpy,grlee77/numpy,shoyer/numpy,anntzer/numpy,endolith/numpy,grlee77/numpy,simongibbons/numpy,ahaldane/numpy,mattip/numpy,mhvk/numpy,simongibbons/numpy,seberg/numpy,numpy/numpy,pizzathief/numpy,seberg/numpy,ahaldane/numpy,WarrenWeckesser/numpy,jakirkham/numpy,madphysicist/numpy,rgommers/numpy,rgommers/numpy
|
python
|
## Code Before:
"""Benchmarks for `numpy.lib`."""
from __future__ import absolute_import, division, print_function
from .common import Benchmark
import numpy as np
class Pad(Benchmark):
"""Benchmarks for `numpy.pad`."""
param_names = ["shape", "pad_width", "mode"]
params = [
[(1000,), (10, 100), (10, 10, 10)],
[1, 3, (0, 5)],
["constant", "edge", "linear_ramp", "mean", "reflect", "wrap"],
]
def setup(self, shape, pad_width, mode):
# avoid np.zeros or np.empty's lazy allocation.
# np.full causes pagefaults to occur during setup
# instead of during the benchmark
self.array = np.full(shape, 0)
def time_pad(self, shape, pad_width, mode):
np.pad(self.array, pad_width, mode)
## Instruction:
BENCH: Make the pad benchmark pagefault in setup
## Code After:
"""Benchmarks for `numpy.lib`."""
from __future__ import absolute_import, division, print_function
from .common import Benchmark
import numpy as np
class Pad(Benchmark):
"""Benchmarks for `numpy.pad`."""
param_names = ["shape", "pad_width", "mode"]
params = [
[(1000,),
(10, 100),
(10, 10, 10),
# 50 * 512 * 512 = 13 million points = 46 MB. should be a good
# out of cache describing a typical usecase
(50, 512, 512)],
[1,
3,
(0, 5)],
["constant",
"edge", "linear_ramp",
# mean/median/minimum/maximum all use the same code path
"mean",
# reflect/symmetric share alot of the code path
"reflect",
"wrap"],
]
def setup(self, shape, pad_width, mode):
# Make sure to fill the array to make the OS page fault
# in the setup phase and not the timed phase
self.array = np.full(shape, fill_value=1)
def time_pad(self, shape, pad_width, mode):
np.pad(self.array, pad_width, mode)
|
# ... existing code ...
param_names = ["shape", "pad_width", "mode"]
params = [
[(1000,),
(10, 100),
(10, 10, 10),
# 50 * 512 * 512 = 13 million points = 46 MB. should be a good
# out of cache describing a typical usecase
(50, 512, 512)],
[1,
3,
(0, 5)],
["constant",
"edge", "linear_ramp",
# mean/median/minimum/maximum all use the same code path
"mean",
# reflect/symmetric share alot of the code path
"reflect",
"wrap"],
]
def setup(self, shape, pad_width, mode):
# Make sure to fill the array to make the OS page fault
# in the setup phase and not the timed phase
self.array = np.full(shape, fill_value=1)
def time_pad(self, shape, pad_width, mode):
np.pad(self.array, pad_width, mode)
# ... rest of the code ...
|
fb11a6a3b4e13b16c537217e7e698467054febcc
|
mobile/src/main/java/com/stefanhoth/ropasclisp/achievements/AchievementObserver.java
|
mobile/src/main/java/com/stefanhoth/ropasclisp/achievements/AchievementObserver.java
|
package com.stefanhoth.ropasclisp.achievements;
import com.stefanhoth.ropasclisp.games_sdk.PlayGamesClient;
public class AchievementObserver {
private final PlayGamesClient playGamesClient;
private final boolean firstSessionToday;
public AchievementObserver(PlayGamesClient playGamesClient, boolean firstSessionToday) {
this.playGamesClient = playGamesClient;
this.firstSessionToday = firstSessionToday;
}
public void onGameStart() {
// first game start ever
playGamesClient.unlockAchievement(ACHIEVEMENT.HELLO_THERE);
// on X game starts per day
if(! firstSessionToday){
playGamesClient.unlockAchievement(ACHIEVEMENT.CONNOISSEUR);
playGamesClient.unlockAchievement(ACHIEVEMENT.ADDICT);
}
}
public void onGameWin() {
playGamesClient.unlockAchievement(ACHIEVEMENT.GOTCHA);
playGamesClient.unlockAchievement(ACHIEVEMENT.ON_A_ROLL);
playGamesClient.unlockAchievement(ACHIEVEMENT.POKER_FACE);
}
}
|
package com.stefanhoth.ropasclisp.achievements;
import android.content.Context;
import com.stefanhoth.ropasclisp.games_sdk.PlayGamesClient;
public class AchievementObserver {
private final PlayGamesClient playGamesClient;
private final Context context;
public AchievementObserver(PlayGamesClient playGamesClient, final Context context) {
this.playGamesClient = playGamesClient;
this.context = context;
}
public void onGameStart() {
// first game start ever
playGamesClient.unlockAchievement(ACHIEVEMENT.HELLO_THERE);
DAILY_STATE dailyState = DAILY_STATE.with(context);
// on X game starts per day
if (!dailyState.isTodaysFirstStart()) {
final int todaysStarts = dailyState.getTodaysStarts();
if (ACHIEVEMENT.ADDICT.isGoalReached(todaysStarts)) {
playGamesClient.unlockAchievement(ACHIEVEMENT.ADDICT);
}
if (ACHIEVEMENT.CONNOISSEUR.isGoalReached(todaysStarts)) {
playGamesClient.unlockAchievement(ACHIEVEMENT.CONNOISSEUR);
}
}
}
public void onGameWin() {
playGamesClient.unlockAchievement(ACHIEVEMENT.GOTCHA);
playGamesClient.unlockAchievement(ACHIEVEMENT.ON_A_ROLL);
playGamesClient.unlockAchievement(ACHIEVEMENT.POKER_FACE);
}
}
|
Correct behavior for daily start awards
|
Correct behavior for daily start awards
|
Java
|
mit
|
stefanhoth/ropasclisp
|
java
|
## Code Before:
package com.stefanhoth.ropasclisp.achievements;
import com.stefanhoth.ropasclisp.games_sdk.PlayGamesClient;
public class AchievementObserver {
private final PlayGamesClient playGamesClient;
private final boolean firstSessionToday;
public AchievementObserver(PlayGamesClient playGamesClient, boolean firstSessionToday) {
this.playGamesClient = playGamesClient;
this.firstSessionToday = firstSessionToday;
}
public void onGameStart() {
// first game start ever
playGamesClient.unlockAchievement(ACHIEVEMENT.HELLO_THERE);
// on X game starts per day
if(! firstSessionToday){
playGamesClient.unlockAchievement(ACHIEVEMENT.CONNOISSEUR);
playGamesClient.unlockAchievement(ACHIEVEMENT.ADDICT);
}
}
public void onGameWin() {
playGamesClient.unlockAchievement(ACHIEVEMENT.GOTCHA);
playGamesClient.unlockAchievement(ACHIEVEMENT.ON_A_ROLL);
playGamesClient.unlockAchievement(ACHIEVEMENT.POKER_FACE);
}
}
## Instruction:
Correct behavior for daily start awards
## Code After:
package com.stefanhoth.ropasclisp.achievements;
import android.content.Context;
import com.stefanhoth.ropasclisp.games_sdk.PlayGamesClient;
public class AchievementObserver {
private final PlayGamesClient playGamesClient;
private final Context context;
public AchievementObserver(PlayGamesClient playGamesClient, final Context context) {
this.playGamesClient = playGamesClient;
this.context = context;
}
public void onGameStart() {
// first game start ever
playGamesClient.unlockAchievement(ACHIEVEMENT.HELLO_THERE);
DAILY_STATE dailyState = DAILY_STATE.with(context);
// on X game starts per day
if (!dailyState.isTodaysFirstStart()) {
final int todaysStarts = dailyState.getTodaysStarts();
if (ACHIEVEMENT.ADDICT.isGoalReached(todaysStarts)) {
playGamesClient.unlockAchievement(ACHIEVEMENT.ADDICT);
}
if (ACHIEVEMENT.CONNOISSEUR.isGoalReached(todaysStarts)) {
playGamesClient.unlockAchievement(ACHIEVEMENT.CONNOISSEUR);
}
}
}
public void onGameWin() {
playGamesClient.unlockAchievement(ACHIEVEMENT.GOTCHA);
playGamesClient.unlockAchievement(ACHIEVEMENT.ON_A_ROLL);
playGamesClient.unlockAchievement(ACHIEVEMENT.POKER_FACE);
}
}
|
...
package com.stefanhoth.ropasclisp.achievements;
import android.content.Context;
import com.stefanhoth.ropasclisp.games_sdk.PlayGamesClient;
...
public class AchievementObserver {
private final PlayGamesClient playGamesClient;
private final Context context;
public AchievementObserver(PlayGamesClient playGamesClient, final Context context) {
this.playGamesClient = playGamesClient;
this.context = context;
}
public void onGameStart() {
...
// first game start ever
playGamesClient.unlockAchievement(ACHIEVEMENT.HELLO_THERE);
DAILY_STATE dailyState = DAILY_STATE.with(context);
// on X game starts per day
if (!dailyState.isTodaysFirstStart()) {
final int todaysStarts = dailyState.getTodaysStarts();
if (ACHIEVEMENT.ADDICT.isGoalReached(todaysStarts)) {
playGamesClient.unlockAchievement(ACHIEVEMENT.ADDICT);
}
if (ACHIEVEMENT.CONNOISSEUR.isGoalReached(todaysStarts)) {
playGamesClient.unlockAchievement(ACHIEVEMENT.CONNOISSEUR);
}
}
}
...
|
1a10f81750d935eaf84b6ea02d39201471b5fa84
|
pmpa_internals.h
|
pmpa_internals.h
|
/*
* pmpa_internals.h
* Part of pmpa
* Copyright (c) 2014 Philip Wernersbach
*
* Dual-Licensed under the Public Domain and the Unlicense.
* Choose the one that you prefer.
*/
#ifndef HAVE_PMPA_INTERNALS_H
#include <stdbool.h>
#include <pmpa.h>
typedef struct {
pmpa_memory_int size;
bool allocated;
char data;
} pmpa_memory_block;
#define PMPA_MEMORY_BLOCK_HEADER_SIZE ( sizeof(pmpa_memory_int) + sizeof(bool) )
#ifdef PMPA_UNIT_TEST
#define PMPA_STATIC_UNLESS_TESTING
extern PMPA_STATIC_UNLESS_TESTING __thread pmpa_memory_block *master_memory_block;
extern PMPA_STATIC_UNLESS_TESTING __thread pmpa_memory_int master_memory_block_size;
#else
#define PMPA_STATIC_UNLESS_TESTING static
#endif
#define HAVE_PMPA_INTERNALS_H
#endif
|
/*
* pmpa_internals.h
* Part of pmpa
* Copyright (c) 2014 Philip Wernersbach
*
* Dual-Licensed under the Public Domain and the Unlicense.
* Choose the one that you prefer.
*/
#ifndef HAVE_PMPA_INTERNALS_H
#include <stddef.h>
#include <stdbool.h>
#include <pmpa.h>
typedef struct {
pmpa_memory_int size;
bool allocated;
char data;
} pmpa_memory_block;
#define PMPA_MEMORY_BLOCK_HEADER_SIZE ( offsetof(pmpa_memory_block, data) )
#ifdef PMPA_UNIT_TEST
#define PMPA_STATIC_UNLESS_TESTING
extern PMPA_STATIC_UNLESS_TESTING __thread pmpa_memory_block *master_memory_block;
extern PMPA_STATIC_UNLESS_TESTING __thread pmpa_memory_int master_memory_block_size;
#else
#define PMPA_STATIC_UNLESS_TESTING static
#endif
#define HAVE_PMPA_INTERNALS_H
#endif
|
Use offsetof() to compute the memory block header size.
|
Use offsetof() to compute the memory block header size.
On compilers that don’t pad structs to alignment, this is equivalent to
using the sizeof() method. On compilers that do pad structs to
alignment, the sizeof() method will yield the wrong size, whereas this
method will work.
|
C
|
unlicense
|
philip-wernersbach/memory-pool-allocator
|
c
|
## Code Before:
/*
* pmpa_internals.h
* Part of pmpa
* Copyright (c) 2014 Philip Wernersbach
*
* Dual-Licensed under the Public Domain and the Unlicense.
* Choose the one that you prefer.
*/
#ifndef HAVE_PMPA_INTERNALS_H
#include <stdbool.h>
#include <pmpa.h>
typedef struct {
pmpa_memory_int size;
bool allocated;
char data;
} pmpa_memory_block;
#define PMPA_MEMORY_BLOCK_HEADER_SIZE ( sizeof(pmpa_memory_int) + sizeof(bool) )
#ifdef PMPA_UNIT_TEST
#define PMPA_STATIC_UNLESS_TESTING
extern PMPA_STATIC_UNLESS_TESTING __thread pmpa_memory_block *master_memory_block;
extern PMPA_STATIC_UNLESS_TESTING __thread pmpa_memory_int master_memory_block_size;
#else
#define PMPA_STATIC_UNLESS_TESTING static
#endif
#define HAVE_PMPA_INTERNALS_H
#endif
## Instruction:
Use offsetof() to compute the memory block header size.
On compilers that don’t pad structs to alignment, this is equivalent to
using the sizeof() method. On compilers that do pad structs to
alignment, the sizeof() method will yield the wrong size, whereas this
method will work.
## Code After:
/*
* pmpa_internals.h
* Part of pmpa
* Copyright (c) 2014 Philip Wernersbach
*
* Dual-Licensed under the Public Domain and the Unlicense.
* Choose the one that you prefer.
*/
#ifndef HAVE_PMPA_INTERNALS_H
#include <stddef.h>
#include <stdbool.h>
#include <pmpa.h>
typedef struct {
pmpa_memory_int size;
bool allocated;
char data;
} pmpa_memory_block;
#define PMPA_MEMORY_BLOCK_HEADER_SIZE ( offsetof(pmpa_memory_block, data) )
#ifdef PMPA_UNIT_TEST
#define PMPA_STATIC_UNLESS_TESTING
extern PMPA_STATIC_UNLESS_TESTING __thread pmpa_memory_block *master_memory_block;
extern PMPA_STATIC_UNLESS_TESTING __thread pmpa_memory_int master_memory_block_size;
#else
#define PMPA_STATIC_UNLESS_TESTING static
#endif
#define HAVE_PMPA_INTERNALS_H
#endif
|
// ... existing code ...
#ifndef HAVE_PMPA_INTERNALS_H
#include <stddef.h>
#include <stdbool.h>
#include <pmpa.h>
// ... modified code ...
char data;
} pmpa_memory_block;
#define PMPA_MEMORY_BLOCK_HEADER_SIZE ( offsetof(pmpa_memory_block, data) )
#ifdef PMPA_UNIT_TEST
#define PMPA_STATIC_UNLESS_TESTING
// ... rest of the code ...
|
e9b465ccf6e9ef2294487ea43caa669e94661e97
|
digest/DigestOOP.h
|
digest/DigestOOP.h
|
namespace oop
{
class Digest
{
public:
virtual ~Digest() {}
virtual void update(const void* data, int len) = 0;
#ifdef __cpp_lib_string_view
void update(std::string_view str) { update(str.data(), str.length());
#endif
virtual std::string digest() = 0;
virtual int length() const = 0;
enum Type
{
SHA1 = 1,
SHA256 = 2,
MD5 = 5,
};
static std::unique_ptr<Digest> create(Type t);
protected:
Digest() {}
private:
Digest(const Digest&) = delete;
void operator=(const Digest&) = delete;
};
}
|
namespace oop
{
class Digest
{
public:
virtual ~Digest() {}
virtual void update(const void* data, int len) = 0;
#ifdef __cpp_lib_string_view
void update(std::string_view str)
{
update(str.data(), str.length());
}
#endif
virtual std::string digest() = 0;
virtual int length() const = 0;
enum Type
{
SHA1 = 1,
SHA256 = 2,
MD5 = 5,
};
static std::unique_ptr<Digest> create(Type t);
protected:
Digest() {}
private:
Digest(const Digest&) = delete;
void operator=(const Digest&) = delete;
};
}
|
Fix digest oop for C++17.
|
Fix digest oop for C++17.
|
C
|
bsd-3-clause
|
chenshuo/recipes,chenshuo/recipes,chenshuo/recipes,chenshuo/recipes,chenshuo/recipes,chenshuo/recipes
|
c
|
## Code Before:
namespace oop
{
class Digest
{
public:
virtual ~Digest() {}
virtual void update(const void* data, int len) = 0;
#ifdef __cpp_lib_string_view
void update(std::string_view str) { update(str.data(), str.length());
#endif
virtual std::string digest() = 0;
virtual int length() const = 0;
enum Type
{
SHA1 = 1,
SHA256 = 2,
MD5 = 5,
};
static std::unique_ptr<Digest> create(Type t);
protected:
Digest() {}
private:
Digest(const Digest&) = delete;
void operator=(const Digest&) = delete;
};
}
## Instruction:
Fix digest oop for C++17.
## Code After:
namespace oop
{
class Digest
{
public:
virtual ~Digest() {}
virtual void update(const void* data, int len) = 0;
#ifdef __cpp_lib_string_view
void update(std::string_view str)
{
update(str.data(), str.length());
}
#endif
virtual std::string digest() = 0;
virtual int length() const = 0;
enum Type
{
SHA1 = 1,
SHA256 = 2,
MD5 = 5,
};
static std::unique_ptr<Digest> create(Type t);
protected:
Digest() {}
private:
Digest(const Digest&) = delete;
void operator=(const Digest&) = delete;
};
}
|
// ... existing code ...
virtual ~Digest() {}
virtual void update(const void* data, int len) = 0;
#ifdef __cpp_lib_string_view
void update(std::string_view str)
{
update(str.data(), str.length());
}
#endif
virtual std::string digest() = 0;
virtual int length() const = 0;
// ... rest of the code ...
|
34330aec6cf0c038d47c43ef926fa615bd568ea3
|
sqlservice/__init__.py
|
sqlservice/__init__.py
|
from .__pkg__ import (
__description__,
__url__,
__version__,
__author__,
__email__,
__license__
)
from .client import SQLClient
from .model import ModelBase, declarative_base
from .query import Query
from .service import SQLService
|
from .__pkg__ import (
__description__,
__url__,
__version__,
__author__,
__email__,
__license__
)
from .client import SQLClient
from .model import ModelBase, declarative_base
from .service import SQLService
from . import event
|
Remove Query from import and add explicit event module import.
|
Remove Query from import and add explicit event module import.
|
Python
|
mit
|
dgilland/sqlservice
|
python
|
## Code Before:
from .__pkg__ import (
__description__,
__url__,
__version__,
__author__,
__email__,
__license__
)
from .client import SQLClient
from .model import ModelBase, declarative_base
from .query import Query
from .service import SQLService
## Instruction:
Remove Query from import and add explicit event module import.
## Code After:
from .__pkg__ import (
__description__,
__url__,
__version__,
__author__,
__email__,
__license__
)
from .client import SQLClient
from .model import ModelBase, declarative_base
from .service import SQLService
from . import event
|
// ... existing code ...
from .client import SQLClient
from .model import ModelBase, declarative_base
from .service import SQLService
from . import event
// ... rest of the code ...
|
b674f76c93b5208ad302fcba2d43b8c30bbaf14c
|
main.py
|
main.py
|
from altitude import run
run.run("27279", "C://Program Files (x86)/Altitude/servers/command.txt", "C://Program Files (x86)/Altitude/servers/log.txt",
"C://Program Files (x86)/Altitude/servers/log_old.txt", "C://Program Files (x86)/Altitude/servers/logs_archive.txt")
|
from altitude import run
run.run("27279", "/home/user/altitude-files/servers/command.txt", "/home/user/altitude-files/servers/log.txt",
"/home/user/altitude-files/servers/log_old.txt", "/home/user/altitude-files/servers/logs_archive.txt")
|
Put the directories back as they were for the server
|
Put the directories back as they were for the server
|
Python
|
mit
|
StamKaly/altitude-mod,StamKaly/altitude-mod
|
python
|
## Code Before:
from altitude import run
run.run("27279", "C://Program Files (x86)/Altitude/servers/command.txt", "C://Program Files (x86)/Altitude/servers/log.txt",
"C://Program Files (x86)/Altitude/servers/log_old.txt", "C://Program Files (x86)/Altitude/servers/logs_archive.txt")
## Instruction:
Put the directories back as they were for the server
## Code After:
from altitude import run
run.run("27279", "/home/user/altitude-files/servers/command.txt", "/home/user/altitude-files/servers/log.txt",
"/home/user/altitude-files/servers/log_old.txt", "/home/user/altitude-files/servers/logs_archive.txt")
|
// ... existing code ...
from altitude import run
run.run("27279", "/home/user/altitude-files/servers/command.txt", "/home/user/altitude-files/servers/log.txt",
"/home/user/altitude-files/servers/log_old.txt", "/home/user/altitude-files/servers/logs_archive.txt")
// ... rest of the code ...
|
66ae5304c81d74e8f30e9274c90d0f83766744d7
|
datamodel/nodes/printer.py
|
datamodel/nodes/printer.py
|
import sys
from datamodel.base import node
class ConsolePrinter(node.Node):
"""
This node prints on stdout its context and then returns it as output.
"""
def input(self, context):
self._context = context
def output(self):
sys.stdout.write(self._context)
return self._context
def reset(self):
del self._context
class LogPrinter(node.Node):
"""
This node prints its context on a statically defined logger and then
returns it as output
"""
def __init__(self, logger, loglevel, stringify=False, name=None):
"""
:param logger: any logging.Logger subtype
:param loglevel: the log level
:param stringify: try to cast to str the context before passing it to
the logger
:param name: name of this node
"""
node.Node.__init__(self, name=name)
self._logger = logger
self._loglevel = loglevel
self._stringify = stringify
def input(self, context):
self._context = context
def output(self):
str_context = self._context
if self._stringify:
try:
str_context = str(self._context)
except:
pass # oops...
self._logger.log(self._loglevel, str_context)
return self._context
def reset(self):
del self._context
|
import sys
import os
from datamodel.base import node
class ConsolePrinter(node.Node):
"""
This node prints on stdout its context and then returns it as output.
"""
def input(self, context):
self._context = context
def output(self):
try:
sys.stdout.write(str(self._context))
sys.stdout.write(os.linesep)
except:
pass
return self._context
def reset(self):
del self._context
class LogPrinter(node.Node):
"""
This node prints its context on a statically defined logger and then
returns it as output
"""
def __init__(self, logger, loglevel, stringify=False, name=None):
"""
:param logger: any logging.Logger subtype
:param loglevel: the log level
:param stringify: try to cast to str the context before passing it to
the logger
:param name: name of this node
"""
node.Node.__init__(self, name=name)
self._logger = logger
self._loglevel = loglevel
self._stringify = stringify
def input(self, context):
self._context = context
def output(self):
str_context = self._context
if self._stringify:
try:
str_context = str(self._context)
except:
pass # oops...
self._logger.log(self._loglevel, str_context)
return self._context
def reset(self):
del self._context
|
Make sure we only write chars to stdout
|
Make sure we only write chars to stdout
|
Python
|
apache-2.0
|
csparpa/robograph,csparpa/robograph
|
python
|
## Code Before:
import sys
from datamodel.base import node
class ConsolePrinter(node.Node):
"""
This node prints on stdout its context and then returns it as output.
"""
def input(self, context):
self._context = context
def output(self):
sys.stdout.write(self._context)
return self._context
def reset(self):
del self._context
class LogPrinter(node.Node):
"""
This node prints its context on a statically defined logger and then
returns it as output
"""
def __init__(self, logger, loglevel, stringify=False, name=None):
"""
:param logger: any logging.Logger subtype
:param loglevel: the log level
:param stringify: try to cast to str the context before passing it to
the logger
:param name: name of this node
"""
node.Node.__init__(self, name=name)
self._logger = logger
self._loglevel = loglevel
self._stringify = stringify
def input(self, context):
self._context = context
def output(self):
str_context = self._context
if self._stringify:
try:
str_context = str(self._context)
except:
pass # oops...
self._logger.log(self._loglevel, str_context)
return self._context
def reset(self):
del self._context
## Instruction:
Make sure we only write chars to stdout
## Code After:
import sys
import os
from datamodel.base import node
class ConsolePrinter(node.Node):
"""
This node prints on stdout its context and then returns it as output.
"""
def input(self, context):
self._context = context
def output(self):
try:
sys.stdout.write(str(self._context))
sys.stdout.write(os.linesep)
except:
pass
return self._context
def reset(self):
del self._context
class LogPrinter(node.Node):
"""
This node prints its context on a statically defined logger and then
returns it as output
"""
def __init__(self, logger, loglevel, stringify=False, name=None):
"""
:param logger: any logging.Logger subtype
:param loglevel: the log level
:param stringify: try to cast to str the context before passing it to
the logger
:param name: name of this node
"""
node.Node.__init__(self, name=name)
self._logger = logger
self._loglevel = loglevel
self._stringify = stringify
def input(self, context):
self._context = context
def output(self):
str_context = self._context
if self._stringify:
try:
str_context = str(self._context)
except:
pass # oops...
self._logger.log(self._loglevel, str_context)
return self._context
def reset(self):
del self._context
|
...
import sys
import os
from datamodel.base import node
...
self._context = context
def output(self):
try:
sys.stdout.write(str(self._context))
sys.stdout.write(os.linesep)
except:
pass
return self._context
def reset(self):
...
|
666f82be2ed920d34f9d7612aa090ea5b5069eda
|
loritta-discord/src/main/java/com/mrpowergamerbr/loritta/tables/Profiles.kt
|
loritta-discord/src/main/java/com/mrpowergamerbr/loritta/tables/Profiles.kt
|
package com.mrpowergamerbr.loritta.tables
import org.jetbrains.exposed.sql.ReferenceOption
object Profiles : SnowflakeTable() {
val xp = long("xp").index()
val lastMessageSentAt = long("last_message_sent_at")
val lastMessageSentHash = integer("last_message_sent_hash")
val lastCommandSentAt = long("last_command_sent_at").nullable()
val money = long("money").index()
var isAfk = bool("isAfk")
var afkReason = text("afkReason").nullable()
var settings = reference("settings", UserSettings, onDelete = ReferenceOption.CASCADE).index()
var marriage = reference("marriage", Marriages).nullable().index()
}
|
package com.mrpowergamerbr.loritta.tables
import org.jetbrains.exposed.sql.ReferenceOption
object Profiles : SnowflakeTable() {
val xp = long("xp").index()
val lastMessageSentAt = long("last_message_sent_at")
val lastMessageSentHash = integer("last_message_sent_hash")
val lastCommandSentAt = long("last_command_sent_at").nullable()
val money = long("money").index()
var isAfk = bool("isAfk")
var afkReason = text("afkReason").nullable()
var settings = reference("settings", UserSettings, onDelete = ReferenceOption.CASCADE).index()
var marriage = reference("marriage", Marriages, onDelete = ReferenceOption.CASCADE).nullable().index()
}
|
Add cascade delete for marriages
|
Add cascade delete for marriages
|
Kotlin
|
agpl-3.0
|
LorittaBot/Loritta,LorittaBot/Loritta,LorittaBot/Loritta,LorittaBot/Loritta
|
kotlin
|
## Code Before:
package com.mrpowergamerbr.loritta.tables
import org.jetbrains.exposed.sql.ReferenceOption
object Profiles : SnowflakeTable() {
val xp = long("xp").index()
val lastMessageSentAt = long("last_message_sent_at")
val lastMessageSentHash = integer("last_message_sent_hash")
val lastCommandSentAt = long("last_command_sent_at").nullable()
val money = long("money").index()
var isAfk = bool("isAfk")
var afkReason = text("afkReason").nullable()
var settings = reference("settings", UserSettings, onDelete = ReferenceOption.CASCADE).index()
var marriage = reference("marriage", Marriages).nullable().index()
}
## Instruction:
Add cascade delete for marriages
## Code After:
package com.mrpowergamerbr.loritta.tables
import org.jetbrains.exposed.sql.ReferenceOption
object Profiles : SnowflakeTable() {
val xp = long("xp").index()
val lastMessageSentAt = long("last_message_sent_at")
val lastMessageSentHash = integer("last_message_sent_hash")
val lastCommandSentAt = long("last_command_sent_at").nullable()
val money = long("money").index()
var isAfk = bool("isAfk")
var afkReason = text("afkReason").nullable()
var settings = reference("settings", UserSettings, onDelete = ReferenceOption.CASCADE).index()
var marriage = reference("marriage", Marriages, onDelete = ReferenceOption.CASCADE).nullable().index()
}
|
...
var isAfk = bool("isAfk")
var afkReason = text("afkReason").nullable()
var settings = reference("settings", UserSettings, onDelete = ReferenceOption.CASCADE).index()
var marriage = reference("marriage", Marriages, onDelete = ReferenceOption.CASCADE).nullable().index()
}
...
|
7dc44d8250ccf91b13833af1605f46b131d715e5
|
tests/testprocs/org/voltdb_testprocs/fakeusecase/greetings/package-info.java
|
tests/testprocs/org/voltdb_testprocs/fakeusecase/greetings/package-info.java
|
/* This file is part of VoltDB.
* Copyright (C) 2008-2017 VoltDB Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
/**
* A fake use case intended for use verifying that 'voltdb init --classes' works with non-trivial use cases.
* The theme is querying translations for 'Hello' in different languages.
*/
package fakeusecase.greetings;
|
/* This file is part of VoltDB.
* Copyright (C) 2008-2017 VoltDB Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
/**
* A fake use case intended for use verifying that 'voltdb init --classes' works with non-trivial use cases.
* The theme is querying translations for 'Hello' in different languages.
*/
package org.voltdb_testprocs.fakeusecase.greetings;
|
Fix package name to keep Eclipse happy.
|
Fix package name to keep Eclipse happy.
|
Java
|
agpl-3.0
|
deerwalk/voltdb,deerwalk/voltdb,deerwalk/voltdb,deerwalk/voltdb,VoltDB/voltdb,simonzhangsm/voltdb,simonzhangsm/voltdb,VoltDB/voltdb,VoltDB/voltdb,simonzhangsm/voltdb,simonzhangsm/voltdb,simonzhangsm/voltdb,deerwalk/voltdb,deerwalk/voltdb,deerwalk/voltdb,VoltDB/voltdb,deerwalk/voltdb,VoltDB/voltdb,simonzhangsm/voltdb,simonzhangsm/voltdb,simonzhangsm/voltdb,VoltDB/voltdb,VoltDB/voltdb
|
java
|
## Code Before:
/* This file is part of VoltDB.
* Copyright (C) 2008-2017 VoltDB Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
/**
* A fake use case intended for use verifying that 'voltdb init --classes' works with non-trivial use cases.
* The theme is querying translations for 'Hello' in different languages.
*/
package fakeusecase.greetings;
## Instruction:
Fix package name to keep Eclipse happy.
## Code After:
/* This file is part of VoltDB.
* Copyright (C) 2008-2017 VoltDB Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
/**
* A fake use case intended for use verifying that 'voltdb init --classes' works with non-trivial use cases.
* The theme is querying translations for 'Hello' in different languages.
*/
package org.voltdb_testprocs.fakeusecase.greetings;
|
...
* A fake use case intended for use verifying that 'voltdb init --classes' works with non-trivial use cases.
* The theme is querying translations for 'Hello' in different languages.
*/
package org.voltdb_testprocs.fakeusecase.greetings;
...
|
8ca16832b54c887e6e3a84d7018181bf7e55fba0
|
comrade/core/context_processors.py
|
comrade/core/context_processors.py
|
from django.conf import settings
from django.contrib.sites.models import Site
from settings import DeploymentType
def default(request):
context = {}
context['DEPLOYMENT'] = settings.DEPLOYMENT
context['site'] = Site.objects.get_current()
if settings.DEPLOYMENT != DeploymentType.PRODUCTION:
context['GIT_COMMIT'] = settings.GIT_COMMIT
return context
|
from django.conf import settings
from django.contrib.sites.models import Site
from settings import DeploymentType
def default(request):
context = {}
context['DEPLOYMENT'] = settings.DEPLOYMENT
context['site'] = Site.objects.get_current()
if settings.DEPLOYMENT != DeploymentType.PRODUCTION:
context['GIT_COMMIT'] = settings.GIT_COMMIT
return context
def ssl_media(request):
if request.is_secure():
ssl_media_url = settings.MEDIA_URL.replace('http://','https://')
else:
ssl_media_url = settings.MEDIA_URL
return {'MEDIA_URL': ssl_media_url}
|
Add SSL media context processor.
|
Add SSL media context processor.
|
Python
|
mit
|
bueda/django-comrade
|
python
|
## Code Before:
from django.conf import settings
from django.contrib.sites.models import Site
from settings import DeploymentType
def default(request):
context = {}
context['DEPLOYMENT'] = settings.DEPLOYMENT
context['site'] = Site.objects.get_current()
if settings.DEPLOYMENT != DeploymentType.PRODUCTION:
context['GIT_COMMIT'] = settings.GIT_COMMIT
return context
## Instruction:
Add SSL media context processor.
## Code After:
from django.conf import settings
from django.contrib.sites.models import Site
from settings import DeploymentType
def default(request):
context = {}
context['DEPLOYMENT'] = settings.DEPLOYMENT
context['site'] = Site.objects.get_current()
if settings.DEPLOYMENT != DeploymentType.PRODUCTION:
context['GIT_COMMIT'] = settings.GIT_COMMIT
return context
def ssl_media(request):
if request.is_secure():
ssl_media_url = settings.MEDIA_URL.replace('http://','https://')
else:
ssl_media_url = settings.MEDIA_URL
return {'MEDIA_URL': ssl_media_url}
|
// ... existing code ...
if settings.DEPLOYMENT != DeploymentType.PRODUCTION:
context['GIT_COMMIT'] = settings.GIT_COMMIT
return context
def ssl_media(request):
if request.is_secure():
ssl_media_url = settings.MEDIA_URL.replace('http://','https://')
else:
ssl_media_url = settings.MEDIA_URL
return {'MEDIA_URL': ssl_media_url}
// ... rest of the code ...
|
05375b10cfd6e060242c9786fb7887dcd3850ebc
|
Wangscape/noise/module/codecs/NoiseQualityCodec.h
|
Wangscape/noise/module/codecs/NoiseQualityCodec.h
|
namespace spotify
{
namespace json
{
template<>
struct default_codec_t<noise::NoiseQuality>
{
using NoiseQuality = noise::NoiseQuality;
static codec::enumeration_t<NoiseQuality, codec::string_t> codec()
{
auto codec = codec::enumeration<NoiseQuality, std::string>({
{NoiseQuality::QUALITY_FAST, "Fast"},
{NoiseQuality::QUALITY_STD, "Standard"},
{NoiseQuality::QUALITY_BEST, "Best"}
});
return codec;
}
};
}
}
|
namespace spotify
{
namespace json
{
template<>
struct default_codec_t<noise::NoiseQuality>
{
using NoiseQuality = noise::NoiseQuality;
static codec::one_of_t<
codec::enumeration_t<NoiseQuality, codec::number_t<int>>,
codec::enumeration_t<NoiseQuality, codec::string_t>> codec()
{
auto codec_str = codec::enumeration<NoiseQuality, std::string>({
{NoiseQuality::QUALITY_FAST, "Fast"},
{NoiseQuality::QUALITY_STD, "Standard"},
{NoiseQuality::QUALITY_BEST, "Best"}
});
auto codec_int = codec::enumeration<NoiseQuality, int>({
{NoiseQuality::QUALITY_FAST, 0},
{NoiseQuality::QUALITY_STD, 1},
{NoiseQuality::QUALITY_BEST, 2}
});
return codec::one_of(codec_int, codec_str);
}
};
}
}
|
Allow specification of NoiseQuality with an int
|
Allow specification of NoiseQuality with an int
|
C
|
mit
|
Wangscape/Wangscape,Wangscape/Wangscape,Wangscape/Wangscape,serin-delaunay/Wangscape,serin-delaunay/Wangscape
|
c
|
## Code Before:
namespace spotify
{
namespace json
{
template<>
struct default_codec_t<noise::NoiseQuality>
{
using NoiseQuality = noise::NoiseQuality;
static codec::enumeration_t<NoiseQuality, codec::string_t> codec()
{
auto codec = codec::enumeration<NoiseQuality, std::string>({
{NoiseQuality::QUALITY_FAST, "Fast"},
{NoiseQuality::QUALITY_STD, "Standard"},
{NoiseQuality::QUALITY_BEST, "Best"}
});
return codec;
}
};
}
}
## Instruction:
Allow specification of NoiseQuality with an int
## Code After:
namespace spotify
{
namespace json
{
template<>
struct default_codec_t<noise::NoiseQuality>
{
using NoiseQuality = noise::NoiseQuality;
static codec::one_of_t<
codec::enumeration_t<NoiseQuality, codec::number_t<int>>,
codec::enumeration_t<NoiseQuality, codec::string_t>> codec()
{
auto codec_str = codec::enumeration<NoiseQuality, std::string>({
{NoiseQuality::QUALITY_FAST, "Fast"},
{NoiseQuality::QUALITY_STD, "Standard"},
{NoiseQuality::QUALITY_BEST, "Best"}
});
auto codec_int = codec::enumeration<NoiseQuality, int>({
{NoiseQuality::QUALITY_FAST, 0},
{NoiseQuality::QUALITY_STD, 1},
{NoiseQuality::QUALITY_BEST, 2}
});
return codec::one_of(codec_int, codec_str);
}
};
}
}
|
...
struct default_codec_t<noise::NoiseQuality>
{
using NoiseQuality = noise::NoiseQuality;
static codec::one_of_t<
codec::enumeration_t<NoiseQuality, codec::number_t<int>>,
codec::enumeration_t<NoiseQuality, codec::string_t>> codec()
{
auto codec_str = codec::enumeration<NoiseQuality, std::string>({
{NoiseQuality::QUALITY_FAST, "Fast"},
{NoiseQuality::QUALITY_STD, "Standard"},
{NoiseQuality::QUALITY_BEST, "Best"}
});
auto codec_int = codec::enumeration<NoiseQuality, int>({
{NoiseQuality::QUALITY_FAST, 0},
{NoiseQuality::QUALITY_STD, 1},
{NoiseQuality::QUALITY_BEST, 2}
});
return codec::one_of(codec_int, codec_str);
}
};
...
|
d74c82d31071d80c5433fce0ebc46a1145b00d7e
|
bin/burgers.py
|
bin/burgers.py
|
from wenohj.solver import Solver
import numpy as np
import matplotlib.pyplot as plt
def get_alpha(x, t, u, u_x_plus, u_x_minus):
max = np.zeros_like(x)
for i in range(len(x)):
if np.abs(u_x_plus[i] + 1.0) > np.abs(u_x_minus[i] + 1.0):
max[i] = np.abs(u_x_plus[i] + 1.0)
else:
max[i] = np.abs(u_x_minus[i] + 1.0)
return max
def flux(x, t, u, u_x):
return (u_x + 1)**2 / 2.0
lb = -1.0
rb = 1.0
ncells = 320
T = 3.5 / np.pi**2
s = Solver(lb, rb, ncells, flux, get_alpha, 'periodic', cfl=0.1)
x = s.get_x()
u0 = -np.cos(np.pi * x)
solution = s.solve(u0, T)
plt.plot(x, solution)
plt.show()
|
from wenohj.solver import Solver
import numpy as np
import matplotlib.pyplot as plt
def get_alpha(x, t, u, u_x_plus, u_x_minus):
f1 = np.abs(u_x_plus + 1.0)
f2 = np.abs(u_x_minus + 1.0)
return np.maximum(f1, f2)
def flux(x, t, u, u_x):
return (u_x + 1)**2 / 2.0
lb = -1.0
rb = 1.0
ncells = 320
T = 3.5 / np.pi**2
s = Solver(lb, rb, ncells, flux, get_alpha, 'periodic', cfl=0.1)
x = s.get_x()
u0 = -np.cos(np.pi * x)
solution = s.solve(u0, T)
plt.plot(x, solution)
plt.show()
|
Refactor get_alpha() to make it faster
|
Refactor get_alpha() to make it faster
Computations of alpha using for loop are very slow. I switch to usage
of element-wise numpy functions to make get_alpha() function faster.
|
Python
|
bsd-3-clause
|
kabanovdmitry/weno-hamilton-jacobi,dmitry-kabanov/weno-hamilton-jacobi
|
python
|
## Code Before:
from wenohj.solver import Solver
import numpy as np
import matplotlib.pyplot as plt
def get_alpha(x, t, u, u_x_plus, u_x_minus):
max = np.zeros_like(x)
for i in range(len(x)):
if np.abs(u_x_plus[i] + 1.0) > np.abs(u_x_minus[i] + 1.0):
max[i] = np.abs(u_x_plus[i] + 1.0)
else:
max[i] = np.abs(u_x_minus[i] + 1.0)
return max
def flux(x, t, u, u_x):
return (u_x + 1)**2 / 2.0
lb = -1.0
rb = 1.0
ncells = 320
T = 3.5 / np.pi**2
s = Solver(lb, rb, ncells, flux, get_alpha, 'periodic', cfl=0.1)
x = s.get_x()
u0 = -np.cos(np.pi * x)
solution = s.solve(u0, T)
plt.plot(x, solution)
plt.show()
## Instruction:
Refactor get_alpha() to make it faster
Computations of alpha using for loop are very slow. I switch to usage
of element-wise numpy functions to make get_alpha() function faster.
## Code After:
from wenohj.solver import Solver
import numpy as np
import matplotlib.pyplot as plt
def get_alpha(x, t, u, u_x_plus, u_x_minus):
f1 = np.abs(u_x_plus + 1.0)
f2 = np.abs(u_x_minus + 1.0)
return np.maximum(f1, f2)
def flux(x, t, u, u_x):
return (u_x + 1)**2 / 2.0
lb = -1.0
rb = 1.0
ncells = 320
T = 3.5 / np.pi**2
s = Solver(lb, rb, ncells, flux, get_alpha, 'periodic', cfl=0.1)
x = s.get_x()
u0 = -np.cos(np.pi * x)
solution = s.solve(u0, T)
plt.plot(x, solution)
plt.show()
|
// ... existing code ...
def get_alpha(x, t, u, u_x_plus, u_x_minus):
f1 = np.abs(u_x_plus + 1.0)
f2 = np.abs(u_x_minus + 1.0)
return np.maximum(f1, f2)
def flux(x, t, u, u_x):
// ... rest of the code ...
|
c21eaccbee53f2b915fc35b85bf665e84b81dc8c
|
app/celery/__init__.py
|
app/celery/__init__.py
|
from celery import Celery
class NewAcropolisCelery(Celery):
def init_app(self, app):
super(NewAcropolisCelery, self).__init__(
app.import_name,
broker=app.config['CELERY_BROKER_URL'],
)
app.logger.info('Setting up celery: %s', app.config['CELERY_BROKER_URL'])
self.conf.update(app.config)
class ContextTask(self.Task):
def __call__(self, *args, **kwargs): # noqa
with app.app_context():
return self.run(*args, **kwargs)
self.Task = ContextTask
|
from celery import Celery
class NewAcropolisCelery(Celery):
def init_app(self, app):
if not app.config['CELERY_BROKER_URL']:
app.logger.info('Celery broker URL not set')
return
super(NewAcropolisCelery, self).__init__(
app.import_name,
broker=app.config['CELERY_BROKER_URL'],
)
app.logger.info('Setting up celery: %s', app.config['CELERY_BROKER_URL'])
self.conf.update(app.config)
class ContextTask(self.Task):
def __call__(self, *args, **kwargs): # noqa
with app.app_context():
return self.run(*args, **kwargs)
self.Task = ContextTask
|
Add logging for missing CELERY_BROKER_URL
|
Add logging for missing CELERY_BROKER_URL
|
Python
|
mit
|
NewAcropolis/api,NewAcropolis/api,NewAcropolis/api
|
python
|
## Code Before:
from celery import Celery
class NewAcropolisCelery(Celery):
def init_app(self, app):
super(NewAcropolisCelery, self).__init__(
app.import_name,
broker=app.config['CELERY_BROKER_URL'],
)
app.logger.info('Setting up celery: %s', app.config['CELERY_BROKER_URL'])
self.conf.update(app.config)
class ContextTask(self.Task):
def __call__(self, *args, **kwargs): # noqa
with app.app_context():
return self.run(*args, **kwargs)
self.Task = ContextTask
## Instruction:
Add logging for missing CELERY_BROKER_URL
## Code After:
from celery import Celery
class NewAcropolisCelery(Celery):
def init_app(self, app):
if not app.config['CELERY_BROKER_URL']:
app.logger.info('Celery broker URL not set')
return
super(NewAcropolisCelery, self).__init__(
app.import_name,
broker=app.config['CELERY_BROKER_URL'],
)
app.logger.info('Setting up celery: %s', app.config['CELERY_BROKER_URL'])
self.conf.update(app.config)
class ContextTask(self.Task):
def __call__(self, *args, **kwargs): # noqa
with app.app_context():
return self.run(*args, **kwargs)
self.Task = ContextTask
|
...
class NewAcropolisCelery(Celery):
def init_app(self, app):
if not app.config['CELERY_BROKER_URL']:
app.logger.info('Celery broker URL not set')
return
super(NewAcropolisCelery, self).__init__(
app.import_name,
broker=app.config['CELERY_BROKER_URL'],
...
|
c49c6113ab8ca9293cad0fc766c6b4bd90e22a75
|
test/small1/strloop.c
|
test/small1/strloop.c
|
void BuildWord(char * pchWord) {
int i;
char * pch = pchWord;
/* original code:
* while ((i = *pch++) != '\0') { }
*/
do {
i = *pch;
pch++;
} while (i != '\0');
printf("%s\n",pchWord);
}
int main() {
char *test = "foo";
BuildWord(test);
SUCCESS;
}
|
void BuildWord(char * pchWord) {
int i;
char * pch = pchWord;
/* original code:
* while ((i = *pch++) != '\0') { }
*/
do {
i = *pch;
// printf("i = '%c'\n",i);
pch++;
} while (i != '\0');
printf("%s\n",pchWord);
}
int main() {
char *test = "foo";
test++;
test--;
BuildWord(test);
SUCCESS;
}
|
Switch the world over to the new 'paper' solver. Local regression tests indicate that it shouldn't be that bad: new models and wrappers have been added to support it. INFERBOX=infer is now the new solver, INFERBOX=old gives the old behavior (will be removed later).
|
Switch the world over to the new 'paper' solver. Local regression tests
indicate that it shouldn't be that bad: new models and wrappers have been
added to support it. INFERBOX=infer is now the new solver, INFERBOX=old
gives the old behavior (will be removed later).
|
C
|
bsd-3-clause
|
samuelhavron/obliv-c,samuelhavron/obliv-c,samuelhavron/obliv-c,samuelhavron/obliv-c
|
c
|
## Code Before:
void BuildWord(char * pchWord) {
int i;
char * pch = pchWord;
/* original code:
* while ((i = *pch++) != '\0') { }
*/
do {
i = *pch;
pch++;
} while (i != '\0');
printf("%s\n",pchWord);
}
int main() {
char *test = "foo";
BuildWord(test);
SUCCESS;
}
## Instruction:
Switch the world over to the new 'paper' solver. Local regression tests
indicate that it shouldn't be that bad: new models and wrappers have been
added to support it. INFERBOX=infer is now the new solver, INFERBOX=old
gives the old behavior (will be removed later).
## Code After:
void BuildWord(char * pchWord) {
int i;
char * pch = pchWord;
/* original code:
* while ((i = *pch++) != '\0') { }
*/
do {
i = *pch;
// printf("i = '%c'\n",i);
pch++;
} while (i != '\0');
printf("%s\n",pchWord);
}
int main() {
char *test = "foo";
test++;
test--;
BuildWord(test);
SUCCESS;
}
|
// ... existing code ...
do {
i = *pch;
// printf("i = '%c'\n",i);
pch++;
} while (i != '\0');
// ... modified code ...
int main() {
char *test = "foo";
test++;
test--;
BuildWord(test);
SUCCESS;
// ... rest of the code ...
|
861f4218f4f119716f4bef74a2323930272cfb8f
|
atlasdb-impl-shared/src/main/java/com/palantir/atlasdb/keyvalue/partition/ConsistentRingRangeRequest.java
|
atlasdb-impl-shared/src/main/java/com/palantir/atlasdb/keyvalue/partition/ConsistentRingRangeRequest.java
|
package com.palantir.atlasdb.keyvalue.partition;
import com.palantir.atlasdb.keyvalue.api.RangeRequest;
import com.palantir.common.annotation.Immutable;
@Immutable public class ConsistentRingRangeRequest {
private final RangeRequest rangeRequest;
public RangeRequest get() {
return rangeRequest;
}
private ConsistentRingRangeRequest(RangeRequest rangeRequest) {
this.rangeRequest = rangeRequest;
}
public static ConsistentRingRangeRequest of(RangeRequest rangeRequest) {
return new ConsistentRingRangeRequest(rangeRequest);
}
public String toString() {
return "CRRR=[" + rangeRequest + "]";
}
}
|
package com.palantir.atlasdb.keyvalue.partition;
import com.palantir.atlasdb.keyvalue.api.RangeRequest;
import com.palantir.common.annotation.Immutable;
@Immutable public class ConsistentRingRangeRequest {
private final RangeRequest rangeRequest;
public RangeRequest get() {
return rangeRequest;
}
private ConsistentRingRangeRequest(RangeRequest rangeRequest) {
this.rangeRequest = rangeRequest;
}
public static ConsistentRingRangeRequest of(RangeRequest rangeRequest) {
return new ConsistentRingRangeRequest(rangeRequest);
}
public String toString() {
return "CRRR=[" + rangeRequest + "]";
}
@Override
public boolean equals(Object other) {
if (other instanceof ConsistentRingRangeRequest == false) {
return false;
}
ConsistentRingRangeRequest otherCrrr = (ConsistentRingRangeRequest) other;
return get().equals(otherCrrr.get());
}
}
|
Add equals impl to CRRR
|
Add equals impl to CRRR
|
Java
|
apache-2.0
|
j-baker/atlasdb,j-baker/atlasdb,sh4nth/atlasdb-1,palantir/atlasdb,andy2palantir/atlasdb,palantir/atlasdb,sh4nth/atlasdb-1,EvilMcJerkface/atlasdb,palantir/atlasdb,EvilMcJerkface/atlasdb,andy2palantir/atlasdb,j-baker/atlasdb,EvilMcJerkface/atlasdb
|
java
|
## Code Before:
package com.palantir.atlasdb.keyvalue.partition;
import com.palantir.atlasdb.keyvalue.api.RangeRequest;
import com.palantir.common.annotation.Immutable;
@Immutable public class ConsistentRingRangeRequest {
private final RangeRequest rangeRequest;
public RangeRequest get() {
return rangeRequest;
}
private ConsistentRingRangeRequest(RangeRequest rangeRequest) {
this.rangeRequest = rangeRequest;
}
public static ConsistentRingRangeRequest of(RangeRequest rangeRequest) {
return new ConsistentRingRangeRequest(rangeRequest);
}
public String toString() {
return "CRRR=[" + rangeRequest + "]";
}
}
## Instruction:
Add equals impl to CRRR
## Code After:
package com.palantir.atlasdb.keyvalue.partition;
import com.palantir.atlasdb.keyvalue.api.RangeRequest;
import com.palantir.common.annotation.Immutable;
@Immutable public class ConsistentRingRangeRequest {
private final RangeRequest rangeRequest;
public RangeRequest get() {
return rangeRequest;
}
private ConsistentRingRangeRequest(RangeRequest rangeRequest) {
this.rangeRequest = rangeRequest;
}
public static ConsistentRingRangeRequest of(RangeRequest rangeRequest) {
return new ConsistentRingRangeRequest(rangeRequest);
}
public String toString() {
return "CRRR=[" + rangeRequest + "]";
}
@Override
public boolean equals(Object other) {
if (other instanceof ConsistentRingRangeRequest == false) {
return false;
}
ConsistentRingRangeRequest otherCrrr = (ConsistentRingRangeRequest) other;
return get().equals(otherCrrr.get());
}
}
|
// ... existing code ...
public String toString() {
return "CRRR=[" + rangeRequest + "]";
}
@Override
public boolean equals(Object other) {
if (other instanceof ConsistentRingRangeRequest == false) {
return false;
}
ConsistentRingRangeRequest otherCrrr = (ConsistentRingRangeRequest) other;
return get().equals(otherCrrr.get());
}
}
// ... rest of the code ...
|
22d68da72421c17ee15e0c7be71740b10a93ee9e
|
testing/platform_test.h
|
testing/platform_test.h
|
// Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef TESTING_PLATFORM_TEST_H_
#define TESTING_PLATFORM_TEST_H_
#include <gtest/gtest.h>
#if defined(GTEST_OS_MAC)
#ifdef __OBJC__
@class NSAutoreleasePool;
#else
class NSAutoreleasePool;
#endif
// The purpose of this class us to provide a hook for platform-specific
// operations across unit tests. For example, on the Mac, it creates and
// releases an outer NSAutoreleasePool for each test case. For now, it's only
// implemented on the Mac. To enable this for another platform, just adjust
// the #ifdefs and add a platform_test_<platform>.cc implementation file.
class PlatformTest : public testing::Test {
protected:
PlatformTest();
virtual ~PlatformTest();
private:
NSAutoreleasePool* pool_;
};
#else
typedef testing::Test PlatformTest;
#endif // GTEST_OS_MAC
#endif // TESTING_PLATFORM_TEST_H_
|
// Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef TESTING_PLATFORM_TEST_H_
#define TESTING_PLATFORM_TEST_H_
#include <gtest/gtest.h>
#if defined(GTEST_OS_MAC)
#ifdef __OBJC__
@class NSAutoreleasePool;
#else
class NSAutoreleasePool;
#endif
// The purpose of this class us to provide a hook for platform-specific
// operations across unit tests. For example, on the Mac, it creates and
// releases an outer NSAutoreleasePool for each test case. For now, it's only
// implemented on the Mac. To enable this for another platform, just adjust
// the #ifdefs and add a platform_test_<platform>.cc implementation file.
class PlatformTest : public testing::Test {
public:
virtual ~PlatformTest();
protected:
PlatformTest();
private:
NSAutoreleasePool* pool_;
};
#else
typedef testing::Test PlatformTest;
#endif // GTEST_OS_MAC
#endif // TESTING_PLATFORM_TEST_H_
|
Change visibility of the destructor to public.
|
Change visibility of the destructor to public.
PlatformTest's destructor was set as protected, though the parent class
testing::Test declares it public.
BUG=none
Review URL: https://chromiumcodereview.appspot.com/11038058
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@161352 0039d316-1c4b-4281-b951-d872f2087c98
|
C
|
bsd-3-clause
|
pozdnyakov/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,pozdnyakov/chromium-crosswalk,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,jaruba/chromium.src,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,M4sse/chromium.src,PeterWangIntel/chromium-crosswalk,patrickm/chromium.src,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,bright-sparks/chromium-spacewalk,ondra-novak/chromium.src,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,anirudhSK/chromium,crosswalk-project/chromium-crosswalk-efl,patrickm/chromium.src,ChromiumWebApps/chromium,chuan9/chromium-crosswalk,pozdnyakov/chromium-crosswalk,axinging/chromium-crosswalk,dushu1203/chromium.src,dushu1203/chromium.src,timopulkkinen/BubbleFish,crosswalk-project/chromium-crosswalk-efl,hgl888/chromium-crosswalk,timopulkkinen/BubbleFish,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk-efl,dushu1203/chromium.src,hgl888/chromium-crosswalk,bright-sparks/chromium-spacewalk,PeterWangIntel/chromium-crosswalk,mogoweb/chromium-crosswalk,hujiajie/pa-chromium,dednal/chromium.src,mogoweb/chromium-crosswalk,nacl-webkit/chrome_deps,hgl888/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,dushu1203/chromium.src,axinging/chromium-crosswalk,ondra-novak/chromium.src,anirudhSK/chromium,zcbenz/cefode-chromium,Fireblend/chromium-crosswalk,nacl-webkit/chrome_deps,hujiajie/pa-chromium,junmin-zhu/chromium-rivertrail,ChromiumWebApps/chromium,ChromiumWebApps/chromium,krieger-od/nwjs_chromium.src,junmin-zhu/chromium-rivertrail,mogoweb/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,ChromiumWebApps/chromium,ondra-novak/chromium.src,mogoweb/chromium-crosswalk,dushu1203/chromium.src,dednal/chromium.src,hgl888/chromium-crosswalk,Chilledheart/chromium,nacl-webkit/chrome_deps,timopulkkinen/BubbleFish,nacl-webkit/chrome_deps,anirudhSK/chromium,anirudhSK/chromium,krieger-od/nwjs_chromium.src,ltilve/chromium,Just-D/chromium-1,dednal/chromium.src,markYoungH/chromium.src,Just-D/chromiu
m-1,jaruba/chromium.src,nacl-webkit/chrome_deps,junmin-zhu/chromium-rivertrail,hujiajie/pa-chromium,crosswalk-project/chromium-crosswalk-efl,Jonekee/chromium.src,nacl-webkit/chrome_deps,zcbenz/cefode-chromium,ltilve/chromium,markYoungH/chromium.src,junmin-zhu/chromium-rivertrail,pozdnyakov/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,ChromiumWebApps/chromium,pozdnyakov/chromium-crosswalk,anirudhSK/chromium,markYoungH/chromium.src,M4sse/chromium.src,fujunwei/chromium-crosswalk,pozdnyakov/chromium-crosswalk,zcbenz/cefode-chromium,littlstar/chromium.src,Jonekee/chromium.src,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk-efl,mogoweb/chromium-crosswalk,markYoungH/chromium.src,krieger-od/nwjs_chromium.src,timopulkkinen/BubbleFish,hgl888/chromium-crosswalk-efl,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,Jonekee/chromium.src,patrickm/chromium.src,littlstar/chromium.src,jaruba/chromium.src,crosswalk-project/chromium-crosswalk-efl,hujiajie/pa-chromium,TheTypoMaster/chromium-crosswalk,Fireblend/chromium-crosswalk,krieger-od/nwjs_chromium.src,patrickm/chromium.src,ChromiumWebApps/chromium,ltilve/chromium,timopulkkinen/BubbleFish,Chilledheart/chromium,hgl888/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,mogoweb/chromium-crosswalk,nacl-webkit/chrome_deps,dednal/chromium.src,dednal/chromium.src,patrickm/chromium.src,TheTypoMaster/chromium-crosswalk,anirudhSK/chromium,zcbenz/cefode-chromium,hgl888/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,M4sse/chromium.src,hgl888/chromium-crosswalk-efl,anirudhSK/chromium,ChromiumWebApps/chromium,axinging/chromium-crosswalk,markYoungH/chromium.src,ondra-novak/chromium.src,dednal/chromium.src,littlstar/chromium.src,Jonekee/chromium.src,chuan9/chromium-crosswalk,Chilledheart/chromium,timopulkkinen/BubbleFish,ltilve/chromium,littlstar/chromium.src,M4sse/chromium.src,Fireblend/chromium-crosswalk,krieger-od/nwjs_chromium.src,kri
eger-od/nwjs_chromium.src,bright-sparks/chromium-spacewalk,bright-sparks/chromium-spacewalk,jaruba/chromium.src,junmin-zhu/chromium-rivertrail,ChromiumWebApps/chromium,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk,dednal/chromium.src,zcbenz/cefode-chromium,fujunwei/chromium-crosswalk,pozdnyakov/chromium-crosswalk,littlstar/chromium.src,crosswalk-project/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,ltilve/chromium,Pluto-tv/chromium-crosswalk,jaruba/chromium.src,dushu1203/chromium.src,patrickm/chromium.src,Jonekee/chromium.src,ChromiumWebApps/chromium,fujunwei/chromium-crosswalk,timopulkkinen/BubbleFish,nacl-webkit/chrome_deps,hujiajie/pa-chromium,ltilve/chromium,axinging/chromium-crosswalk,chuan9/chromium-crosswalk,krieger-od/nwjs_chromium.src,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,zcbenz/cefode-chromium,mohamed--abdel-maksoud/chromium.src,ltilve/chromium,anirudhSK/chromium,markYoungH/chromium.src,M4sse/chromium.src,junmin-zhu/chromium-rivertrail,markYoungH/chromium.src,PeterWangIntel/chromium-crosswalk,Chilledheart/chromium,Fireblend/chromium-crosswalk,pozdnyakov/chromium-crosswalk,dushu1203/chromium.src,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src,ChromiumWebApps/chromium,fujunwei/chromium-crosswalk,zcbenz/cefode-chromium,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,patrickm/chromium.src,hgl888/chromium-crosswalk,mogoweb/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Jonekee/chromium.src,hgl888/chromium-crosswalk-efl,ondra-novak/chromium.src,chuan9/chromium-crosswalk,zcbenz/cefode-chromium,zcbenz/cefode-chromium,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,timopulkkinen/BubbleFish,junmin-zhu/chromium-rivertrail,Jonekee/chromium.src,timopulkkinen/BubbleFish,dednal/chromium.src,Fireblend/chromium-crosswalk,littlstar/chromium.src,patrickm/chromium.src,Just-D/chromium-1,junmin-zhu/chromium-rivertrail,anirudhSK/chromium,anirudhSK/chromium,markYoungH/chromium.src,fujunwei/chromium-crosswalk,M4sse/c
hromium.src,mohamed--abdel-maksoud/chromium.src,jaruba/chromium.src,M4sse/chromium.src,ChromiumWebApps/chromium,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,Pluto-tv/chromium-crosswalk,markYoungH/chromium.src,timopulkkinen/BubbleFish,axinging/chromium-crosswalk,hujiajie/pa-chromium,zcbenz/cefode-chromium,Pluto-tv/chromium-crosswalk,krieger-od/nwjs_chromium.src,jaruba/chromium.src,dednal/chromium.src,M4sse/chromium.src,jaruba/chromium.src,crosswalk-project/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,littlstar/chromium.src,bright-sparks/chromium-spacewalk,ondra-novak/chromium.src,Chilledheart/chromium,jaruba/chromium.src,nacl-webkit/chrome_deps,crosswalk-project/chromium-crosswalk-efl,markYoungH/chromium.src,Pluto-tv/chromium-crosswalk,pozdnyakov/chromium-crosswalk,axinging/chromium-crosswalk,nacl-webkit/chrome_deps,pozdnyakov/chromium-crosswalk,Chilledheart/chromium,mogoweb/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,M4sse/chromium.src,zcbenz/cefode-chromium,axinging/chromium-crosswalk,hujiajie/pa-chromium,hujiajie/pa-chromium,jaruba/chromium.src,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,ltilve/chromium,Just-D/chromium-1,ltilve/chromium,ondra-novak/chromium.src,Pluto-tv/chromium-crosswalk,bright-sparks/chromium-spacewalk,hgl888/chromium-crosswalk,bright-sparks/chromium-spacewalk,Chilledheart/chromium,junmin-zhu/chromium-rivertrail,ondra-novak/chromium.src,PeterWangIntel/chromium-crosswalk,chuan9/chromium-crosswalk,ChromiumWebApps/chromium,anirudhSK/chromium,Fireblend/chromium-crosswalk,dushu1203/chromium.src,Jonekee/chromium.src,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk-efl,axinging/chromium-crosswalk,dednal/chromium.src,Just-D/chromium-1,hgl888/chromium-crosswalk,bright-sparks/chromium-spacewalk,hujiajie/pa-chromium,Chilledheart/chromium,Fireblend/chromium-crosswalk,bright-sparks/chromium-spacewalk,jaruba/chromium.src,chuan9/chromium-crosswalk,M4sse/chromium.src,littlstar/chromium.src,timopulkkinen/BubbleFish,dednal/chr
omium.src,hgl888/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,chuan9/chromium-crosswalk,mogoweb/chromium-crosswalk,markYoungH/chromium.src,chuan9/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,ondra-novak/chromium.src,Pluto-tv/chromium-crosswalk,anirudhSK/chromium,pozdnyakov/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,nacl-webkit/chrome_deps,patrickm/chromium.src,TheTypoMaster/chromium-crosswalk,hujiajie/pa-chromium,fujunwei/chromium-crosswalk,junmin-zhu/chromium-rivertrail,Jonekee/chromium.src,hujiajie/pa-chromium,Jonekee/chromium.src,chuan9/chromium-crosswalk,mogoweb/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,Just-D/chromium-1,mohamed--abdel-maksoud/chromium.src,junmin-zhu/chromium-rivertrail,TheTypoMaster/chromium-crosswalk
|
c
|
## Code Before:
// Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef TESTING_PLATFORM_TEST_H_
#define TESTING_PLATFORM_TEST_H_
#include <gtest/gtest.h>
#if defined(GTEST_OS_MAC)
#ifdef __OBJC__
@class NSAutoreleasePool;
#else
class NSAutoreleasePool;
#endif
// The purpose of this class us to provide a hook for platform-specific
// operations across unit tests. For example, on the Mac, it creates and
// releases an outer NSAutoreleasePool for each test case. For now, it's only
// implemented on the Mac. To enable this for another platform, just adjust
// the #ifdefs and add a platform_test_<platform>.cc implementation file.
class PlatformTest : public testing::Test {
protected:
PlatformTest();
virtual ~PlatformTest();
private:
NSAutoreleasePool* pool_;
};
#else
typedef testing::Test PlatformTest;
#endif // GTEST_OS_MAC
#endif // TESTING_PLATFORM_TEST_H_
## Instruction:
Change visibility of the destructor to public.
PlatformTest's destructor was set as protected, though the parent class
testing::Test declares it public.
BUG=none
Review URL: https://chromiumcodereview.appspot.com/11038058
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@161352 0039d316-1c4b-4281-b951-d872f2087c98
## Code After:
// Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef TESTING_PLATFORM_TEST_H_
#define TESTING_PLATFORM_TEST_H_
#include <gtest/gtest.h>
#if defined(GTEST_OS_MAC)
#ifdef __OBJC__
@class NSAutoreleasePool;
#else
class NSAutoreleasePool;
#endif
// The purpose of this class us to provide a hook for platform-specific
// operations across unit tests. For example, on the Mac, it creates and
// releases an outer NSAutoreleasePool for each test case. For now, it's only
// implemented on the Mac. To enable this for another platform, just adjust
// the #ifdefs and add a platform_test_<platform>.cc implementation file.
class PlatformTest : public testing::Test {
public:
virtual ~PlatformTest();
protected:
PlatformTest();
private:
NSAutoreleasePool* pool_;
};
#else
typedef testing::Test PlatformTest;
#endif // GTEST_OS_MAC
#endif // TESTING_PLATFORM_TEST_H_
|
...
// implemented on the Mac. To enable this for another platform, just adjust
// the #ifdefs and add a platform_test_<platform>.cc implementation file.
class PlatformTest : public testing::Test {
public:
virtual ~PlatformTest();
protected:
PlatformTest();
private:
NSAutoreleasePool* pool_;
...
|
5e25577d067f891474c722000327026744068e88
|
src/unittest/python/permission_lambda_tests.py
|
src/unittest/python/permission_lambda_tests.py
|
from unittest2 import TestCase
import simplejson as json
import boto3
from moto import mock_s3
import permission_lambda
class PermissionLambdaTests(TestCase):
def _get_permission_statements(self, client, queue_url):
""" Return a list of policy statements for given queue"""
policy_response = client.get_queue_attributes(
QueueUrl=queue_url, AttributeNames=['Policy'])
policy = policy_response['Attributes']['Policy']
return json.loads(policy)['Statement']
@mock_s3
def test_get_usofa_accountlist_from_bucket(self):
bucketname = "testbucket"
usofa_data = {
"account1": {
"id": "123456789",
"email": "[email protected]"
},
"account2": {
"id": "987654321",
"email": "[email protected]"
}
}
client = boto3.client('s3')
client.create_bucket(
Bucket=bucketname,
CreateBucketConfiguration={
'LocationConstraint': 'eu-west-1'
})
client.put_object(
Bucket=bucketname,
Key="accounts.json",
Body=json.dumps(usofa_data)
)
accountlist = permission_lambda.get_usofa_accountlist(bucketname)
accountlist.sort()
self.assertEqual(accountlist, ["123456789", "987654321"])
|
from unittest2 import TestCase
import simplejson as json
import boto3
from moto import mock_s3
import permission_lambda
class PermissionLambdaTests(TestCase):
@mock_s3
def test_get_usofa_accountlist_from_bucket(self):
bucketname = "testbucket"
usofa_data = {
"account1": {
"id": "123456789",
"email": "[email protected]"
},
"account2": {
"id": "987654321",
"email": "[email protected]"
}
}
client = boto3.client('s3')
client.create_bucket(
Bucket=bucketname,
CreateBucketConfiguration={
'LocationConstraint': 'eu-west-1'
})
client.put_object(
Bucket=bucketname,
Key="accounts.json",
Body=json.dumps(usofa_data)
)
accountlist = permission_lambda.get_usofa_accountlist(bucketname)
accountlist.sort()
self.assertEqual(accountlist, ["123456789", "987654321"])
|
Remove Unittests done as integrationtests, due to NotImplementedErrors from moto
|
PIO-129: Remove Unittests done as integrationtests, due to NotImplementedErrors from moto
|
Python
|
apache-2.0
|
ImmobilienScout24/aws-set-sqs-permission-lambda
|
python
|
## Code Before:
from unittest2 import TestCase
import simplejson as json
import boto3
from moto import mock_s3
import permission_lambda
class PermissionLambdaTests(TestCase):
def _get_permission_statements(self, client, queue_url):
""" Return a list of policy statements for given queue"""
policy_response = client.get_queue_attributes(
QueueUrl=queue_url, AttributeNames=['Policy'])
policy = policy_response['Attributes']['Policy']
return json.loads(policy)['Statement']
@mock_s3
def test_get_usofa_accountlist_from_bucket(self):
bucketname = "testbucket"
usofa_data = {
"account1": {
"id": "123456789",
"email": "[email protected]"
},
"account2": {
"id": "987654321",
"email": "[email protected]"
}
}
client = boto3.client('s3')
client.create_bucket(
Bucket=bucketname,
CreateBucketConfiguration={
'LocationConstraint': 'eu-west-1'
})
client.put_object(
Bucket=bucketname,
Key="accounts.json",
Body=json.dumps(usofa_data)
)
accountlist = permission_lambda.get_usofa_accountlist(bucketname)
accountlist.sort()
self.assertEqual(accountlist, ["123456789", "987654321"])
## Instruction:
PIO-129: Remove Unittests done as integrationtests, due to NotImplementedErrors from moto
## Code After:
from unittest2 import TestCase
import simplejson as json
import boto3
from moto import mock_s3
import permission_lambda
class PermissionLambdaTests(TestCase):
@mock_s3
def test_get_usofa_accountlist_from_bucket(self):
bucketname = "testbucket"
usofa_data = {
"account1": {
"id": "123456789",
"email": "[email protected]"
},
"account2": {
"id": "987654321",
"email": "[email protected]"
}
}
client = boto3.client('s3')
client.create_bucket(
Bucket=bucketname,
CreateBucketConfiguration={
'LocationConstraint': 'eu-west-1'
})
client.put_object(
Bucket=bucketname,
Key="accounts.json",
Body=json.dumps(usofa_data)
)
accountlist = permission_lambda.get_usofa_accountlist(bucketname)
accountlist.sort()
self.assertEqual(accountlist, ["123456789", "987654321"])
|
# ... existing code ...
class PermissionLambdaTests(TestCase):
@mock_s3
def test_get_usofa_accountlist_from_bucket(self):
bucketname = "testbucket"
# ... rest of the code ...
|
48075a16190bbcc3d260dfa242a5553b129de8a8
|
tests/test_see.py
|
tests/test_see.py
|
from __future__ import print_function, unicode_literals
try:
import unittest2 as unittest
except ImportError:
import unittest
import see
class TestSee(unittest.TestCase):
def test_line_width(self):
# Arrange
default_width = 1
max_width = 1
# Act
width = see.line_width(default_width, max_width)
# Assert
self.assertIsInstance(width, int)
self.assertEqual(width, 1)
def test_regex_filter(self):
# Arrange
names = ["george", "helen"]
pat = "or*"
# Act
out = see.regex_filter(names, pat)
# Assert
self.assertIsInstance(out, tuple)
self.assertEqual(out, ("george",))
def test_fn_filter(self):
# Arrange
names = ["george", "helen"]
pat = "*or*"
# Act
out = see.fn_filter(names, pat)
# Assert
self.assertIsInstance(out, tuple)
self.assertEqual(out, ("george",))
def test_see_with_no_args(self):
# Act
out = see.see()
# Assert
self.assertIsInstance(out, see._SeeOutput)
if __name__ == '__main__':
unittest.main()
# End of file
|
from __future__ import print_function, unicode_literals
try:
import unittest2 as unittest
except ImportError:
import unittest
import os
import sys
sys.path.insert(0, os.path.dirname(__file__))
import see
class TestSee(unittest.TestCase):
def test_line_width(self):
# Arrange
default_width = 1
max_width = 1
# Act
width = see.line_width(default_width, max_width)
# Assert
self.assertIsInstance(width, int)
self.assertEqual(width, 1)
def test_regex_filter(self):
# Arrange
names = ["george", "helen"]
pat = "or*"
# Act
out = see.regex_filter(names, pat)
# Assert
self.assertIsInstance(out, tuple)
self.assertEqual(out, ("george",))
def test_fn_filter(self):
# Arrange
names = ["george", "helen"]
pat = "*or*"
# Act
out = see.fn_filter(names, pat)
# Assert
self.assertIsInstance(out, tuple)
self.assertEqual(out, ("george",))
def test_see_with_no_args(self):
# Act
out = see.see()
# Assert
self.assertIsInstance(out, see._SeeOutput)
if __name__ == '__main__':
unittest.main()
# End of file
|
Update tests to import see
|
Update tests to import see
|
Python
|
bsd-3-clause
|
araile/see
|
python
|
## Code Before:
from __future__ import print_function, unicode_literals
try:
import unittest2 as unittest
except ImportError:
import unittest
import see
class TestSee(unittest.TestCase):
def test_line_width(self):
# Arrange
default_width = 1
max_width = 1
# Act
width = see.line_width(default_width, max_width)
# Assert
self.assertIsInstance(width, int)
self.assertEqual(width, 1)
def test_regex_filter(self):
# Arrange
names = ["george", "helen"]
pat = "or*"
# Act
out = see.regex_filter(names, pat)
# Assert
self.assertIsInstance(out, tuple)
self.assertEqual(out, ("george",))
def test_fn_filter(self):
# Arrange
names = ["george", "helen"]
pat = "*or*"
# Act
out = see.fn_filter(names, pat)
# Assert
self.assertIsInstance(out, tuple)
self.assertEqual(out, ("george",))
def test_see_with_no_args(self):
# Act
out = see.see()
# Assert
self.assertIsInstance(out, see._SeeOutput)
if __name__ == '__main__':
unittest.main()
# End of file
## Instruction:
Update tests to import see
## Code After:
from __future__ import print_function, unicode_literals
try:
import unittest2 as unittest
except ImportError:
import unittest
import os
import sys
sys.path.insert(0, os.path.dirname(__file__))
import see
class TestSee(unittest.TestCase):
def test_line_width(self):
# Arrange
default_width = 1
max_width = 1
# Act
width = see.line_width(default_width, max_width)
# Assert
self.assertIsInstance(width, int)
self.assertEqual(width, 1)
def test_regex_filter(self):
# Arrange
names = ["george", "helen"]
pat = "or*"
# Act
out = see.regex_filter(names, pat)
# Assert
self.assertIsInstance(out, tuple)
self.assertEqual(out, ("george",))
def test_fn_filter(self):
# Arrange
names = ["george", "helen"]
pat = "*or*"
# Act
out = see.fn_filter(names, pat)
# Assert
self.assertIsInstance(out, tuple)
self.assertEqual(out, ("george",))
def test_see_with_no_args(self):
# Act
out = see.see()
# Assert
self.assertIsInstance(out, see._SeeOutput)
if __name__ == '__main__':
unittest.main()
# End of file
|
# ... existing code ...
import unittest2 as unittest
except ImportError:
import unittest
import os
import sys
sys.path.insert(0, os.path.dirname(__file__))
import see
# ... rest of the code ...
|
fca88336777b9c47404e7b397d39ef8d3676b7b5
|
src/zone_iterator/__main__.py
|
src/zone_iterator/__main__.py
|
import gzip
import sys
from . import zone_iterator, zone_dict_to_str, ZONE_FMT_STR
try:
from colorama import Fore, init as colorama_init
except ImportError:
HAS_COLOR = False
else:
HAS_COLOR = True
def main():
if HAS_COLOR:
colors = [Fore.GREEN, Fore.MAGENTA, Fore.BLUE, Fore.CYAN, Fore.YELLOW]
colorama_init(autoreset=True)
unpacked_fmt = ZONE_FMT_STR.split()
color_format = " ".join([color for segment in
zip(colors, unpacked_fmt)
for color in segment])
zone_file = sys.argv[1]
if zone_file[-2:] == 'gz':
our_open = gzip.open
else:
our_open = open
with our_open(zone_file, mode='rt') as zonefh:
for record in zone_iterator(zonefh):
print(zone_dict_to_str(record, fmt_str=color_format))
if __name__ == "__main__":
main()
|
import argparse
import gzip
import sys
from . import zone_iterator, zone_dict_to_str, ZONE_FMT_STR
try:
from colorama import Fore, init as colorama_init
except ImportError:
HAS_COLOR = False
else:
HAS_COLOR = True
def maybe_compressed_file(filename):
if filename[-2:] == 'gz':
our_open = gzip.open
else:
our_open = open
return our_open(filename, mode='rt')
def main():
parser = argparse.ArgumentParser()
parser.add_argument('inputs', nargs='*', type=maybe_compressed_file)
args = parser.parse_args()
if HAS_COLOR:
colors = [Fore.GREEN, Fore.MAGENTA, Fore.BLUE, Fore.CYAN, Fore.YELLOW]
colorama_init(autoreset=True)
unpacked_fmt = ZONE_FMT_STR.split()
color_format = " ".join([color for segment in
zip(colors, unpacked_fmt)
for color in segment])
for inputfile in args.inputs:
with inputfile as zonefh:
for record in zone_iterator(zonefh):
print(zone_dict_to_str(record, fmt_str=color_format))
if __name__ == "__main__":
main()
|
Switch to using argparse for the main script.
|
Switch to using argparse for the main script.
|
Python
|
agpl-3.0
|
maxrp/zone_normalize
|
python
|
## Code Before:
import gzip
import sys
from . import zone_iterator, zone_dict_to_str, ZONE_FMT_STR
try:
from colorama import Fore, init as colorama_init
except ImportError:
HAS_COLOR = False
else:
HAS_COLOR = True
def main():
if HAS_COLOR:
colors = [Fore.GREEN, Fore.MAGENTA, Fore.BLUE, Fore.CYAN, Fore.YELLOW]
colorama_init(autoreset=True)
unpacked_fmt = ZONE_FMT_STR.split()
color_format = " ".join([color for segment in
zip(colors, unpacked_fmt)
for color in segment])
zone_file = sys.argv[1]
if zone_file[-2:] == 'gz':
our_open = gzip.open
else:
our_open = open
with our_open(zone_file, mode='rt') as zonefh:
for record in zone_iterator(zonefh):
print(zone_dict_to_str(record, fmt_str=color_format))
if __name__ == "__main__":
main()
## Instruction:
Switch to using argparse for the main script.
## Code After:
import argparse
import gzip
import sys
from . import zone_iterator, zone_dict_to_str, ZONE_FMT_STR
try:
from colorama import Fore, init as colorama_init
except ImportError:
HAS_COLOR = False
else:
HAS_COLOR = True
def maybe_compressed_file(filename):
if filename[-2:] == 'gz':
our_open = gzip.open
else:
our_open = open
return our_open(filename, mode='rt')
def main():
parser = argparse.ArgumentParser()
parser.add_argument('inputs', nargs='*', type=maybe_compressed_file)
args = parser.parse_args()
if HAS_COLOR:
colors = [Fore.GREEN, Fore.MAGENTA, Fore.BLUE, Fore.CYAN, Fore.YELLOW]
colorama_init(autoreset=True)
unpacked_fmt = ZONE_FMT_STR.split()
color_format = " ".join([color for segment in
zip(colors, unpacked_fmt)
for color in segment])
for inputfile in args.inputs:
with inputfile as zonefh:
for record in zone_iterator(zonefh):
print(zone_dict_to_str(record, fmt_str=color_format))
if __name__ == "__main__":
main()
|
...
import argparse
import gzip
import sys
...
HAS_COLOR = True
def maybe_compressed_file(filename):
if filename[-2:] == 'gz':
our_open = gzip.open
else:
our_open = open
return our_open(filename, mode='rt')
def main():
parser = argparse.ArgumentParser()
parser.add_argument('inputs', nargs='*', type=maybe_compressed_file)
args = parser.parse_args()
if HAS_COLOR:
colors = [Fore.GREEN, Fore.MAGENTA, Fore.BLUE, Fore.CYAN, Fore.YELLOW]
...
zip(colors, unpacked_fmt)
for color in segment])
for inputfile in args.inputs:
with inputfile as zonefh:
for record in zone_iterator(zonefh):
print(zone_dict_to_str(record, fmt_str=color_format))
if __name__ == "__main__":
main()
...
|
47f46e3237ba2f746193e9074136f805e71bacec
|
pysteps/cascade/interface.py
|
pysteps/cascade/interface.py
|
from pysteps.cascade import decomposition, bandpass_filters
_cascade_methods = dict()
_cascade_methods['fft'] = decomposition.decomposition_fft
_cascade_methods['gaussian'] = bandpass_filters.filter_gaussian
_cascade_methods['uniform'] = bandpass_filters.filter_uniform
def get_method(name):
"""
Return a callable function for the bandpass filter or decomposition method
corresponding to the given name.\n
Filter methods:
+-------------------+------------------------------------------------------+
| Name | Description |
+===================+======================================================+
| gaussian | implementation of a bandpass filter using Gaussian |
| | weights |
+-------------------+------------------------------------------------------+
| uniform | implementation of a filter where all weights are set |
| | to one |
+-------------------+------------------------------------------------------+
Decomposition methods:
+-------------------+------------------------------------------------------+
| Name | Description |
+===================+======================================================+
| fft | decomposition based on Fast Fourier Transform (FFT) |
| | and a bandpass filter |
+-------------------+------------------------------------------------------+
"""
if isinstance(name, str):
name = name.lower()
try:
return _cascade_methods[name]
except KeyError:
raise ValueError("Unknown method {}\n".format(name)
+ "The available methods are:"
+ str(list(_cascade_methods.keys()))) from None
|
from pysteps.cascade import decomposition, bandpass_filters
_cascade_methods = dict()
_cascade_methods['fft'] = decomposition.decomposition_fft
_cascade_methods['gaussian'] = bandpass_filters.filter_gaussian
_cascade_methods['uniform'] = bandpass_filters.filter_uniform
def get_method(name):
"""
Return a callable function for the bandpass filter or decomposition method
corresponding to the given name.\n
Filter methods:
+-------------------+------------------------------------------------------+
| Name | Description |
+===================+======================================================+
| gaussian | implementation of a bandpass filter using Gaussian |
| | weights |
+-------------------+------------------------------------------------------+
| uniform | implementation of a filter where all weights are set |
| | to one |
+-------------------+------------------------------------------------------+
Decomposition methods:
+-------------------+------------------------------------------------------+
| Name | Description |
+===================+======================================================+
| fft | decomposition based on Fast Fourier Transform (FFT) |
| | and a bandpass filter |
+-------------------+------------------------------------------------------+
"""
if isinstance(name, str):
name = name.lower()
else:
raise TypeError("Only strings supported for the method's names.\n"
+ "Available names:"
+ str(list(_cascade_methods.keys()))) from None
try:
return _cascade_methods[name]
except KeyError:
raise ValueError("Unknown method {}\n".format(name)
+ "The available methods are:"
+ str(list(_cascade_methods.keys()))) from None
|
Raise exception on incorrect argument type
|
Raise exception on incorrect argument type
|
Python
|
bsd-3-clause
|
pySTEPS/pysteps
|
python
|
## Code Before:
from pysteps.cascade import decomposition, bandpass_filters
_cascade_methods = dict()
_cascade_methods['fft'] = decomposition.decomposition_fft
_cascade_methods['gaussian'] = bandpass_filters.filter_gaussian
_cascade_methods['uniform'] = bandpass_filters.filter_uniform
def get_method(name):
"""
Return a callable function for the bandpass filter or decomposition method
corresponding to the given name.\n
Filter methods:
+-------------------+------------------------------------------------------+
| Name | Description |
+===================+======================================================+
| gaussian | implementation of a bandpass filter using Gaussian |
| | weights |
+-------------------+------------------------------------------------------+
| uniform | implementation of a filter where all weights are set |
| | to one |
+-------------------+------------------------------------------------------+
Decomposition methods:
+-------------------+------------------------------------------------------+
| Name | Description |
+===================+======================================================+
| fft | decomposition based on Fast Fourier Transform (FFT) |
| | and a bandpass filter |
+-------------------+------------------------------------------------------+
"""
if isinstance(name, str):
name = name.lower()
try:
return _cascade_methods[name]
except KeyError:
raise ValueError("Unknown method {}\n".format(name)
+ "The available methods are:"
+ str(list(_cascade_methods.keys()))) from None
## Instruction:
Raise exception on incorrect argument type
## Code After:
from pysteps.cascade import decomposition, bandpass_filters
_cascade_methods = dict()
_cascade_methods['fft'] = decomposition.decomposition_fft
_cascade_methods['gaussian'] = bandpass_filters.filter_gaussian
_cascade_methods['uniform'] = bandpass_filters.filter_uniform
def get_method(name):
"""
Return a callable function for the bandpass filter or decomposition method
corresponding to the given name.\n
Filter methods:
+-------------------+------------------------------------------------------+
| Name | Description |
+===================+======================================================+
| gaussian | implementation of a bandpass filter using Gaussian |
| | weights |
+-------------------+------------------------------------------------------+
| uniform | implementation of a filter where all weights are set |
| | to one |
+-------------------+------------------------------------------------------+
Decomposition methods:
+-------------------+------------------------------------------------------+
| Name | Description |
+===================+======================================================+
| fft | decomposition based on Fast Fourier Transform (FFT) |
| | and a bandpass filter |
+-------------------+------------------------------------------------------+
"""
if isinstance(name, str):
name = name.lower()
else:
raise TypeError("Only strings supported for the method's names.\n"
+ "Available names:"
+ str(list(_cascade_methods.keys()))) from None
try:
return _cascade_methods[name]
except KeyError:
raise ValueError("Unknown method {}\n".format(name)
+ "The available methods are:"
+ str(list(_cascade_methods.keys()))) from None
|
...
from pysteps.cascade import decomposition, bandpass_filters
_cascade_methods = dict()
...
if isinstance(name, str):
name = name.lower()
else:
raise TypeError("Only strings supported for the method's names.\n"
+ "Available names:"
+ str(list(_cascade_methods.keys()))) from None
try:
return _cascade_methods[name]
except KeyError:
...
|
d488c1e021c3ce4335223a407cbd82182fd83708
|
symposion/cms/managers.py
|
symposion/cms/managers.py
|
from datetime import datetime
from django.db import models
class PublishedPageManager(models.Manager):
return qs.filter(publish_date__lte=datetime.now())
def get_queryset(self):
qs = super(PublishedPageManager, self).get_queryset()
|
from django.utils import timezone
from django.db import models
class PublishedPageManager(models.Manager):
def get_queryset(self):
qs = super(PublishedPageManager, self).get_queryset()
return qs.filter(publish_date__lte=timezone.now())
|
Use timezone.now instead of datetime.now
|
Use timezone.now instead of datetime.now
|
Python
|
bsd-3-clause
|
pyconau2017/symposion,pydata/symposion,faulteh/symposion,pyohio/symposion,euroscipy/symposion,pinax/symposion,euroscipy/symposion,pyconau2017/symposion,toulibre/symposion,miurahr/symposion,faulteh/symposion,miurahr/symposion,pydata/symposion,pinax/symposion,pyohio/symposion,toulibre/symposion
|
python
|
## Code Before:
from datetime import datetime
from django.db import models
class PublishedPageManager(models.Manager):
return qs.filter(publish_date__lte=datetime.now())
def get_queryset(self):
qs = super(PublishedPageManager, self).get_queryset()
## Instruction:
Use timezone.now instead of datetime.now
## Code After:
from django.utils import timezone
from django.db import models
class PublishedPageManager(models.Manager):
def get_queryset(self):
qs = super(PublishedPageManager, self).get_queryset()
return qs.filter(publish_date__lte=timezone.now())
|
// ... existing code ...
from django.utils import timezone
from django.db import models
// ... modified code ...
class PublishedPageManager(models.Manager):
def get_queryset(self):
qs = super(PublishedPageManager, self).get_queryset()
return qs.filter(publish_date__lte=timezone.now())
// ... rest of the code ...
|
7a936665eff8a6a8f6889334ad2238cbfcded18b
|
member.py
|
member.py
|
import requests
from credentials import label_id
from gmailauth import refresh
access_token = refresh()
headers = {'Authorization': ('Bearer ' + access_token)}
def list_messages(headers):
params = {'labelIds': label_id, 'q': 'newer_than:3d'}
r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages',
headers=headers, params=params)
j = r.json()
messages = []
if 'messages' in j:
messages.extend(j['messages'])
# return messages
message_ids = []
for item in messages:
message_ids.append(item['id'])
return message_ids
print(list_messages(headers))
def get_message(headers, identity):
params = {'id': identity, format: 'metadata'}
r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages/id',
headers=headers, params=params)
j = r.json()
print(r.status_code, r.reason)
h = j['payload']
subject = ''
for header in h['headers']:
if header['name'] == 'Subject':
subject = header['value']
break
print(subject)
for item in list_messages(headers):
get_message(headers, item)
# get_message(headers, list_messages(headers))
|
import requests
from base64 import urlsafe_b64decode
from credentials import label_id, url1, url2
from gmailauth import refresh
# access_token = refresh()
headers = {'Authorization': ('Bearer ' + access_token)}
def list_messages(headers):
params = {'labelIds': label_id, 'q': 'newer_than:2d'}
r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages',
headers=headers, params=params)
j = r.json()
messages = []
if 'messages' in j:
messages.extend(j['messages'])
# return messages
message_ids = []
for item in messages:
message_ids.append(item['id'])
return message_ids
def get_message(headers, identity):
params = {'id': identity, 'format': 'raw'}
r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages/id',
headers=headers, params=params)
j = r.json()
raw = j['raw']
d = urlsafe_b64decode(raw)
p = d.decode()
s = p.find('https')
l = len(p)
print(p[s:l])
print('----------')
return(p[s:l])
# for item in list_messages(headers):
# get_message(headers, item)
|
Return the order details URL from email body.
|
Return the order details URL from email body.
There is currently no Agile API method that will return the order
details for an activity so the URL from the email must be used in
conjunction with a web scraper to get the relevant details.
|
Python
|
mit
|
deadlyraptor/reels
|
python
|
## Code Before:
import requests
from credentials import label_id
from gmailauth import refresh
access_token = refresh()
headers = {'Authorization': ('Bearer ' + access_token)}
def list_messages(headers):
params = {'labelIds': label_id, 'q': 'newer_than:3d'}
r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages',
headers=headers, params=params)
j = r.json()
messages = []
if 'messages' in j:
messages.extend(j['messages'])
# return messages
message_ids = []
for item in messages:
message_ids.append(item['id'])
return message_ids
print(list_messages(headers))
def get_message(headers, identity):
params = {'id': identity, format: 'metadata'}
r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages/id',
headers=headers, params=params)
j = r.json()
print(r.status_code, r.reason)
h = j['payload']
subject = ''
for header in h['headers']:
if header['name'] == 'Subject':
subject = header['value']
break
print(subject)
for item in list_messages(headers):
get_message(headers, item)
# get_message(headers, list_messages(headers))
## Instruction:
Return the order details URL from email body.
There is currently no Agile API method that will return the order
details for an activity so the URL from the email must be used in
conjunction with a web scraper to get the relevant details.
## Code After:
import requests
from base64 import urlsafe_b64decode
from credentials import label_id, url1, url2
from gmailauth import refresh
# access_token = refresh()
headers = {'Authorization': ('Bearer ' + access_token)}
def list_messages(headers):
params = {'labelIds': label_id, 'q': 'newer_than:2d'}
r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages',
headers=headers, params=params)
j = r.json()
messages = []
if 'messages' in j:
messages.extend(j['messages'])
# return messages
message_ids = []
for item in messages:
message_ids.append(item['id'])
return message_ids
def get_message(headers, identity):
params = {'id': identity, 'format': 'raw'}
r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages/id',
headers=headers, params=params)
j = r.json()
raw = j['raw']
d = urlsafe_b64decode(raw)
p = d.decode()
s = p.find('https')
l = len(p)
print(p[s:l])
print('----------')
return(p[s:l])
# for item in list_messages(headers):
# get_message(headers, item)
|
# ... existing code ...
import requests
from base64 import urlsafe_b64decode
from credentials import label_id, url1, url2
from gmailauth import refresh
# access_token = refresh()
headers = {'Authorization': ('Bearer ' + access_token)}
def list_messages(headers):
params = {'labelIds': label_id, 'q': 'newer_than:2d'}
r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages',
headers=headers, params=params)
# ... modified code ...
message_ids.append(item['id'])
return message_ids
def get_message(headers, identity):
params = {'id': identity, 'format': 'raw'}
r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages/id',
headers=headers, params=params)
j = r.json()
raw = j['raw']
d = urlsafe_b64decode(raw)
p = d.decode()
s = p.find('https')
l = len(p)
print(p[s:l])
print('----------')
return(p[s:l])
# for item in list_messages(headers):
# get_message(headers, item)
# ... rest of the code ...
|
06b09af12d40193ce76b4912c1bea3a7a69e6a5a
|
uspek/src/main/java/mareklangiewicz/pl/uspek/USpekJUnitRunner.kt
|
uspek/src/main/java/mareklangiewicz/pl/uspek/USpekJUnitRunner.kt
|
package mareklangiewicz.pl.uspek
import org.junit.runner.Description
import org.junit.runner.Runner
import org.junit.runner.notification.RunNotifier
import java.util.*
class USpekJUnitRunner(testClass: Class<Any>) : Runner() {
private val rootDescription = Description.createSuiteDescription(testClass.simpleName, UUID.randomUUID().toString())
private val treeCollectionLogger = TreeCollectionLogger()
init {
USpek.log = treeCollectionLogger
val instance = testClass.newInstance()
testClass.declaredMethods.forEach { it.invoke(instance) }
rootDescription.addChild(createDescriptions(treeCollectionLogger.testTree!!, testClass.name))
}
override fun getDescription(): Description {
return rootDescription
}
override fun run(notifier: RunNotifier) {
println("USpek is running....")
}
private fun createDescriptions(testBranch: TestTree, testSuite: String): Description {
val description = if (testBranch.subtests.isNotEmpty()) {
Description.createSuiteDescription(testBranch.name, UUID.randomUUID().toString())
} else {
Description.createTestDescription(testSuite, testBranch.name)
}
testBranch.subtests.forEach {
val child = createDescriptions(it, testSuite + "." + testBranch.name)
description.addChild(child)
}
return description
}
}
|
package mareklangiewicz.pl.uspek
import org.junit.runner.Description
import org.junit.runner.Runner
import org.junit.runner.notification.RunNotifier
import java.util.*
class USpekJUnitRunner(testClass: Class<Any>) : Runner() {
private val rootDescription = Description.createSuiteDescription(testClass.simpleName, UUID.randomUUID().toString())
private val treeCollectionLogger = TreeCollectionLogger()
init {
USpek.log = treeCollectionLogger
val instance = testClass.newInstance()
testClass.declaredMethods.forEach { it.invoke(instance) }
rootDescription.addChild(createDescriptions(treeCollectionLogger.testTree!!, testClass.name))
}
override fun getDescription(): Description = rootDescription
override fun run(notifier: RunNotifier) {
println("USpek is running....")
}
private fun createDescriptions(testBranch: TestTree, testSuite: String): Description {
val description = if (testBranch.subtests.isNotEmpty()) {
Description.createSuiteDescription(testBranch.name, UUID.randomUUID().toString())
} else {
Description.createTestDescription(testSuite, testBranch.name)
}
testBranch.subtests.forEach {
val child = createDescriptions(it, testSuite + "." + testBranch.name)
description.addChild(child)
}
return description
}
}
|
Convert function body to expression
|
[refactoring] Convert function body to expression
|
Kotlin
|
apache-2.0
|
langara/USpek,langara/USpek,langara/USpek
|
kotlin
|
## Code Before:
package mareklangiewicz.pl.uspek
import org.junit.runner.Description
import org.junit.runner.Runner
import org.junit.runner.notification.RunNotifier
import java.util.*
class USpekJUnitRunner(testClass: Class<Any>) : Runner() {
private val rootDescription = Description.createSuiteDescription(testClass.simpleName, UUID.randomUUID().toString())
private val treeCollectionLogger = TreeCollectionLogger()
init {
USpek.log = treeCollectionLogger
val instance = testClass.newInstance()
testClass.declaredMethods.forEach { it.invoke(instance) }
rootDescription.addChild(createDescriptions(treeCollectionLogger.testTree!!, testClass.name))
}
override fun getDescription(): Description {
return rootDescription
}
override fun run(notifier: RunNotifier) {
println("USpek is running....")
}
private fun createDescriptions(testBranch: TestTree, testSuite: String): Description {
val description = if (testBranch.subtests.isNotEmpty()) {
Description.createSuiteDescription(testBranch.name, UUID.randomUUID().toString())
} else {
Description.createTestDescription(testSuite, testBranch.name)
}
testBranch.subtests.forEach {
val child = createDescriptions(it, testSuite + "." + testBranch.name)
description.addChild(child)
}
return description
}
}
## Instruction:
[refactoring] Convert function body to expression
## Code After:
package mareklangiewicz.pl.uspek
import org.junit.runner.Description
import org.junit.runner.Runner
import org.junit.runner.notification.RunNotifier
import java.util.*
class USpekJUnitRunner(testClass: Class<Any>) : Runner() {
private val rootDescription = Description.createSuiteDescription(testClass.simpleName, UUID.randomUUID().toString())
private val treeCollectionLogger = TreeCollectionLogger()
init {
USpek.log = treeCollectionLogger
val instance = testClass.newInstance()
testClass.declaredMethods.forEach { it.invoke(instance) }
rootDescription.addChild(createDescriptions(treeCollectionLogger.testTree!!, testClass.name))
}
override fun getDescription(): Description = rootDescription
override fun run(notifier: RunNotifier) {
println("USpek is running....")
}
private fun createDescriptions(testBranch: TestTree, testSuite: String): Description {
val description = if (testBranch.subtests.isNotEmpty()) {
Description.createSuiteDescription(testBranch.name, UUID.randomUUID().toString())
} else {
Description.createTestDescription(testSuite, testBranch.name)
}
testBranch.subtests.forEach {
val child = createDescriptions(it, testSuite + "." + testBranch.name)
description.addChild(child)
}
return description
}
}
|
# ... existing code ...
rootDescription.addChild(createDescriptions(treeCollectionLogger.testTree!!, testClass.name))
}
override fun getDescription(): Description = rootDescription
override fun run(notifier: RunNotifier) {
println("USpek is running....")
# ... rest of the code ...
|
dc04c35177815ff2aee46088cac7d6790e6831dd
|
swimlane/core/search/search_result.py
|
swimlane/core/search/search_result.py
|
"""This module provides a SearchResults class."""
from types import GeneratorType
from ..resources import Record, StatsResult
from ..swimlane_dict import SwimlaneDict
__metaclass__ = type
class SearchResult:
"""A class that wraps a Swimlane search result."""
def __init__(self, report, resp):
"""Init a SearchResult.
Args:
report (Report): The report that was used to initiate the search.
resp (SwimlaneDict): The JSON response from a search request.
"""
self.is_stats = isinstance(resp, GeneratorType)
if self.is_stats:
self.stats = (StatsResult(SwimlaneDict(r)) for r in resp)
else:
self.report = report
self.count = resp["count"]
self.offset = resp["offset"]
self.limit = resp["limit"]
results = (resp["results"][report.applicationIds[0]]
if resp["results"] else [])
self.records = (Record(SwimlaneDict(r)) for r in results)
|
"""This module provides a SearchResults class."""
from types import GeneratorType
from ..resources import Record, StatsResult
from ..swimlane_dict import SwimlaneDict
__metaclass__ = type
class SearchResult:
"""A class that wraps a Swimlane search result."""
def __init__(self, report, resp):
"""Init a SearchResult.
Args:
report (Report): The report that was used to initiate the search.
resp (SwimlaneDict): The JSON response from a search request.
"""
self.is_stats = isinstance(resp, GeneratorType)
if self.is_stats:
self.stats = (StatsResult(SwimlaneDict(r)) for r in resp)
else:
self.report = report
self.count = resp["count"]
self.offset = resp["offset"]
self.limit = resp["limit"]
results = []
if report.applicationIds[0] in resp['results']:
results = resp["results"][report.applicationIds[0]]
self.records = (Record(SwimlaneDict(r)) for r in results)
|
Fix a KeyError that is raised when there are no reuslts
|
Fix a KeyError that is raised when there are no reuslts
|
Python
|
mit
|
Swimlane/sw-python-client
|
python
|
## Code Before:
"""This module provides a SearchResults class."""
from types import GeneratorType
from ..resources import Record, StatsResult
from ..swimlane_dict import SwimlaneDict
__metaclass__ = type
class SearchResult:
"""A class that wraps a Swimlane search result."""
def __init__(self, report, resp):
"""Init a SearchResult.
Args:
report (Report): The report that was used to initiate the search.
resp (SwimlaneDict): The JSON response from a search request.
"""
self.is_stats = isinstance(resp, GeneratorType)
if self.is_stats:
self.stats = (StatsResult(SwimlaneDict(r)) for r in resp)
else:
self.report = report
self.count = resp["count"]
self.offset = resp["offset"]
self.limit = resp["limit"]
results = (resp["results"][report.applicationIds[0]]
if resp["results"] else [])
self.records = (Record(SwimlaneDict(r)) for r in results)
## Instruction:
Fix a KeyError that is raised when there are no reuslts
## Code After:
"""This module provides a SearchResults class."""
from types import GeneratorType
from ..resources import Record, StatsResult
from ..swimlane_dict import SwimlaneDict
__metaclass__ = type
class SearchResult:
"""A class that wraps a Swimlane search result."""
def __init__(self, report, resp):
"""Init a SearchResult.
Args:
report (Report): The report that was used to initiate the search.
resp (SwimlaneDict): The JSON response from a search request.
"""
self.is_stats = isinstance(resp, GeneratorType)
if self.is_stats:
self.stats = (StatsResult(SwimlaneDict(r)) for r in resp)
else:
self.report = report
self.count = resp["count"]
self.offset = resp["offset"]
self.limit = resp["limit"]
results = []
if report.applicationIds[0] in resp['results']:
results = resp["results"][report.applicationIds[0]]
self.records = (Record(SwimlaneDict(r)) for r in results)
|
...
self.count = resp["count"]
self.offset = resp["offset"]
self.limit = resp["limit"]
results = []
if report.applicationIds[0] in resp['results']:
results = resp["results"][report.applicationIds[0]]
self.records = (Record(SwimlaneDict(r)) for r in results)
...
|
b49fc69262882398330bd1602f003175ea75c66b
|
Android/Les4ElefantCowork/app/src/main/java/com/les4elefantastiq/les4elefantcowork/models/LiveFeedMessage.java
|
Android/Les4ElefantCowork/app/src/main/java/com/les4elefantastiq/les4elefantcowork/models/LiveFeedMessage.java
|
package com.les4elefantastiq.les4elefantcowork.models;
public class LiveFeedMessage {
// -------------- Objects, Variables -------------- //
public String text;
public int type;
public String dateTime; // Date ?
public String tweetLink;
public String sender;
public String coworkerLinkedInId;
public Boolean isBirthday;
// ----------------- Constructor ------------------ //
public LiveFeedMessage(String text, int type, String dateTime, String tweetLink, String sender, String coworkerLinkedInId, Boolean isBirthday) {
this.text = text;
this.type = type;
this.dateTime = dateTime;
this.tweetLink = tweetLink;
this.sender = sender;
this.coworkerLinkedInId = coworkerLinkedInId;
this.isBirthday = isBirthday;
}
// ---------------- Public Methods ---------------- //
// ---------------- Private Methods --------------- //
// ----------------- Miscellaneous ---------------- //
}
|
package com.les4elefantastiq.les4elefantcowork.models;
public class LiveFeedMessage {
// -------------- Objects, Variables -------------- //
public String text;
public int type;
public String dateTime; // Date ?
public String tweetLink;
public String sender;
public String coworkerLinkedInId;
public Boolean isBirthday;
public String pictureUrl;
// ----------------- Constructor ------------------ //
public LiveFeedMessage(String text, int type, String dateTime, String tweetLink, String sender, String coworkerLinkedInId, Boolean isBirthday, String pictureUrl) {
this.text = text;
this.type = type;
this.dateTime = dateTime;
this.tweetLink = tweetLink;
this.sender = sender;
this.coworkerLinkedInId = coworkerLinkedInId;
this.isBirthday = isBirthday;
this.pictureUrl = pictureUrl;
}
// ---------------- Public Methods ---------------- //
// ---------------- Private Methods --------------- //
// ----------------- Miscellaneous ---------------- //
}
|
Add pictureUrl to LivefeedMessage model
|
Add pictureUrl to LivefeedMessage model
|
Java
|
mit
|
micbelgique/DevCamp2016-Les4Elefantastiq,micbelgique/DevCamp2016-Les4Elefantastiq,micbelgique/DevCamp2016-Les4Elefantastiq,micbelgique/DevCamp2016-Les4Elefantastiq
|
java
|
## Code Before:
package com.les4elefantastiq.les4elefantcowork.models;
public class LiveFeedMessage {
// -------------- Objects, Variables -------------- //
public String text;
public int type;
public String dateTime; // Date ?
public String tweetLink;
public String sender;
public String coworkerLinkedInId;
public Boolean isBirthday;
// ----------------- Constructor ------------------ //
public LiveFeedMessage(String text, int type, String dateTime, String tweetLink, String sender, String coworkerLinkedInId, Boolean isBirthday) {
this.text = text;
this.type = type;
this.dateTime = dateTime;
this.tweetLink = tweetLink;
this.sender = sender;
this.coworkerLinkedInId = coworkerLinkedInId;
this.isBirthday = isBirthday;
}
// ---------------- Public Methods ---------------- //
// ---------------- Private Methods --------------- //
// ----------------- Miscellaneous ---------------- //
}
## Instruction:
Add pictureUrl to LivefeedMessage model
## Code After:
package com.les4elefantastiq.les4elefantcowork.models;
public class LiveFeedMessage {
// -------------- Objects, Variables -------------- //
public String text;
public int type;
public String dateTime; // Date ?
public String tweetLink;
public String sender;
public String coworkerLinkedInId;
public Boolean isBirthday;
public String pictureUrl;
// ----------------- Constructor ------------------ //
public LiveFeedMessage(String text, int type, String dateTime, String tweetLink, String sender, String coworkerLinkedInId, Boolean isBirthday, String pictureUrl) {
this.text = text;
this.type = type;
this.dateTime = dateTime;
this.tweetLink = tweetLink;
this.sender = sender;
this.coworkerLinkedInId = coworkerLinkedInId;
this.isBirthday = isBirthday;
this.pictureUrl = pictureUrl;
}
// ---------------- Public Methods ---------------- //
// ---------------- Private Methods --------------- //
// ----------------- Miscellaneous ---------------- //
}
|
// ... existing code ...
public String sender;
public String coworkerLinkedInId;
public Boolean isBirthday;
public String pictureUrl;
// ----------------- Constructor ------------------ //
public LiveFeedMessage(String text, int type, String dateTime, String tweetLink, String sender, String coworkerLinkedInId, Boolean isBirthday, String pictureUrl) {
this.text = text;
this.type = type;
this.dateTime = dateTime;
// ... modified code ...
this.sender = sender;
this.coworkerLinkedInId = coworkerLinkedInId;
this.isBirthday = isBirthday;
this.pictureUrl = pictureUrl;
}
// ---------------- Public Methods ---------------- //
// ... rest of the code ...
|
61a14eaff3979b4c43363e897642188346a683bf
|
app/src/main/java/org/wikipedia/interlanguage/AcceptLanguageUtil.java
|
app/src/main/java/org/wikipedia/interlanguage/AcceptLanguageUtil.java
|
package org.wikipedia.interlanguage;
import android.support.annotation.NonNull;
public final class AcceptLanguageUtil {
private static final float APP_LANGUAGE_QUALITY = .9f;
private static final float SYSTEM_LANGUAGE_QUALITY = .8f;
/**
* @return The value that should go in the Accept-Language header.
*/
@NonNull
public static String getAcceptLanguage(@NonNull String siteLanguageCode,
@NonNull String appLanguageCode,
@NonNull String systemLanguageCode) {
String acceptLanguage = siteLanguageCode;
acceptLanguage = appendToAcceptLanguage(acceptLanguage, appLanguageCode, APP_LANGUAGE_QUALITY);
acceptLanguage = appendToAcceptLanguage(acceptLanguage, systemLanguageCode, SYSTEM_LANGUAGE_QUALITY);
return acceptLanguage;
}
@NonNull
private static String appendToAcceptLanguage(@NonNull String acceptLanguage,
@NonNull String languageCode, float quality) {
// If accept-language already contains the language, just return accept-language.
if (acceptLanguage.contains(languageCode)) {
return acceptLanguage;
}
// If accept-language is empty, don't append. Just return the language.
if (acceptLanguage.isEmpty()) {
return languageCode;
}
// Accept-language is nonempty, append the language.
return String.format("%s,%s;q=%.1f", acceptLanguage, languageCode, quality);
}
private AcceptLanguageUtil() { }
}
|
package org.wikipedia.interlanguage;
import android.support.annotation.NonNull;
import java.util.Locale;
public final class AcceptLanguageUtil {
private static final float APP_LANGUAGE_QUALITY = .9f;
private static final float SYSTEM_LANGUAGE_QUALITY = .8f;
/**
* @return The value that should go in the Accept-Language header.
*/
@NonNull
public static String getAcceptLanguage(@NonNull String siteLanguageCode,
@NonNull String appLanguageCode,
@NonNull String systemLanguageCode) {
String acceptLanguage = siteLanguageCode;
acceptLanguage = appendToAcceptLanguage(acceptLanguage, appLanguageCode, APP_LANGUAGE_QUALITY);
acceptLanguage = appendToAcceptLanguage(acceptLanguage, systemLanguageCode, SYSTEM_LANGUAGE_QUALITY);
return acceptLanguage;
}
@NonNull
private static String appendToAcceptLanguage(@NonNull String acceptLanguage,
@NonNull String languageCode, float quality) {
// If accept-language already contains the language, just return accept-language.
if (acceptLanguage.contains(languageCode)) {
return acceptLanguage;
}
// If accept-language is empty, don't append. Just return the language.
if (acceptLanguage.isEmpty()) {
return languageCode;
}
// Accept-language is nonempty, append the language.
return String.format(Locale.ROOT, "%s,%s;q=%.1f", acceptLanguage, languageCode, quality);
}
private AcceptLanguageUtil() { }
}
|
Fix issue with Accept-Language header for certain system languages.
|
Fix issue with Accept-Language header for certain system languages.
The proper Locale was not being used when calling String.format(), which
was causing the resulting HTTP requests to fail.
Bug: T118910
Change-Id: I09e8c57d92eb969de816ed5025ad17f66aec386c
|
Java
|
apache-2.0
|
carloshwa/apps-android-wikipedia,SAGROUP2/apps-android-wikipedia,dbrant/apps-android-wikipedia,Duct-and-rice/KrswtkhrWiki4Android,SAGROUP2/apps-android-wikipedia,carloshwa/apps-android-wikipedia,carloshwa/apps-android-wikipedia,dbrant/apps-android-wikipedia,Duct-and-rice/KrswtkhrWiki4Android,anirudh24seven/apps-android-wikipedia,SAGROUP2/apps-android-wikipedia,anirudh24seven/apps-android-wikipedia,SAGROUP2/apps-android-wikipedia,wikimedia/apps-android-wikipedia,anirudh24seven/apps-android-wikipedia,wikimedia/apps-android-wikipedia,dbrant/apps-android-wikipedia,carloshwa/apps-android-wikipedia,Duct-and-rice/KrswtkhrWiki4Android,Duct-and-rice/KrswtkhrWiki4Android,wikimedia/apps-android-wikipedia,dbrant/apps-android-wikipedia,Duct-and-rice/KrswtkhrWiki4Android,anirudh24seven/apps-android-wikipedia,dbrant/apps-android-wikipedia,SAGROUP2/apps-android-wikipedia,carloshwa/apps-android-wikipedia,wikimedia/apps-android-wikipedia,anirudh24seven/apps-android-wikipedia
|
java
|
## Code Before:
package org.wikipedia.interlanguage;
import android.support.annotation.NonNull;
public final class AcceptLanguageUtil {
private static final float APP_LANGUAGE_QUALITY = .9f;
private static final float SYSTEM_LANGUAGE_QUALITY = .8f;
/**
* @return The value that should go in the Accept-Language header.
*/
@NonNull
public static String getAcceptLanguage(@NonNull String siteLanguageCode,
@NonNull String appLanguageCode,
@NonNull String systemLanguageCode) {
String acceptLanguage = siteLanguageCode;
acceptLanguage = appendToAcceptLanguage(acceptLanguage, appLanguageCode, APP_LANGUAGE_QUALITY);
acceptLanguage = appendToAcceptLanguage(acceptLanguage, systemLanguageCode, SYSTEM_LANGUAGE_QUALITY);
return acceptLanguage;
}
@NonNull
private static String appendToAcceptLanguage(@NonNull String acceptLanguage,
@NonNull String languageCode, float quality) {
// If accept-language already contains the language, just return accept-language.
if (acceptLanguage.contains(languageCode)) {
return acceptLanguage;
}
// If accept-language is empty, don't append. Just return the language.
if (acceptLanguage.isEmpty()) {
return languageCode;
}
// Accept-language is nonempty, append the language.
return String.format("%s,%s;q=%.1f", acceptLanguage, languageCode, quality);
}
private AcceptLanguageUtil() { }
}
## Instruction:
Fix issue with Accept-Language header for certain system languages.
The proper Locale was not being used when calling String.format(), which
was causing the resulting HTTP requests to fail.
Bug: T118910
Change-Id: I09e8c57d92eb969de816ed5025ad17f66aec386c
## Code After:
package org.wikipedia.interlanguage;
import android.support.annotation.NonNull;
import java.util.Locale;
public final class AcceptLanguageUtil {
private static final float APP_LANGUAGE_QUALITY = .9f;
private static final float SYSTEM_LANGUAGE_QUALITY = .8f;
/**
* @return The value that should go in the Accept-Language header.
*/
@NonNull
public static String getAcceptLanguage(@NonNull String siteLanguageCode,
@NonNull String appLanguageCode,
@NonNull String systemLanguageCode) {
String acceptLanguage = siteLanguageCode;
acceptLanguage = appendToAcceptLanguage(acceptLanguage, appLanguageCode, APP_LANGUAGE_QUALITY);
acceptLanguage = appendToAcceptLanguage(acceptLanguage, systemLanguageCode, SYSTEM_LANGUAGE_QUALITY);
return acceptLanguage;
}
@NonNull
private static String appendToAcceptLanguage(@NonNull String acceptLanguage,
@NonNull String languageCode, float quality) {
// If accept-language already contains the language, just return accept-language.
if (acceptLanguage.contains(languageCode)) {
return acceptLanguage;
}
// If accept-language is empty, don't append. Just return the language.
if (acceptLanguage.isEmpty()) {
return languageCode;
}
// Accept-language is nonempty, append the language.
return String.format(Locale.ROOT, "%s,%s;q=%.1f", acceptLanguage, languageCode, quality);
}
private AcceptLanguageUtil() { }
}
|
# ... existing code ...
package org.wikipedia.interlanguage;
import android.support.annotation.NonNull;
import java.util.Locale;
public final class AcceptLanguageUtil {
private static final float APP_LANGUAGE_QUALITY = .9f;
# ... modified code ...
}
// Accept-language is nonempty, append the language.
return String.format(Locale.ROOT, "%s,%s;q=%.1f", acceptLanguage, languageCode, quality);
}
private AcceptLanguageUtil() { }
# ... rest of the code ...
|
b2ed8de7302cbea0a80b87f3dfe370ca0a60d75a
|
kawasemi/backends/github.py
|
kawasemi/backends/github.py
|
import json
import requests
from .base import BaseChannel
from ..exceptions import HttpError
class GitHubChannel(BaseChannel):
def __init__(self, token, owner, repository,
base_url="https://api.github.com", *args, **kwargs):
self.token = token
self.url = base_url + "/repos/" + owner + "/" + repository + "/issues"
def send(self, message, fail_silently=False, options=None):
headers = {
"Authorization": "token " + self.token,
"Content-Type": "application/json"
}
payload = {
"title": message
}
self._set_payload_from_options(payload, options, "github", [
"body", "milestone", "labels", "assignees"])
try:
response = requests.post(self.url,
headers=headers,
data=json.dumps(payload))
if response.status_code != requests.codes.created:
raise HttpError(response.status_code, response.text)
except:
if not fail_silently:
raise
|
import json
import requests
from .base import BaseChannel
from ..exceptions import HttpError
class GitHubChannel(BaseChannel):
def __init__(self, token, owner, repository,
base_url="https://api.github.com", *args, **kwargs):
self.token = token
self.url = base_url + "/repos/" + owner + "/" + repository + "/issues"
def send(self, message, fail_silently=False, options=None):
headers = {
"Accept": "application/vnd.github.v3+json",
"Authorization": "token " + self.token,
"Content-Type": "application/json"
}
payload = {
"title": message
}
self._set_payload_from_options(payload, options, "github", [
"body", "milestone", "labels", "assignees"])
try:
response = requests.post(self.url,
headers=headers,
data=json.dumps(payload))
if response.status_code != requests.codes.created:
raise HttpError(response.status_code, response.text)
except:
if not fail_silently:
raise
|
Set Accept header explicitly in GitHubChannel
|
Set Accept header explicitly in GitHubChannel
|
Python
|
mit
|
ymyzk/kawasemi,ymyzk/django-channels
|
python
|
## Code Before:
import json
import requests
from .base import BaseChannel
from ..exceptions import HttpError
class GitHubChannel(BaseChannel):
def __init__(self, token, owner, repository,
base_url="https://api.github.com", *args, **kwargs):
self.token = token
self.url = base_url + "/repos/" + owner + "/" + repository + "/issues"
def send(self, message, fail_silently=False, options=None):
headers = {
"Authorization": "token " + self.token,
"Content-Type": "application/json"
}
payload = {
"title": message
}
self._set_payload_from_options(payload, options, "github", [
"body", "milestone", "labels", "assignees"])
try:
response = requests.post(self.url,
headers=headers,
data=json.dumps(payload))
if response.status_code != requests.codes.created:
raise HttpError(response.status_code, response.text)
except:
if not fail_silently:
raise
## Instruction:
Set Accept header explicitly in GitHubChannel
## Code After:
import json
import requests
from .base import BaseChannel
from ..exceptions import HttpError
class GitHubChannel(BaseChannel):
def __init__(self, token, owner, repository,
base_url="https://api.github.com", *args, **kwargs):
self.token = token
self.url = base_url + "/repos/" + owner + "/" + repository + "/issues"
def send(self, message, fail_silently=False, options=None):
headers = {
"Accept": "application/vnd.github.v3+json",
"Authorization": "token " + self.token,
"Content-Type": "application/json"
}
payload = {
"title": message
}
self._set_payload_from_options(payload, options, "github", [
"body", "milestone", "labels", "assignees"])
try:
response = requests.post(self.url,
headers=headers,
data=json.dumps(payload))
if response.status_code != requests.codes.created:
raise HttpError(response.status_code, response.text)
except:
if not fail_silently:
raise
|
# ... existing code ...
def send(self, message, fail_silently=False, options=None):
headers = {
"Accept": "application/vnd.github.v3+json",
"Authorization": "token " + self.token,
"Content-Type": "application/json"
}
# ... rest of the code ...
|
e08c7352fc5de7e098e434bfc1f2df4384c3405a
|
tests/base.py
|
tests/base.py
|
import unittest
import glob
import os
import mmstats
class MmstatsTestCase(unittest.TestCase):
def setUp(self):
super(MmstatsTestCase, self).setUp()
self.path = mmstats.DEFAULT_PATH
# Clean out stale mmstats files
for fn in glob.glob(os.path.join(self.path, 'test*.mmstats')):
try:
os.remove(fn)
pass
except OSError:
print 'Could not remove: %s' % fn
def tearDown(self):
# clean the dir after tests
for fn in glob.glob(os.path.join(self.path, 'test*.mmstats')):
try:
os.remove(fn)
pass
except OSError:
continue
|
import unittest
import glob
import os
import mmstats
class MmstatsTestCase(unittest.TestCase):
@property
def files(self):
return glob.glob(os.path.join(self.path, 'test*.mmstats'))
def setUp(self):
super(MmstatsTestCase, self).setUp()
self.path = mmstats.DEFAULT_PATH
# Clean out stale mmstats files
for fn in self.files:
try:
os.remove(fn)
pass
except OSError:
print 'Could not remove: %s' % fn
def tearDown(self):
# clean the dir after tests
for fn in self.files:
try:
os.remove(fn)
pass
except OSError:
continue
|
Refactor test harness file discovery
|
Refactor test harness file discovery
|
Python
|
bsd-3-clause
|
schmichael/mmstats,schmichael/mmstats,schmichael/mmstats,schmichael/mmstats
|
python
|
## Code Before:
import unittest
import glob
import os
import mmstats
class MmstatsTestCase(unittest.TestCase):
def setUp(self):
super(MmstatsTestCase, self).setUp()
self.path = mmstats.DEFAULT_PATH
# Clean out stale mmstats files
for fn in glob.glob(os.path.join(self.path, 'test*.mmstats')):
try:
os.remove(fn)
pass
except OSError:
print 'Could not remove: %s' % fn
def tearDown(self):
# clean the dir after tests
for fn in glob.glob(os.path.join(self.path, 'test*.mmstats')):
try:
os.remove(fn)
pass
except OSError:
continue
## Instruction:
Refactor test harness file discovery
## Code After:
import unittest
import glob
import os
import mmstats
class MmstatsTestCase(unittest.TestCase):
@property
def files(self):
return glob.glob(os.path.join(self.path, 'test*.mmstats'))
def setUp(self):
super(MmstatsTestCase, self).setUp()
self.path = mmstats.DEFAULT_PATH
# Clean out stale mmstats files
for fn in self.files:
try:
os.remove(fn)
pass
except OSError:
print 'Could not remove: %s' % fn
def tearDown(self):
# clean the dir after tests
for fn in self.files:
try:
os.remove(fn)
pass
except OSError:
continue
|
# ... existing code ...
class MmstatsTestCase(unittest.TestCase):
@property
def files(self):
return glob.glob(os.path.join(self.path, 'test*.mmstats'))
def setUp(self):
super(MmstatsTestCase, self).setUp()
self.path = mmstats.DEFAULT_PATH
# Clean out stale mmstats files
for fn in self.files:
try:
os.remove(fn)
pass
# ... modified code ...
def tearDown(self):
# clean the dir after tests
for fn in self.files:
try:
os.remove(fn)
pass
# ... rest of the code ...
|
b77e8f9a081517701cccf9f177c81eaca877e8c7
|
pombola/images/admin.py
|
pombola/images/admin.py
|
from django.contrib import admin
from django.contrib.contenttypes.generic import GenericTabularInline
from sorl.thumbnail import get_thumbnail
from sorl.thumbnail.admin import AdminImageMixin
from pombola.images import models
class ImageAdmin(AdminImageMixin, admin.ModelAdmin):
list_display = [ 'thumbnail', 'content_object', 'is_primary', 'source', ]
def thumbnail(self, obj):
im = get_thumbnail(obj.image, '100x100')
return '<img src="%s" />' % ( im.url )
thumbnail.allow_tags = True
class ImageAdminInline(AdminImageMixin, GenericTabularInline):
model = models.Image
extra = 0
can_delete = True
admin.site.register( models.Image, ImageAdmin )
|
from django.contrib import admin
from django.contrib.contenttypes.generic import GenericTabularInline
from sorl.thumbnail import get_thumbnail
from sorl.thumbnail.admin import AdminImageMixin
from pombola.images import models
class ImageAdmin(AdminImageMixin, admin.ModelAdmin):
list_display = [ 'thumbnail', 'content_object', 'is_primary', 'source', ]
def thumbnail(self, obj):
if obj.image:
im = get_thumbnail(obj.image, '100x100')
return '<img src="%s" />' % ( im.url )
else:
return "NO IMAGE FOUND"
thumbnail.allow_tags = True
class ImageAdminInline(AdminImageMixin, GenericTabularInline):
model = models.Image
extra = 0
can_delete = True
admin.site.register( models.Image, ImageAdmin )
|
Handle entries that have no image associated with them
|
Handle entries that have no image associated with them
|
Python
|
agpl-3.0
|
ken-muturi/pombola,mysociety/pombola,geoffkilpin/pombola,hzj123/56th,ken-muturi/pombola,patricmutwiri/pombola,geoffkilpin/pombola,ken-muturi/pombola,mysociety/pombola,ken-muturi/pombola,mysociety/pombola,hzj123/56th,mysociety/pombola,patricmutwiri/pombola,patricmutwiri/pombola,geoffkilpin/pombola,hzj123/56th,ken-muturi/pombola,ken-muturi/pombola,patricmutwiri/pombola,geoffkilpin/pombola,hzj123/56th,patricmutwiri/pombola,patricmutwiri/pombola,hzj123/56th,geoffkilpin/pombola,mysociety/pombola,geoffkilpin/pombola,mysociety/pombola,hzj123/56th
|
python
|
## Code Before:
from django.contrib import admin
from django.contrib.contenttypes.generic import GenericTabularInline
from sorl.thumbnail import get_thumbnail
from sorl.thumbnail.admin import AdminImageMixin
from pombola.images import models
class ImageAdmin(AdminImageMixin, admin.ModelAdmin):
list_display = [ 'thumbnail', 'content_object', 'is_primary', 'source', ]
def thumbnail(self, obj):
im = get_thumbnail(obj.image, '100x100')
return '<img src="%s" />' % ( im.url )
thumbnail.allow_tags = True
class ImageAdminInline(AdminImageMixin, GenericTabularInline):
model = models.Image
extra = 0
can_delete = True
admin.site.register( models.Image, ImageAdmin )
## Instruction:
Handle entries that have no image associated with them
## Code After:
from django.contrib import admin
from django.contrib.contenttypes.generic import GenericTabularInline
from sorl.thumbnail import get_thumbnail
from sorl.thumbnail.admin import AdminImageMixin
from pombola.images import models
class ImageAdmin(AdminImageMixin, admin.ModelAdmin):
list_display = [ 'thumbnail', 'content_object', 'is_primary', 'source', ]
def thumbnail(self, obj):
if obj.image:
im = get_thumbnail(obj.image, '100x100')
return '<img src="%s" />' % ( im.url )
else:
return "NO IMAGE FOUND"
thumbnail.allow_tags = True
class ImageAdminInline(AdminImageMixin, GenericTabularInline):
model = models.Image
extra = 0
can_delete = True
admin.site.register( models.Image, ImageAdmin )
|
# ... existing code ...
list_display = [ 'thumbnail', 'content_object', 'is_primary', 'source', ]
def thumbnail(self, obj):
if obj.image:
im = get_thumbnail(obj.image, '100x100')
return '<img src="%s" />' % ( im.url )
else:
return "NO IMAGE FOUND"
thumbnail.allow_tags = True
# ... rest of the code ...
|
428b4b0025dd7bb0edf5d3df8c32703d96ab577b
|
src/shared/unit_orders.py
|
src/shared/unit_orders.py
|
class UnitOrders(object):
def __init__(self):
self.orders = {}
def giveOrders(self, unit, orders):
if orders is not None and not isinstance(orders, list):
orders = list(orders)
self.orders[unit] = orders
def getNextOrder(self, unit):
try:
return self.orders[unit][0]
except (KeyError, IndexError):
return None
def removeNextOrder(self, unit):
self.orders[unit] = self.orders[unit][1:]
if not self.orders[unit]:
del self.orders[unit]
def getAllUnitsNextOrders(self):
return {x: self.getNextOrder(x) for x in self.orders}
|
class UnitOrders(object):
def __init__(self):
self.orders = {}
def giveOrders(self, unit, orders):
if orders is not None and not isinstance(orders, list):
orders = list(orders)
self.orders[unit] = orders
def getNextOrder(self, unit):
try:
orders = self.orders[unit]
if orders is None:
return None
else:
return orders[0]
except (KeyError, IndexError):
return None
def removeNextOrder(self, unit):
self.orders[unit] = self.orders[unit][1:]
if not self.orders[unit]:
del self.orders[unit]
def getAllUnitsNextOrders(self):
return {x: self.getNextOrder(x) for x in self.orders}
|
Check for None before indexing.
|
Check for None before indexing.
|
Python
|
mit
|
CheeseLord/warts,CheeseLord/warts
|
python
|
## Code Before:
class UnitOrders(object):
def __init__(self):
self.orders = {}
def giveOrders(self, unit, orders):
if orders is not None and not isinstance(orders, list):
orders = list(orders)
self.orders[unit] = orders
def getNextOrder(self, unit):
try:
return self.orders[unit][0]
except (KeyError, IndexError):
return None
def removeNextOrder(self, unit):
self.orders[unit] = self.orders[unit][1:]
if not self.orders[unit]:
del self.orders[unit]
def getAllUnitsNextOrders(self):
return {x: self.getNextOrder(x) for x in self.orders}
## Instruction:
Check for None before indexing.
## Code After:
class UnitOrders(object):
def __init__(self):
self.orders = {}
def giveOrders(self, unit, orders):
if orders is not None and not isinstance(orders, list):
orders = list(orders)
self.orders[unit] = orders
def getNextOrder(self, unit):
try:
orders = self.orders[unit]
if orders is None:
return None
else:
return orders[0]
except (KeyError, IndexError):
return None
def removeNextOrder(self, unit):
self.orders[unit] = self.orders[unit][1:]
if not self.orders[unit]:
del self.orders[unit]
def getAllUnitsNextOrders(self):
return {x: self.getNextOrder(x) for x in self.orders}
|
// ... existing code ...
def getNextOrder(self, unit):
try:
orders = self.orders[unit]
if orders is None:
return None
else:
return orders[0]
except (KeyError, IndexError):
return None
// ... rest of the code ...
|
97401a56e59d06acdd455f111dbe993265f2a39d
|
setup.py
|
setup.py
|
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='urbansim',
version='0.2dev',
description='Tool for modeling metropolitan real estate markets',
author='Synthicity',
author_email='[email protected]',
license='AGPL',
url='https://github.com/synthicity/urbansim',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python :: 2.7',
'License :: OSI Approved :: GNU Affero General Public License v3'
],
packages=find_packages(exclude=['urbansimd', '*.tests']),
package_data={'urbansim.urbansim': ['templates/*.template']},
install_requires=[
'Django>=1.6.2',
'jinja2>=2.7.2',
'numpy>=1.8.0',
'pandas>=0.13.1',
'patsy>=0.2.1',
'pyyaml>=3.10',
'scipy>=0.13.3',
'shapely>=1.3.0',
'simplejson>=3.3.3',
'statsmodels>=0.5.0',
'tables>=3.1.0'
],
entry_points={
'console_scripts': [
'urbansim_compile = urbansim.urbansim.compilecli:main',
'urbansim_serve = urbansim.server.servecli:main'
]
}
)
|
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='urbansim',
version='0.2dev',
description='Tool for modeling metropolitan real estate markets',
author='Synthicity',
author_email='[email protected]',
license='AGPL',
url='https://github.com/synthicity/urbansim',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python :: 2.7',
'License :: OSI Approved :: GNU Affero General Public License v3'
],
packages=find_packages(exclude=['*.tests']),
package_data={'urbansim.urbansim': ['templates/*.template']},
install_requires=[
'Django>=1.6.2',
'jinja2>=2.7.2',
'numpy>=1.8.0',
'pandas>=0.13.1',
'patsy>=0.2.1',
'pyyaml>=3.10',
'scipy>=0.13.3',
'shapely>=1.3.0',
'simplejson>=3.3.3',
'statsmodels>=0.5.0',
'tables>=3.1.0'
],
entry_points={
'console_scripts': [
'urbansim_compile = urbansim.urbansim.compilecli:main',
'urbansim_serve = urbansim.server.servecli:main'
]
}
)
|
Remove urbansimd from excluded packages.
|
Remove urbansimd from excluded packages.
|
Python
|
bsd-3-clause
|
SANDAG/urbansim,apdjustino/urbansim,synthicity/urbansim,UDST/urbansim,UDST/urbansim,bricegnichols/urbansim,AZMAG/urbansim,ual/urbansim,UDST/urbansim,VladimirTyrin/urbansim,waddell/urbansim,VladimirTyrin/urbansim,bricegnichols/urbansim,SANDAG/urbansim,waddell/urbansim,waddell/urbansim,apdjustino/urbansim,apdjustino/urbansim,SANDAG/urbansim,synthicity/urbansim,ual/urbansim,VladimirTyrin/urbansim,synthicity/urbansim,apdjustino/urbansim,AZMAG/urbansim,ual/urbansim,waddell/urbansim,bricegnichols/urbansim,bricegnichols/urbansim,ual/urbansim,VladimirTyrin/urbansim,synthicity/urbansim,AZMAG/urbansim,UDST/urbansim,AZMAG/urbansim,SANDAG/urbansim
|
python
|
## Code Before:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='urbansim',
version='0.2dev',
description='Tool for modeling metropolitan real estate markets',
author='Synthicity',
author_email='[email protected]',
license='AGPL',
url='https://github.com/synthicity/urbansim',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python :: 2.7',
'License :: OSI Approved :: GNU Affero General Public License v3'
],
packages=find_packages(exclude=['urbansimd', '*.tests']),
package_data={'urbansim.urbansim': ['templates/*.template']},
install_requires=[
'Django>=1.6.2',
'jinja2>=2.7.2',
'numpy>=1.8.0',
'pandas>=0.13.1',
'patsy>=0.2.1',
'pyyaml>=3.10',
'scipy>=0.13.3',
'shapely>=1.3.0',
'simplejson>=3.3.3',
'statsmodels>=0.5.0',
'tables>=3.1.0'
],
entry_points={
'console_scripts': [
'urbansim_compile = urbansim.urbansim.compilecli:main',
'urbansim_serve = urbansim.server.servecli:main'
]
}
)
## Instruction:
Remove urbansimd from excluded packages.
## Code After:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='urbansim',
version='0.2dev',
description='Tool for modeling metropolitan real estate markets',
author='Synthicity',
author_email='[email protected]',
license='AGPL',
url='https://github.com/synthicity/urbansim',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python :: 2.7',
'License :: OSI Approved :: GNU Affero General Public License v3'
],
packages=find_packages(exclude=['*.tests']),
package_data={'urbansim.urbansim': ['templates/*.template']},
install_requires=[
'Django>=1.6.2',
'jinja2>=2.7.2',
'numpy>=1.8.0',
'pandas>=0.13.1',
'patsy>=0.2.1',
'pyyaml>=3.10',
'scipy>=0.13.3',
'shapely>=1.3.0',
'simplejson>=3.3.3',
'statsmodels>=0.5.0',
'tables>=3.1.0'
],
entry_points={
'console_scripts': [
'urbansim_compile = urbansim.urbansim.compilecli:main',
'urbansim_serve = urbansim.server.servecli:main'
]
}
)
|
# ... existing code ...
'Programming Language :: Python :: 2.7',
'License :: OSI Approved :: GNU Affero General Public License v3'
],
packages=find_packages(exclude=['*.tests']),
package_data={'urbansim.urbansim': ['templates/*.template']},
install_requires=[
'Django>=1.6.2',
# ... rest of the code ...
|
929779ef4504ea114c174f2b2d83a0535143d76d
|
app/src/main/java/net/ericschrag/ud_demo/data/GithubService.java
|
app/src/main/java/net/ericschrag/ud_demo/data/GithubService.java
|
package net.ericschrag.ud_demo.data;
import net.ericschrag.ud_demo.data.model.GithubUser;
import java.util.List;
import retrofit.http.GET;
public interface GithubService {
@GET("/users")
public List<GithubUser> getUsers();
}
|
package net.ericschrag.ud_demo.data;
import net.ericschrag.ud_demo.data.model.GithubUser;
import java.util.List;
import retrofit.http.GET;
import retrofit.http.Headers;
public interface GithubService {
@Headers("Accept: application/vnd.github.v3+json")
@GET("/users")
public List<GithubUser> getUsers();
}
|
Make sure to tell Github API what version we are expecting
|
Make sure to tell Github API what version we are expecting
|
Java
|
apache-2.0
|
Kusand/ud-demo
|
java
|
## Code Before:
package net.ericschrag.ud_demo.data;
import net.ericschrag.ud_demo.data.model.GithubUser;
import java.util.List;
import retrofit.http.GET;
public interface GithubService {
@GET("/users")
public List<GithubUser> getUsers();
}
## Instruction:
Make sure to tell Github API what version we are expecting
## Code After:
package net.ericschrag.ud_demo.data;
import net.ericschrag.ud_demo.data.model.GithubUser;
import java.util.List;
import retrofit.http.GET;
import retrofit.http.Headers;
public interface GithubService {
@Headers("Accept: application/vnd.github.v3+json")
@GET("/users")
public List<GithubUser> getUsers();
}
|
# ... existing code ...
import java.util.List;
import retrofit.http.GET;
import retrofit.http.Headers;
public interface GithubService {
@Headers("Accept: application/vnd.github.v3+json")
@GET("/users")
public List<GithubUser> getUsers();
}
# ... rest of the code ...
|
d0feed675897570d92eeb7b801b8ba094171bee0
|
send_email.py
|
send_email.py
|
import smtplib
from email.mime.text import MIMEText
def process_email(data):
table_template = open('table_template.html', 'r').read()
same_guild_html = []
for game in data['same_guild']:
tt = table_template
same_guild_html.append(tt.format(**game))
games_html = []
for game in data['games']:
tt = table_template
games_html.append(tt.format(**game))
send(same_guild_html + games_html)
def connect():
gmail_user = '[email protected]'
gmail_passwd = 'passsword'
server = smtplib.SMTP('smtp.gmail.com', 587)
server.ehlo()
server.starttls()
server.ehlo()
server.login(gmail_user, gmail_passwd)
return server
def send(data):
TO = '[email protected]'
FROM = '[email protected]'
SUBJECT = 'KGS - DuelGo Results'
msg = MIMEText('\n'.join(data), 'html')
msg['To'] = TO
msg['From'] = FROM
msg['Subject'] = SUBJECT
server = connect()
try:
server.sendmail(FROM, [TO], msg.as_string())
print('\t ... sent')
except:
print('\t ... ERROR sending')
server.quit()
|
import datetime
import smtplib
from email.mime.text import MIMEText
def process_email(data):
table_template = open('table_template.html', 'r').read()
same_guild_html = []
for game in data['same_guild']:
tt = table_template
same_guild_html.append(tt.format(**game))
games_html = []
for game in data['games']:
tt = table_template
games_html.append(tt.format(**game))
send(same_guild_html + games_html)
def connect():
gmail_user = '[email protected]'
gmail_passwd = 'passsword'
server = smtplib.SMTP('smtp.gmail.com', 587)
server.ehlo()
server.starttls()
server.ehlo()
server.login(gmail_user, gmail_passwd)
return server
def send(data):
TO = '[email protected]'
FROM = '[email protected]'
SUBJECT = 'KGS - DuelGo Results - {}'.format(datetime.datetime.now().strftime('%m/%d/%Y %I:%M %p'))
msg = MIMEText('\n'.join(data), 'html')
msg['To'] = TO
msg['From'] = FROM
msg['Subject'] = SUBJECT
server = connect()
try:
server.sendmail(FROM, [TO], msg.as_string())
print('\t ... sent')
except:
print('\t ... ERROR sending')
server.quit()
|
Update email subject with datetime so as to not have it end up in a thread in email client.
|
Update email subject with datetime so as to not have it end up in a thread in email client.
|
Python
|
agpl-3.0
|
v01d-cypher/kgs_league_scorer,v01d-cypher/kgs_league_scorer
|
python
|
## Code Before:
import smtplib
from email.mime.text import MIMEText
def process_email(data):
table_template = open('table_template.html', 'r').read()
same_guild_html = []
for game in data['same_guild']:
tt = table_template
same_guild_html.append(tt.format(**game))
games_html = []
for game in data['games']:
tt = table_template
games_html.append(tt.format(**game))
send(same_guild_html + games_html)
def connect():
gmail_user = '[email protected]'
gmail_passwd = 'passsword'
server = smtplib.SMTP('smtp.gmail.com', 587)
server.ehlo()
server.starttls()
server.ehlo()
server.login(gmail_user, gmail_passwd)
return server
def send(data):
TO = '[email protected]'
FROM = '[email protected]'
SUBJECT = 'KGS - DuelGo Results'
msg = MIMEText('\n'.join(data), 'html')
msg['To'] = TO
msg['From'] = FROM
msg['Subject'] = SUBJECT
server = connect()
try:
server.sendmail(FROM, [TO], msg.as_string())
print('\t ... sent')
except:
print('\t ... ERROR sending')
server.quit()
## Instruction:
Update email subject with datetime so as to not have it end up in a thread in email client.
## Code After:
import datetime
import smtplib
from email.mime.text import MIMEText
def process_email(data):
table_template = open('table_template.html', 'r').read()
same_guild_html = []
for game in data['same_guild']:
tt = table_template
same_guild_html.append(tt.format(**game))
games_html = []
for game in data['games']:
tt = table_template
games_html.append(tt.format(**game))
send(same_guild_html + games_html)
def connect():
gmail_user = '[email protected]'
gmail_passwd = 'passsword'
server = smtplib.SMTP('smtp.gmail.com', 587)
server.ehlo()
server.starttls()
server.ehlo()
server.login(gmail_user, gmail_passwd)
return server
def send(data):
TO = '[email protected]'
FROM = '[email protected]'
SUBJECT = 'KGS - DuelGo Results - {}'.format(datetime.datetime.now().strftime('%m/%d/%Y %I:%M %p'))
msg = MIMEText('\n'.join(data), 'html')
msg['To'] = TO
msg['From'] = FROM
msg['Subject'] = SUBJECT
server = connect()
try:
server.sendmail(FROM, [TO], msg.as_string())
print('\t ... sent')
except:
print('\t ... ERROR sending')
server.quit()
|
// ... existing code ...
import datetime
import smtplib
from email.mime.text import MIMEText
// ... modified code ...
def send(data):
TO = '[email protected]'
FROM = '[email protected]'
SUBJECT = 'KGS - DuelGo Results - {}'.format(datetime.datetime.now().strftime('%m/%d/%Y %I:%M %p'))
msg = MIMEText('\n'.join(data), 'html')
msg['To'] = TO
// ... rest of the code ...
|
0bddababba1a9127834a073c543a9ff13dd24e24
|
src/main/java/rsc/publisher/PublisherSubscribeOnValue.java
|
src/main/java/rsc/publisher/PublisherSubscribeOnValue.java
|
package rsc.publisher;
import java.util.Objects;
import org.reactivestreams.Subscriber;
import rsc.flow.Cancellation;
import rsc.scheduler.Scheduler;
import rsc.util.EmptySubscription;
import rsc.util.ExceptionHelper;
/**
* Publisher indicating a scalar/empty source that subscribes on the specified scheduler.
*
* @param <T>
*/
final class PublisherSubscribeOnValue<T> extends Px<T> {
final T value;
final Scheduler scheduler;
public PublisherSubscribeOnValue(T value,
Scheduler scheduler) {
this.value = value;
this.scheduler = Objects.requireNonNull(scheduler, "scheduler");
}
@Override
public void subscribe(Subscriber<? super T> s) {
Scheduler.Worker worker;
try {
worker = scheduler.createWorker();
} catch (Throwable e) {
ExceptionHelper.throwIfFatal(e);
EmptySubscription.error(s, e);
return;
}
if (worker == null) {
EmptySubscription.error(s, new NullPointerException("The scheduler returned a null Function"));
return;
}
T v = value;
if (v == null) {
PublisherSubscribeOn.ScheduledEmpty parent = new PublisherSubscribeOn.ScheduledEmpty(s);
s.onSubscribe(parent);
Cancellation f = scheduler.schedule(parent);
parent.setFuture(f);
} else {
s.onSubscribe(new PublisherSubscribeOn.ScheduledScalar<>(s, v, scheduler));
}
}
}
|
package rsc.publisher;
import java.util.Objects;
import org.reactivestreams.Subscriber;
import rsc.flow.Cancellation;
import rsc.scheduler.Scheduler;
/**
* Publisher indicating a scalar/empty source that subscribes on the specified scheduler.
*
* @param <T>
*/
final class PublisherSubscribeOnValue<T> extends Px<T> {
final T value;
final Scheduler scheduler;
public PublisherSubscribeOnValue(T value,
Scheduler scheduler) {
this.value = value;
this.scheduler = Objects.requireNonNull(scheduler, "scheduler");
}
@Override
public void subscribe(Subscriber<? super T> s) {
T v = value;
if (v == null) {
PublisherSubscribeOn.ScheduledEmpty parent = new PublisherSubscribeOn.ScheduledEmpty(s);
s.onSubscribe(parent);
Cancellation f = scheduler.schedule(parent);
parent.setFuture(f);
} else {
s.onSubscribe(new PublisherSubscribeOn.ScheduledScalar<>(s, v, scheduler));
}
}
}
|
Remove unused worker (and thus a worker leak)
|
Remove unused worker (and thus a worker leak)
|
Java
|
apache-2.0
|
reactor/reactive-streams-commons,reactor/reactive-streams-commons
|
java
|
## Code Before:
package rsc.publisher;
import java.util.Objects;
import org.reactivestreams.Subscriber;
import rsc.flow.Cancellation;
import rsc.scheduler.Scheduler;
import rsc.util.EmptySubscription;
import rsc.util.ExceptionHelper;
/**
* Publisher indicating a scalar/empty source that subscribes on the specified scheduler.
*
* @param <T>
*/
final class PublisherSubscribeOnValue<T> extends Px<T> {
final T value;
final Scheduler scheduler;
public PublisherSubscribeOnValue(T value,
Scheduler scheduler) {
this.value = value;
this.scheduler = Objects.requireNonNull(scheduler, "scheduler");
}
@Override
public void subscribe(Subscriber<? super T> s) {
Scheduler.Worker worker;
try {
worker = scheduler.createWorker();
} catch (Throwable e) {
ExceptionHelper.throwIfFatal(e);
EmptySubscription.error(s, e);
return;
}
if (worker == null) {
EmptySubscription.error(s, new NullPointerException("The scheduler returned a null Function"));
return;
}
T v = value;
if (v == null) {
PublisherSubscribeOn.ScheduledEmpty parent = new PublisherSubscribeOn.ScheduledEmpty(s);
s.onSubscribe(parent);
Cancellation f = scheduler.schedule(parent);
parent.setFuture(f);
} else {
s.onSubscribe(new PublisherSubscribeOn.ScheduledScalar<>(s, v, scheduler));
}
}
}
## Instruction:
Remove unused worker (and thus a worker leak)
## Code After:
package rsc.publisher;
import java.util.Objects;
import org.reactivestreams.Subscriber;
import rsc.flow.Cancellation;
import rsc.scheduler.Scheduler;
/**
* Publisher indicating a scalar/empty source that subscribes on the specified scheduler.
*
* @param <T>
*/
final class PublisherSubscribeOnValue<T> extends Px<T> {
final T value;
final Scheduler scheduler;
public PublisherSubscribeOnValue(T value,
Scheduler scheduler) {
this.value = value;
this.scheduler = Objects.requireNonNull(scheduler, "scheduler");
}
@Override
public void subscribe(Subscriber<? super T> s) {
T v = value;
if (v == null) {
PublisherSubscribeOn.ScheduledEmpty parent = new PublisherSubscribeOn.ScheduledEmpty(s);
s.onSubscribe(parent);
Cancellation f = scheduler.schedule(parent);
parent.setFuture(f);
} else {
s.onSubscribe(new PublisherSubscribeOn.ScheduledScalar<>(s, v, scheduler));
}
}
}
|
...
import rsc.flow.Cancellation;
import rsc.scheduler.Scheduler;
/**
* Publisher indicating a scalar/empty source that subscribes on the specified scheduler.
...
@Override
public void subscribe(Subscriber<? super T> s) {
T v = value;
if (v == null) {
PublisherSubscribeOn.ScheduledEmpty parent = new PublisherSubscribeOn.ScheduledEmpty(s);
...
|
4fe55df3bb668a2eafdb65a3a31ad27ffa5dc3c2
|
pytable.py
|
pytable.py
|
from __future__ import print_function
from operator import itemgetter
import monoidal_tables as mt
from monoidal_tables import renderers
if __name__ == '__main__':
table = (mt.integer('X', itemgetter('x')) +
mt.integer('Y', itemgetter('y')) +
mt.align_center(mt.column('Name', itemgetter('name'))))
data = [
{'x': 0, 'y': 0, 'name': 'Origin'},
{'x': 5, 'y': 5, 'name': 'Diagonal'},
{'x': 12, 'y': 8, 'name': 'Up'},
]
table.render(data, renderer=renderers.FancyRenderer)
|
from __future__ import print_function
from operator import itemgetter
import monoidal_tables as mt
from monoidal_tables import renderers
if __name__ == '__main__':
table = (mt.integer('X', itemgetter('x')) +
mt.set_class(mt.integer('Y', itemgetter('y')), 'col-y') +
mt.align_center(mt.column('Name', itemgetter('name'))))
data = [
{'x': 0, 'y': 0, 'name': 'Origin'},
{'x': 5, 'y': 5, 'name': 'Diagonal'},
{'x': 12, 'y': 8, 'name': 'Up'},
]
table.render(data, renderer=renderers.FancyRenderer)
|
Update example to show HTML class
|
Update example to show HTML class
|
Python
|
bsd-3-clause
|
lubomir/monoidal-tables
|
python
|
## Code Before:
from __future__ import print_function
from operator import itemgetter
import monoidal_tables as mt
from monoidal_tables import renderers
if __name__ == '__main__':
table = (mt.integer('X', itemgetter('x')) +
mt.integer('Y', itemgetter('y')) +
mt.align_center(mt.column('Name', itemgetter('name'))))
data = [
{'x': 0, 'y': 0, 'name': 'Origin'},
{'x': 5, 'y': 5, 'name': 'Diagonal'},
{'x': 12, 'y': 8, 'name': 'Up'},
]
table.render(data, renderer=renderers.FancyRenderer)
## Instruction:
Update example to show HTML class
## Code After:
from __future__ import print_function
from operator import itemgetter
import monoidal_tables as mt
from monoidal_tables import renderers
if __name__ == '__main__':
table = (mt.integer('X', itemgetter('x')) +
mt.set_class(mt.integer('Y', itemgetter('y')), 'col-y') +
mt.align_center(mt.column('Name', itemgetter('name'))))
data = [
{'x': 0, 'y': 0, 'name': 'Origin'},
{'x': 5, 'y': 5, 'name': 'Diagonal'},
{'x': 12, 'y': 8, 'name': 'Up'},
]
table.render(data, renderer=renderers.FancyRenderer)
|
# ... existing code ...
if __name__ == '__main__':
table = (mt.integer('X', itemgetter('x')) +
mt.set_class(mt.integer('Y', itemgetter('y')), 'col-y') +
mt.align_center(mt.column('Name', itemgetter('name'))))
data = [
{'x': 0, 'y': 0, 'name': 'Origin'},
# ... rest of the code ...
|
622b81296b292035b970891cd259eaac113d20c1
|
apps/accounts/conf.py
|
apps/accounts/conf.py
|
from django.conf import settings # noqa
from appconf import AppConf
class AccountConf(AppConf):
"""
Custom settings for the account module. Mainly settings required for
the login on the remote system.
"""
PID = 3
LOGIN_TYPE = 'login'
LOGIN_SUCCESS_URL = 'home'
ENFORCE_LOGIN_TIMEOUT = 300 # 5 Minutes
ENFORCE_LOGIN_COOKIE_NAME = 'login_expiry'
ENFORCE_LOGIN_SALT = settings.AUTH_API_USER
ENFORCE_LOGIN_NAME = 'force_login_check'
# The name of the UNCCD role as provided by the remote system.
UNCCD_ROLE_NAME = 'UNCCD national focal point'
|
from django.conf import settings # noqa
from appconf import AppConf
class AccountConf(AppConf):
"""
Custom settings for the account module. Mainly settings required for
the login on the remote system.
"""
PID = 3
LOGIN_TYPE = 'login'
LOGIN_SUCCESS_URL = 'home'
ENFORCE_LOGIN_TIMEOUT = 300 # 5 Minutes
ENFORCE_LOGIN_COOKIE_NAME = 'login_expiry'
ENFORCE_LOGIN_SALT = settings.AUTH_API_USER
ENFORCE_LOGIN_NAME = 'force_login_check'
# The name of the UNCCD role as provided by the remote system.
UNCCD_ROLE_NAME = 'UNCCD Focal Point'
|
Change internal name of UNCCD role back to previous correct value
|
Change internal name of UNCCD role back to previous correct value
|
Python
|
apache-2.0
|
CDE-UNIBE/qcat,CDE-UNIBE/qcat,CDE-UNIBE/qcat,CDE-UNIBE/qcat
|
python
|
## Code Before:
from django.conf import settings # noqa
from appconf import AppConf
class AccountConf(AppConf):
"""
Custom settings for the account module. Mainly settings required for
the login on the remote system.
"""
PID = 3
LOGIN_TYPE = 'login'
LOGIN_SUCCESS_URL = 'home'
ENFORCE_LOGIN_TIMEOUT = 300 # 5 Minutes
ENFORCE_LOGIN_COOKIE_NAME = 'login_expiry'
ENFORCE_LOGIN_SALT = settings.AUTH_API_USER
ENFORCE_LOGIN_NAME = 'force_login_check'
# The name of the UNCCD role as provided by the remote system.
UNCCD_ROLE_NAME = 'UNCCD national focal point'
## Instruction:
Change internal name of UNCCD role back to previous correct value
## Code After:
from django.conf import settings # noqa
from appconf import AppConf
class AccountConf(AppConf):
"""
Custom settings for the account module. Mainly settings required for
the login on the remote system.
"""
PID = 3
LOGIN_TYPE = 'login'
LOGIN_SUCCESS_URL = 'home'
ENFORCE_LOGIN_TIMEOUT = 300 # 5 Minutes
ENFORCE_LOGIN_COOKIE_NAME = 'login_expiry'
ENFORCE_LOGIN_SALT = settings.AUTH_API_USER
ENFORCE_LOGIN_NAME = 'force_login_check'
# The name of the UNCCD role as provided by the remote system.
UNCCD_ROLE_NAME = 'UNCCD Focal Point'
|
...
ENFORCE_LOGIN_NAME = 'force_login_check'
# The name of the UNCCD role as provided by the remote system.
UNCCD_ROLE_NAME = 'UNCCD Focal Point'
...
|
e4452ff7e8c27e2e8315c2edb8627a2e92ca86e3
|
panoptes_cli/scripts/panoptes.py
|
panoptes_cli/scripts/panoptes.py
|
import click
import os
import yaml
from panoptes_client import Panoptes
@click.group()
@click.option(
'--endpoint', type=str
)
@click.pass_context
def cli(ctx, endpoint):
ctx.config_dir = os.path.join(os.environ['HOME'], '.panoptes')
ctx.config_file = os.path.join(ctx.config_dir, 'config.yml')
ctx.config = {
'endpoint': 'https://panoptes.zooniverse.org',
'username': '',
'password': '',
}
try:
with open(ctx.config_file) as conf_f:
ctx.config.update(yaml.load(conf_f))
except IOError:
pass
if endpoint:
ctx.config['endpoint'] = endpoint
Panoptes.connect(
endpoint=ctx.config['endpoint'],
username=ctx.config['username'],
password=ctx.config['password']
)
from panoptes_cli.commands.configure import *
from panoptes_cli.commands.project import *
from panoptes_cli.commands.subject import *
from panoptes_cli.commands.subject_set import *
from panoptes_cli.commands.workflow import *
|
import click
import os
import yaml
from panoptes_client import Panoptes
@click.group()
@click.option(
'--endpoint', type=str
)
@click.pass_context
def cli(ctx, endpoint):
ctx.config_dir = os.path.expanduser('~/.panoptes/')
ctx.config_file = os.path.join(ctx.config_dir, 'config.yml')
ctx.config = {
'endpoint': 'https://panoptes.zooniverse.org',
'username': '',
'password': '',
}
try:
with open(ctx.config_file) as conf_f:
ctx.config.update(yaml.load(conf_f))
except IOError:
pass
if endpoint:
ctx.config['endpoint'] = endpoint
Panoptes.connect(
endpoint=ctx.config['endpoint'],
username=ctx.config['username'],
password=ctx.config['password']
)
from panoptes_cli.commands.configure import *
from panoptes_cli.commands.project import *
from panoptes_cli.commands.subject import *
from panoptes_cli.commands.subject_set import *
from panoptes_cli.commands.workflow import *
|
Use os.path.expanduser to find config directory
|
Use os.path.expanduser to find config directory
Works on Windows and Unix.
|
Python
|
apache-2.0
|
zooniverse/panoptes-cli
|
python
|
## Code Before:
import click
import os
import yaml
from panoptes_client import Panoptes
@click.group()
@click.option(
'--endpoint', type=str
)
@click.pass_context
def cli(ctx, endpoint):
ctx.config_dir = os.path.join(os.environ['HOME'], '.panoptes')
ctx.config_file = os.path.join(ctx.config_dir, 'config.yml')
ctx.config = {
'endpoint': 'https://panoptes.zooniverse.org',
'username': '',
'password': '',
}
try:
with open(ctx.config_file) as conf_f:
ctx.config.update(yaml.load(conf_f))
except IOError:
pass
if endpoint:
ctx.config['endpoint'] = endpoint
Panoptes.connect(
endpoint=ctx.config['endpoint'],
username=ctx.config['username'],
password=ctx.config['password']
)
from panoptes_cli.commands.configure import *
from panoptes_cli.commands.project import *
from panoptes_cli.commands.subject import *
from panoptes_cli.commands.subject_set import *
from panoptes_cli.commands.workflow import *
## Instruction:
Use os.path.expanduser to find config directory
Works on Windows and Unix.
## Code After:
import click
import os
import yaml
from panoptes_client import Panoptes
@click.group()
@click.option(
'--endpoint', type=str
)
@click.pass_context
def cli(ctx, endpoint):
ctx.config_dir = os.path.expanduser('~/.panoptes/')
ctx.config_file = os.path.join(ctx.config_dir, 'config.yml')
ctx.config = {
'endpoint': 'https://panoptes.zooniverse.org',
'username': '',
'password': '',
}
try:
with open(ctx.config_file) as conf_f:
ctx.config.update(yaml.load(conf_f))
except IOError:
pass
if endpoint:
ctx.config['endpoint'] = endpoint
Panoptes.connect(
endpoint=ctx.config['endpoint'],
username=ctx.config['username'],
password=ctx.config['password']
)
from panoptes_cli.commands.configure import *
from panoptes_cli.commands.project import *
from panoptes_cli.commands.subject import *
from panoptes_cli.commands.subject_set import *
from panoptes_cli.commands.workflow import *
|
// ... existing code ...
)
@click.pass_context
def cli(ctx, endpoint):
ctx.config_dir = os.path.expanduser('~/.panoptes/')
ctx.config_file = os.path.join(ctx.config_dir, 'config.yml')
ctx.config = {
'endpoint': 'https://panoptes.zooniverse.org',
// ... rest of the code ...
|
347117ce2ec6022192607b9725065de4e0bf2923
|
src/main/java/org/cytoscape/commandDialog/internal/tasks/SleepCommandTask.java
|
src/main/java/org/cytoscape/commandDialog/internal/tasks/SleepCommandTask.java
|
package org.cytoscape.commandDialog.internal.tasks;
import org.cytoscape.work.AbstractTask;
import org.cytoscape.work.ProvidesTitle;
import org.cytoscape.work.TaskMonitor;
import org.cytoscape.work.Tunable;
public class SleepCommandTask extends AbstractEmptyObservableTask {
@ProvidesTitle
public String getTitle() { return "Sleeping..."; }
@Tunable(description="Duration of sleep in seconds")
public double duration;
public SleepCommandTask() {
super();
}
@Override
public void run(TaskMonitor arg0) throws Exception {
if (duration != 0d) {
arg0.showMessage(TaskMonitor.Level.INFO, "Sleeping for "+duration+" seconds");
Thread.sleep((long)duration*1000);
arg0.showMessage(TaskMonitor.Level.INFO, "Slept for "+duration+" seconds");
}
}
}
|
package org.cytoscape.commandDialog.internal.tasks;
import org.cytoscape.work.AbstractTask;
import org.cytoscape.work.ProvidesTitle;
import org.cytoscape.work.TaskMonitor;
import org.cytoscape.work.Tunable;
public class SleepCommandTask extends AbstractEmptyObservableTask {
@ProvidesTitle
public String getTitle() { return "Sleeping..."; }
@Tunable(description="Duration of sleep in seconds",
longDescription="Enter the time in seconds to sleep",
exampleStringValue="5")
public double duration;
public SleepCommandTask() {
super();
}
@Override
public void run(TaskMonitor arg0) throws Exception {
if (duration != 0d) {
arg0.showMessage(TaskMonitor.Level.INFO, "Sleeping for "+duration+" seconds");
Thread.sleep((long)duration*1000);
arg0.showMessage(TaskMonitor.Level.INFO, "Slept for "+duration+" seconds");
}
}
}
|
Add longDescription and exampleStringValue to sleep
|
Add longDescription and exampleStringValue to sleep
|
Java
|
lgpl-2.1
|
cytoscape/command-dialog
|
java
|
## Code Before:
package org.cytoscape.commandDialog.internal.tasks;
import org.cytoscape.work.AbstractTask;
import org.cytoscape.work.ProvidesTitle;
import org.cytoscape.work.TaskMonitor;
import org.cytoscape.work.Tunable;
public class SleepCommandTask extends AbstractEmptyObservableTask {
@ProvidesTitle
public String getTitle() { return "Sleeping..."; }
@Tunable(description="Duration of sleep in seconds")
public double duration;
public SleepCommandTask() {
super();
}
@Override
public void run(TaskMonitor arg0) throws Exception {
if (duration != 0d) {
arg0.showMessage(TaskMonitor.Level.INFO, "Sleeping for "+duration+" seconds");
Thread.sleep((long)duration*1000);
arg0.showMessage(TaskMonitor.Level.INFO, "Slept for "+duration+" seconds");
}
}
}
## Instruction:
Add longDescription and exampleStringValue to sleep
## Code After:
package org.cytoscape.commandDialog.internal.tasks;
import org.cytoscape.work.AbstractTask;
import org.cytoscape.work.ProvidesTitle;
import org.cytoscape.work.TaskMonitor;
import org.cytoscape.work.Tunable;
public class SleepCommandTask extends AbstractEmptyObservableTask {
@ProvidesTitle
public String getTitle() { return "Sleeping..."; }
@Tunable(description="Duration of sleep in seconds",
longDescription="Enter the time in seconds to sleep",
exampleStringValue="5")
public double duration;
public SleepCommandTask() {
super();
}
@Override
public void run(TaskMonitor arg0) throws Exception {
if (duration != 0d) {
arg0.showMessage(TaskMonitor.Level.INFO, "Sleeping for "+duration+" seconds");
Thread.sleep((long)duration*1000);
arg0.showMessage(TaskMonitor.Level.INFO, "Slept for "+duration+" seconds");
}
}
}
|
# ... existing code ...
@ProvidesTitle
public String getTitle() { return "Sleeping..."; }
@Tunable(description="Duration of sleep in seconds",
longDescription="Enter the time in seconds to sleep",
exampleStringValue="5")
public double duration;
public SleepCommandTask() {
# ... rest of the code ...
|
c13fa6ae63ef93364adc41d332eaa249f3585ec6
|
lib/bytestream.h
|
lib/bytestream.h
|
typedef struct _ByteStream {
char* filename;
size_t size;
uint8_t* data;
uint32_t offset;
int exhausted;
} ByteStream;
ByteStream* bsalloc(unsigned int size);
ByteStream* bsmap(char* filename);
int bsfree(ByteStream* bstream);
void bsseek(ByteStream* bs, uint32_t offset);
void bsreset(ByteStream* bs);
unsigned int bsread(ByteStream* bs, uint8_t* buf, size_t size);
unsigned int bsread_offset(ByteStream* bs, uint8_t* buf, size_t size,
uint32_t offset);
int bswrite(ByteStream* bs, uint8_t* data, unsigned int size);
unsigned int bswrite_offset(ByteStream* bs, uint8_t* buf, size_t size,
uint32_t offset);
int bssave(ByteStream* bs, char* filename);
#endif
|
// MAP_ANONYMOUS is MAP_ANON on OSX, so this will let us compile
#ifndef MAP_ANONYMOUS
#define MAP_ANONYMOUS MAP_ANON
#endif
typedef struct _ByteStream {
char* filename;
size_t size;
uint8_t* data;
uint32_t offset;
int exhausted;
} ByteStream;
ByteStream* bsalloc(unsigned int size);
ByteStream* bsmap(char* filename);
int bsfree(ByteStream* bstream);
void bsseek(ByteStream* bs, uint32_t offset);
void bsreset(ByteStream* bs);
unsigned int bsread(ByteStream* bs, uint8_t* buf, size_t size);
unsigned int bsread_offset(ByteStream* bs, uint8_t* buf, size_t size,
uint32_t offset);
int bswrite(ByteStream* bs, uint8_t* data, unsigned int size);
unsigned int bswrite_offset(ByteStream* bs, uint8_t* buf, size_t size,
uint32_t offset);
int bssave(ByteStream* bs, char* filename);
#endif
|
Fix compilation on OSX MAP_ANONYMOUS is MAP_ANON on OSX, so this will let the project compile.
|
Fix compilation on OSX
MAP_ANONYMOUS is MAP_ANON on OSX, so this
will let the project compile.
|
C
|
bsd-3-clause
|
strazzere/dexterity,rchiossi/dexterity,rchiossi/dexterity,rchiossi/dexterity,strazzere/dexterity,strazzere/dexterity
|
c
|
## Code Before:
typedef struct _ByteStream {
char* filename;
size_t size;
uint8_t* data;
uint32_t offset;
int exhausted;
} ByteStream;
ByteStream* bsalloc(unsigned int size);
ByteStream* bsmap(char* filename);
int bsfree(ByteStream* bstream);
void bsseek(ByteStream* bs, uint32_t offset);
void bsreset(ByteStream* bs);
unsigned int bsread(ByteStream* bs, uint8_t* buf, size_t size);
unsigned int bsread_offset(ByteStream* bs, uint8_t* buf, size_t size,
uint32_t offset);
int bswrite(ByteStream* bs, uint8_t* data, unsigned int size);
unsigned int bswrite_offset(ByteStream* bs, uint8_t* buf, size_t size,
uint32_t offset);
int bssave(ByteStream* bs, char* filename);
#endif
## Instruction:
Fix compilation on OSX
MAP_ANONYMOUS is MAP_ANON on OSX, so this
will let the project compile.
## Code After:
// MAP_ANONYMOUS is MAP_ANON on OSX, so this will let us compile
#ifndef MAP_ANONYMOUS
#define MAP_ANONYMOUS MAP_ANON
#endif
typedef struct _ByteStream {
char* filename;
size_t size;
uint8_t* data;
uint32_t offset;
int exhausted;
} ByteStream;
ByteStream* bsalloc(unsigned int size);
ByteStream* bsmap(char* filename);
int bsfree(ByteStream* bstream);
void bsseek(ByteStream* bs, uint32_t offset);
void bsreset(ByteStream* bs);
unsigned int bsread(ByteStream* bs, uint8_t* buf, size_t size);
unsigned int bsread_offset(ByteStream* bs, uint8_t* buf, size_t size,
uint32_t offset);
int bswrite(ByteStream* bs, uint8_t* data, unsigned int size);
unsigned int bswrite_offset(ByteStream* bs, uint8_t* buf, size_t size,
uint32_t offset);
int bssave(ByteStream* bs, char* filename);
#endif
|
# ... existing code ...
// MAP_ANONYMOUS is MAP_ANON on OSX, so this will let us compile
#ifndef MAP_ANONYMOUS
#define MAP_ANONYMOUS MAP_ANON
#endif
typedef struct _ByteStream {
char* filename;
# ... rest of the code ...
|
f4c9482e41ec2ee6c894a413e8fcb0349a9edbd1
|
tapiriik/web/templatetags/displayutils.py
|
tapiriik/web/templatetags/displayutils.py
|
from django import template
import json
register = template.Library()
@register.filter(name="format_meters")
def meters_to_kms(value):
try:
return round(value / 1000)
except:
return "NaN"
@register.filter(name='json')
def jsonit(obj):
return json.dumps(obj)
@register.filter(name='dict_get')
def dict_get(tdict, key):
if type(tdict) is not dict:
tdict = tdict.__dict__
return tdict.get(key, None)
@register.filter(name='format')
def format(format, var):
return format.format(var)
@register.simple_tag
def stringformat(value, *args):
return value.format(*args)
@register.filter(name="percentage")
def percentage(value, *args):
if not value:
return "NaN"
return str(round(float(value) * 100)) + "%"
|
from django import template
import json
register = template.Library()
@register.filter(name="format_meters")
def meters_to_kms(value):
try:
return round(value / 1000)
except:
return "NaN"
@register.filter(name='json')
def jsonit(obj):
return json.dumps(obj)
@register.filter(name='dict_get')
def dict_get(tdict, key):
if type(tdict) is not dict:
tdict = tdict.__dict__
return tdict.get(key, None)
@register.filter(name='format')
def format(format, var):
return format.format(var)
@register.simple_tag
def stringformat(value, *args):
return value.format(*args)
@register.filter(name="percentage")
def percentage(value, *args):
if not value:
return "NaN"
try:
return str(round(float(value) * 100)) + "%"
except ValueError:
return value
|
Fix broken diagnostic dashboard with new sync progress values
|
Fix broken diagnostic dashboard with new sync progress values
|
Python
|
apache-2.0
|
campbellr/tapiriik,niosus/tapiriik,gavioto/tapiriik,cheatos101/tapiriik,cheatos101/tapiriik,brunoflores/tapiriik,abhijit86k/tapiriik,mjnbike/tapiriik,dlenski/tapiriik,abhijit86k/tapiriik,cpfair/tapiriik,marxin/tapiriik,abhijit86k/tapiriik,dlenski/tapiriik,cheatos101/tapiriik,abs0/tapiriik,niosus/tapiriik,dmschreiber/tapiriik,gavioto/tapiriik,cmgrote/tapiriik,campbellr/tapiriik,abs0/tapiriik,cheatos101/tapiriik,brunoflores/tapiriik,mjnbike/tapiriik,cmgrote/tapiriik,cpfair/tapiriik,marxin/tapiriik,dmschreiber/tapiriik,mduggan/tapiriik,cmgrote/tapiriik,mduggan/tapiriik,dmschreiber/tapiriik,cgourlay/tapiriik,brunoflores/tapiriik,dlenski/tapiriik,mjnbike/tapiriik,cpfair/tapiriik,abs0/tapiriik,gavioto/tapiriik,campbellr/tapiriik,marxin/tapiriik,campbellr/tapiriik,dlenski/tapiriik,mduggan/tapiriik,gavioto/tapiriik,brunoflores/tapiriik,mduggan/tapiriik,olamy/tapiriik,niosus/tapiriik,marxin/tapiriik,cgourlay/tapiriik,dmschreiber/tapiriik,abhijit86k/tapiriik,mjnbike/tapiriik,cmgrote/tapiriik,cgourlay/tapiriik,cpfair/tapiriik,niosus/tapiriik,olamy/tapiriik,cgourlay/tapiriik,olamy/tapiriik,olamy/tapiriik,abs0/tapiriik
|
python
|
## Code Before:
from django import template
import json
register = template.Library()
@register.filter(name="format_meters")
def meters_to_kms(value):
try:
return round(value / 1000)
except:
return "NaN"
@register.filter(name='json')
def jsonit(obj):
return json.dumps(obj)
@register.filter(name='dict_get')
def dict_get(tdict, key):
if type(tdict) is not dict:
tdict = tdict.__dict__
return tdict.get(key, None)
@register.filter(name='format')
def format(format, var):
return format.format(var)
@register.simple_tag
def stringformat(value, *args):
return value.format(*args)
@register.filter(name="percentage")
def percentage(value, *args):
if not value:
return "NaN"
return str(round(float(value) * 100)) + "%"
## Instruction:
Fix broken diagnostic dashboard with new sync progress values
## Code After:
from django import template
import json
register = template.Library()
@register.filter(name="format_meters")
def meters_to_kms(value):
try:
return round(value / 1000)
except:
return "NaN"
@register.filter(name='json')
def jsonit(obj):
return json.dumps(obj)
@register.filter(name='dict_get')
def dict_get(tdict, key):
if type(tdict) is not dict:
tdict = tdict.__dict__
return tdict.get(key, None)
@register.filter(name='format')
def format(format, var):
return format.format(var)
@register.simple_tag
def stringformat(value, *args):
return value.format(*args)
@register.filter(name="percentage")
def percentage(value, *args):
if not value:
return "NaN"
try:
return str(round(float(value) * 100)) + "%"
except ValueError:
return value
|
// ... existing code ...
def percentage(value, *args):
if not value:
return "NaN"
try:
return str(round(float(value) * 100)) + "%"
except ValueError:
return value
// ... rest of the code ...
|
5cda63163acec59a43c3975f1320b7268dcf337b
|
devito/parameters.py
|
devito/parameters.py
|
"""The parameters dictionary contains global parameter settings."""
__all__ = ['Parameters', 'parameters']
# Be EXTREMELY careful when writing to a Parameters dictionary
# Read here for reference: http://wiki.c2.com/?GlobalVariablesAreBad
# If any issues related to global state arise, the following class should
# be made immutable. It shall only be written to at application startup
# and never modified.
class Parameters(dict):
""" A dictionary-like class to hold global configuration parameters for devito
On top of a normal dict, this provides the option to provide callback functions
so that any interested module can be informed when the configuration changes.
"""
def __init__(self, name=None, **kwargs):
self._name = name
self.update_functions = None
for key, value in iteritems(kwargs):
self[key] = value
def __setitem__(self, key, value):
super(Parameters, self).__setitem__(key, value)
# If a Parameters dictionary is being added as a child,
# ask it to tell us when it is updated
if isinstance(value, Parameters):
child_update = lambda x: self._updated(*x)
value.update_functions.push(child_update)
# Tell everyone we've been updated
self._updated(key, value)
def _updated(self, key, value):
""" Call any provided update functions so everyone knows we've been updated
"""
for f in self.update_functions:
f(key, value)
|
"""The parameters dictionary contains global parameter settings."""
__all__ = ['Parameters', 'parameters']
# Be EXTREMELY careful when writing to a Parameters dictionary
# Read here for reference: http://wiki.c2.com/?GlobalVariablesAreBad
# https://softwareengineering.stackexchange.com/questions/148108/why-is-global-state-so-evil
# If any issues related to global state arise, the following class should
# be made immutable. It shall only be written to at application startup
# and never modified.
class Parameters(dict):
""" A dictionary-like class to hold global configuration parameters for devito
On top of a normal dict, this provides the option to provide callback functions
so that any interested module can be informed when the configuration changes.
"""
def __init__(self, name=None, **kwargs):
self._name = name
self.update_functions = None
for key, value in iteritems(kwargs):
self[key] = value
def __setitem__(self, key, value):
super(Parameters, self).__setitem__(key, value)
# If a Parameters dictionary is being added as a child,
# ask it to tell us when it is updated
if isinstance(value, Parameters):
child_update = lambda x: self._updated(*x)
value.update_functions.push(child_update)
# Tell everyone we've been updated
self._updated(key, value)
def _updated(self, key, value):
""" Call any provided update functions so everyone knows we've been updated
"""
for f in self.update_functions:
f(key, value)
parameters = Parameters()
parameters["log_level"] = 'info'
|
Add parameter for log level
|
Add parameter for log level
|
Python
|
mit
|
opesci/devito,opesci/devito
|
python
|
## Code Before:
"""The parameters dictionary contains global parameter settings."""
__all__ = ['Parameters', 'parameters']
# Be EXTREMELY careful when writing to a Parameters dictionary
# Read here for reference: http://wiki.c2.com/?GlobalVariablesAreBad
# If any issues related to global state arise, the following class should
# be made immutable. It shall only be written to at application startup
# and never modified.
class Parameters(dict):
""" A dictionary-like class to hold global configuration parameters for devito
On top of a normal dict, this provides the option to provide callback functions
so that any interested module can be informed when the configuration changes.
"""
def __init__(self, name=None, **kwargs):
self._name = name
self.update_functions = None
for key, value in iteritems(kwargs):
self[key] = value
def __setitem__(self, key, value):
super(Parameters, self).__setitem__(key, value)
# If a Parameters dictionary is being added as a child,
# ask it to tell us when it is updated
if isinstance(value, Parameters):
child_update = lambda x: self._updated(*x)
value.update_functions.push(child_update)
# Tell everyone we've been updated
self._updated(key, value)
def _updated(self, key, value):
""" Call any provided update functions so everyone knows we've been updated
"""
for f in self.update_functions:
f(key, value)
## Instruction:
Add parameter for log level
## Code After:
"""The parameters dictionary contains global parameter settings."""
__all__ = ['Parameters', 'parameters']
# Be EXTREMELY careful when writing to a Parameters dictionary
# Read here for reference: http://wiki.c2.com/?GlobalVariablesAreBad
# https://softwareengineering.stackexchange.com/questions/148108/why-is-global-state-so-evil
# If any issues related to global state arise, the following class should
# be made immutable. It shall only be written to at application startup
# and never modified.
class Parameters(dict):
""" A dictionary-like class to hold global configuration parameters for devito
On top of a normal dict, this provides the option to provide callback functions
so that any interested module can be informed when the configuration changes.
"""
def __init__(self, name=None, **kwargs):
self._name = name
self.update_functions = None
for key, value in iteritems(kwargs):
self[key] = value
def __setitem__(self, key, value):
super(Parameters, self).__setitem__(key, value)
# If a Parameters dictionary is being added as a child,
# ask it to tell us when it is updated
if isinstance(value, Parameters):
child_update = lambda x: self._updated(*x)
value.update_functions.push(child_update)
# Tell everyone we've been updated
self._updated(key, value)
def _updated(self, key, value):
""" Call any provided update functions so everyone knows we've been updated
"""
for f in self.update_functions:
f(key, value)
parameters = Parameters()
parameters["log_level"] = 'info'
|
...
# Be EXTREMELY careful when writing to a Parameters dictionary
# Read here for reference: http://wiki.c2.com/?GlobalVariablesAreBad
# https://softwareengineering.stackexchange.com/questions/148108/why-is-global-state-so-evil
# If any issues related to global state arise, the following class should
# be made immutable. It shall only be written to at application startup
# and never modified.
...
"""
for f in self.update_functions:
f(key, value)
parameters = Parameters()
parameters["log_level"] = 'info'
...
|
a4df3f966e232e8327522a3db32870f5dcea0c03
|
cartridge/shop/middleware.py
|
cartridge/shop/middleware.py
|
from mezzanine.conf import settings
from cartridge.shop.models import Cart
class ShopMiddleware(object):
def __init__(self):
old = ("SHOP_SSL_ENABLED", "SHOP_FORCE_HOST", "SHOP_FORCE_SSL_VIEWS")
for name in old:
try:
getattr(settings, name)
except AttributeError:
pass
else:
import warnings
warnings.warn("The settings %s are deprecated; "
"use SSL_ENABLED, SSL_FORCE_HOST and "
"SSL_FORCE_URL_PREFIXES, and add "
"mezzanine.core.middleware.SSLRedirectMiddleware to "
"MIDDLEWARE_CLASSES." % ", ".join(old))
break
def process_request(self, request):
"""
Adds cart and wishlist attributes to the current request.
"""
request.cart = Cart.objects.from_request(request)
wishlist = request.COOKIES.get("wishlist", "").split(",")
if not wishlist[0]:
wishlist = []
request.wishlist = wishlist
|
from mezzanine.conf import settings
from cartridge.shop.models import Cart
class SSLRedirect(object):
def __init__(self):
old = ("SHOP_SSL_ENABLED", "SHOP_FORCE_HOST", "SHOP_FORCE_SSL_VIEWS")
for name in old:
try:
getattr(settings, name)
except AttributeError:
pass
else:
import warnings
warnings.warn("The settings %s are deprecated; "
"use SSL_ENABLED, SSL_FORCE_HOST and "
"SSL_FORCE_URL_PREFIXES, and add "
"mezzanine.core.middleware.SSLRedirectMiddleware to "
"MIDDLEWARE_CLASSES." % ", ".join(old))
break
class ShopMiddleware(SSLRedirect):
"""
Adds cart and wishlist attributes to the current request.
"""
def process_request(self, request):
request.cart = Cart.objects.from_request(request)
wishlist = request.COOKIES.get("wishlist", "").split(",")
if not wishlist[0]:
wishlist = []
request.wishlist = wishlist
|
Add deprecated fallback for SSLMiddleware.
|
Add deprecated fallback for SSLMiddleware.
|
Python
|
bsd-2-clause
|
traxxas/cartridge,traxxas/cartridge,Parisson/cartridge,Kniyl/cartridge,syaiful6/cartridge,jaywink/cartridge-reservable,wbtuomela/cartridge,syaiful6/cartridge,ryneeverett/cartridge,wbtuomela/cartridge,dsanders11/cartridge,wbtuomela/cartridge,dsanders11/cartridge,wyzex/cartridge,jaywink/cartridge-reservable,Parisson/cartridge,viaregio/cartridge,ryneeverett/cartridge,jaywink/cartridge-reservable,Kniyl/cartridge,traxxas/cartridge,wyzex/cartridge,wyzex/cartridge,stephenmcd/cartridge,Parisson/cartridge,dsanders11/cartridge,stephenmcd/cartridge,ryneeverett/cartridge,viaregio/cartridge,stephenmcd/cartridge,syaiful6/cartridge,Kniyl/cartridge
|
python
|
## Code Before:
from mezzanine.conf import settings
from cartridge.shop.models import Cart
class ShopMiddleware(object):
def __init__(self):
old = ("SHOP_SSL_ENABLED", "SHOP_FORCE_HOST", "SHOP_FORCE_SSL_VIEWS")
for name in old:
try:
getattr(settings, name)
except AttributeError:
pass
else:
import warnings
warnings.warn("The settings %s are deprecated; "
"use SSL_ENABLED, SSL_FORCE_HOST and "
"SSL_FORCE_URL_PREFIXES, and add "
"mezzanine.core.middleware.SSLRedirectMiddleware to "
"MIDDLEWARE_CLASSES." % ", ".join(old))
break
def process_request(self, request):
"""
Adds cart and wishlist attributes to the current request.
"""
request.cart = Cart.objects.from_request(request)
wishlist = request.COOKIES.get("wishlist", "").split(",")
if not wishlist[0]:
wishlist = []
request.wishlist = wishlist
## Instruction:
Add deprecated fallback for SSLMiddleware.
## Code After:
from mezzanine.conf import settings
from cartridge.shop.models import Cart
class SSLRedirect(object):
def __init__(self):
old = ("SHOP_SSL_ENABLED", "SHOP_FORCE_HOST", "SHOP_FORCE_SSL_VIEWS")
for name in old:
try:
getattr(settings, name)
except AttributeError:
pass
else:
import warnings
warnings.warn("The settings %s are deprecated; "
"use SSL_ENABLED, SSL_FORCE_HOST and "
"SSL_FORCE_URL_PREFIXES, and add "
"mezzanine.core.middleware.SSLRedirectMiddleware to "
"MIDDLEWARE_CLASSES." % ", ".join(old))
break
class ShopMiddleware(SSLRedirect):
"""
Adds cart and wishlist attributes to the current request.
"""
def process_request(self, request):
request.cart = Cart.objects.from_request(request)
wishlist = request.COOKIES.get("wishlist", "").split(",")
if not wishlist[0]:
wishlist = []
request.wishlist = wishlist
|
...
from cartridge.shop.models import Cart
class SSLRedirect(object):
def __init__(self):
old = ("SHOP_SSL_ENABLED", "SHOP_FORCE_HOST", "SHOP_FORCE_SSL_VIEWS")
...
"MIDDLEWARE_CLASSES." % ", ".join(old))
break
class ShopMiddleware(SSLRedirect):
"""
Adds cart and wishlist attributes to the current request.
"""
def process_request(self, request):
request.cart = Cart.objects.from_request(request)
wishlist = request.COOKIES.get("wishlist", "").split(",")
if not wishlist[0]:
...
|
9912974a283912acd31fa4ee85de2fb44c2cf862
|
nn/model.py
|
nn/model.py
|
import abc
import tensorflow as tf
class Model(metaclass=abc.ABCMeta):
@abc.astractmethod
def __init__(self, **hyperparameters_and_initial_parameters):
return NotImplemented
@abc.astractmethod
def train(self, *input_tensors) -> tf.Tensor: # scalar loss
return NotImplemented
@abc.astractmethod
def test(self, *input_tensors) -> tf.Tensor: # labels
return NotImplemented
|
import abc
import tensorflow as tf
class Model(metaclass=abc.ABCMeta):
@abc.astractmethod
def __init__(self, **hyperparameters_and_initial_parameters):
return NotImplemented
@abc.astractmethod
def train(self, *input_tensors) -> tf.Operation: # training operation
return NotImplemented
@abc.astractmethod
def test(self, *input_tensors) -> tf.Tensor: # labels
return NotImplemented
|
Fix type annotation for Model.train()
|
Fix type annotation for Model.train()
|
Python
|
unlicense
|
raviqqe/tensorflow-extenteten,raviqqe/tensorflow-extenteten
|
python
|
## Code Before:
import abc
import tensorflow as tf
class Model(metaclass=abc.ABCMeta):
@abc.astractmethod
def __init__(self, **hyperparameters_and_initial_parameters):
return NotImplemented
@abc.astractmethod
def train(self, *input_tensors) -> tf.Tensor: # scalar loss
return NotImplemented
@abc.astractmethod
def test(self, *input_tensors) -> tf.Tensor: # labels
return NotImplemented
## Instruction:
Fix type annotation for Model.train()
## Code After:
import abc
import tensorflow as tf
class Model(metaclass=abc.ABCMeta):
@abc.astractmethod
def __init__(self, **hyperparameters_and_initial_parameters):
return NotImplemented
@abc.astractmethod
def train(self, *input_tensors) -> tf.Operation: # training operation
return NotImplemented
@abc.astractmethod
def test(self, *input_tensors) -> tf.Tensor: # labels
return NotImplemented
|
...
return NotImplemented
@abc.astractmethod
def train(self, *input_tensors) -> tf.Operation: # training operation
return NotImplemented
@abc.astractmethod
...
|
c8d2d6a4eace2107639badd17983e048dc9259e5
|
mfh.py
|
mfh.py
|
import os
import sys
import time
from multiprocessing import Process, Event
import mfhclient
import update
from arguments import parse
def main():
q = Event()
mfhclient_process = Process(
args=(args, q,),
name="mfhclient_process",
target=mfhclient.main,
)
mfhclient_process.start()
trigger_process = Process(
args=(q,),
name="trigger_process",
target=update.trigger,
)
trigger_process.start()
trigger_process.join()
while mfhclient_process.is_alive():
time.sleep(5)
else:
update.pull("origin", "master")
sys.stdout.flush()
os.execl(sys.executable, sys.executable, *sys.argv)
if __name__ == '__main__':
# Parse arguments
args = parse()
main()
|
import os
import sys
import time
from multiprocessing import Process, Event
import mfhclient
import update
from arguments import parse
from settings import HONEYPORT
def main():
update_event = Event()
mfhclient_process = Process(
args=(args, update_event,),
name="mfhclient_process",
target=mfhclient.main,
)
if args.client is not None:
mfhclient_process.start()
trigger_process = Process(
args=(update_event,),
name="trigger_process",
target=update.trigger,
)
trigger_process.start()
trigger_process.join()
while mfhclient_process.is_alive():
time.sleep(5)
else:
update.pull("origin", "master")
sys.stdout.flush()
os.execl(sys.executable, sys.executable, *sys.argv)
if __name__ == '__main__':
# Parse arguments
args = parse()
if args.c:
args.client = HONEYPORT
main()
|
Add condition to only launch client if -c or --client is specified
|
Add condition to only launch client if -c or --client is specified
|
Python
|
mit
|
Zloool/manyfaced-honeypot
|
python
|
## Code Before:
import os
import sys
import time
from multiprocessing import Process, Event
import mfhclient
import update
from arguments import parse
def main():
q = Event()
mfhclient_process = Process(
args=(args, q,),
name="mfhclient_process",
target=mfhclient.main,
)
mfhclient_process.start()
trigger_process = Process(
args=(q,),
name="trigger_process",
target=update.trigger,
)
trigger_process.start()
trigger_process.join()
while mfhclient_process.is_alive():
time.sleep(5)
else:
update.pull("origin", "master")
sys.stdout.flush()
os.execl(sys.executable, sys.executable, *sys.argv)
if __name__ == '__main__':
# Parse arguments
args = parse()
main()
## Instruction:
Add condition to only launch client if -c or --client is specified
## Code After:
import os
import sys
import time
from multiprocessing import Process, Event
import mfhclient
import update
from arguments import parse
from settings import HONEYPORT
def main():
update_event = Event()
mfhclient_process = Process(
args=(args, update_event,),
name="mfhclient_process",
target=mfhclient.main,
)
if args.client is not None:
mfhclient_process.start()
trigger_process = Process(
args=(update_event,),
name="trigger_process",
target=update.trigger,
)
trigger_process.start()
trigger_process.join()
while mfhclient_process.is_alive():
time.sleep(5)
else:
update.pull("origin", "master")
sys.stdout.flush()
os.execl(sys.executable, sys.executable, *sys.argv)
if __name__ == '__main__':
# Parse arguments
args = parse()
if args.c:
args.client = HONEYPORT
main()
|
// ... existing code ...
import mfhclient
import update
from arguments import parse
from settings import HONEYPORT
def main():
update_event = Event()
mfhclient_process = Process(
args=(args, update_event,),
name="mfhclient_process",
target=mfhclient.main,
)
if args.client is not None:
mfhclient_process.start()
trigger_process = Process(
args=(update_event,),
name="trigger_process",
target=update.trigger,
)
// ... modified code ...
if __name__ == '__main__':
# Parse arguments
args = parse()
if args.c:
args.client = HONEYPORT
main()
// ... rest of the code ...
|
8aec91209521d7f2701e63c681f4b765c1b2c6bb
|
src/program/lwaftr/tests/subcommands/run_test.py
|
src/program/lwaftr/tests/subcommands/run_test.py
|
import unittest
from test_env import DATA_DIR, SNABB_CMD, BaseTestCase, nic_names
SNABB_PCI0, SNABB_PCI1 = nic_names()
@unittest.skipUnless(SNABB_PCI0 and SNABB_PCI1, 'NICs not configured')
class TestRun(BaseTestCase):
cmd_args = (
str(SNABB_CMD), 'lwaftr', 'run',
'--duration', '1',
'--bench-file', '/dev/null',
'--conf', str(DATA_DIR / 'icmp_on_fail.conf'),
'--v4', SNABB_PCI0,
'--v6', SNABB_PCI1
)
def test_run(self):
output = self.run_cmd(self.cmd_args)
self.assertIn(b'link report', output,
b'\n'.join((b'OUTPUT', output)))
if __name__ == '__main__':
unittest.main()
|
import unittest
from test_env import DATA_DIR, SNABB_CMD, BaseTestCase, nic_names, ENC
SNABB_PCI0, SNABB_PCI1 = nic_names()
@unittest.skipUnless(SNABB_PCI0 and SNABB_PCI1, 'NICs not configured')
class TestRun(BaseTestCase):
cmd_args = (
str(SNABB_CMD), 'lwaftr', 'run',
'--duration', '1',
'--bench-file', '/dev/null',
'--conf', str(DATA_DIR / 'icmp_on_fail.conf'),
'--v4', SNABB_PCI0,
'--v6', SNABB_PCI1
)
def test_run(self):
output = self.run_cmd(self.cmd_args)
self.assertIn(b'link report', output,
b'\n'.join((b'OUTPUT', output)))
def test_run_on_a_stick_migration(self):
# The LwAFTR should be abel to migrate from non-on-a-stick -> on-a-stick
run_cmd = list(self.cmd_args)[:-4]
run_cmd.extend((
"--on-a-stick",
SNABB_PCI0
))
# The best way to check is to see if it's what it's saying it'll do.
output = self.run_cmd(run_cmd).decode(ENC)
self.assertIn("Migrating instance", output)
migration_line = [l for l in output.split("\n") if "Migrating" in l][0]
self.assertIn(SNABB_PCI0, migration_line)
if __name__ == '__main__':
unittest.main()
|
Add test for migration using --on-a-stick command
|
Add test for migration using --on-a-stick command
When --on-a-stick command is used with a single instance defined it
should migrate the instance to work in a stick (replace the device
with the one provided in the flag and remove the device in
`external-device`). It just checks that it works by not crashing and
says it's going to do it in the output.
|
Python
|
apache-2.0
|
alexandergall/snabbswitch,Igalia/snabbswitch,snabbco/snabb,snabbco/snabb,snabbco/snabb,dpino/snabbswitch,dpino/snabb,eugeneia/snabb,Igalia/snabb,dpino/snabb,eugeneia/snabb,eugeneia/snabbswitch,eugeneia/snabb,SnabbCo/snabbswitch,eugeneia/snabb,Igalia/snabb,eugeneia/snabb,snabbco/snabb,eugeneia/snabb,Igalia/snabb,dpino/snabbswitch,Igalia/snabb,alexandergall/snabbswitch,alexandergall/snabbswitch,Igalia/snabb,snabbco/snabb,dpino/snabb,alexandergall/snabbswitch,dpino/snabb,Igalia/snabbswitch,Igalia/snabb,eugeneia/snabbswitch,alexandergall/snabbswitch,snabbco/snabb,Igalia/snabbswitch,dpino/snabb,Igalia/snabb,dpino/snabbswitch,Igalia/snabbswitch,Igalia/snabb,snabbco/snabb,alexandergall/snabbswitch,eugeneia/snabbswitch,Igalia/snabbswitch,SnabbCo/snabbswitch,alexandergall/snabbswitch,snabbco/snabb,eugeneia/snabb,alexandergall/snabbswitch,SnabbCo/snabbswitch,dpino/snabb,dpino/snabb,SnabbCo/snabbswitch,dpino/snabbswitch,eugeneia/snabb,eugeneia/snabbswitch
|
python
|
## Code Before:
import unittest
from test_env import DATA_DIR, SNABB_CMD, BaseTestCase, nic_names
SNABB_PCI0, SNABB_PCI1 = nic_names()
@unittest.skipUnless(SNABB_PCI0 and SNABB_PCI1, 'NICs not configured')
class TestRun(BaseTestCase):
cmd_args = (
str(SNABB_CMD), 'lwaftr', 'run',
'--duration', '1',
'--bench-file', '/dev/null',
'--conf', str(DATA_DIR / 'icmp_on_fail.conf'),
'--v4', SNABB_PCI0,
'--v6', SNABB_PCI1
)
def test_run(self):
output = self.run_cmd(self.cmd_args)
self.assertIn(b'link report', output,
b'\n'.join((b'OUTPUT', output)))
if __name__ == '__main__':
unittest.main()
## Instruction:
Add test for migration using --on-a-stick command
When --on-a-stick command is used with a single instance defined it
should migrate the instance to work in a stick (replace the device
with the one provided in the flag and remove the device in
`external-device`). It just checks that it works by not crashing and
says it's going to do it in the output.
## Code After:
import unittest
from test_env import DATA_DIR, SNABB_CMD, BaseTestCase, nic_names, ENC
SNABB_PCI0, SNABB_PCI1 = nic_names()
@unittest.skipUnless(SNABB_PCI0 and SNABB_PCI1, 'NICs not configured')
class TestRun(BaseTestCase):
cmd_args = (
str(SNABB_CMD), 'lwaftr', 'run',
'--duration', '1',
'--bench-file', '/dev/null',
'--conf', str(DATA_DIR / 'icmp_on_fail.conf'),
'--v4', SNABB_PCI0,
'--v6', SNABB_PCI1
)
def test_run(self):
output = self.run_cmd(self.cmd_args)
self.assertIn(b'link report', output,
b'\n'.join((b'OUTPUT', output)))
def test_run_on_a_stick_migration(self):
# The LwAFTR should be abel to migrate from non-on-a-stick -> on-a-stick
run_cmd = list(self.cmd_args)[:-4]
run_cmd.extend((
"--on-a-stick",
SNABB_PCI0
))
# The best way to check is to see if it's what it's saying it'll do.
output = self.run_cmd(run_cmd).decode(ENC)
self.assertIn("Migrating instance", output)
migration_line = [l for l in output.split("\n") if "Migrating" in l][0]
self.assertIn(SNABB_PCI0, migration_line)
if __name__ == '__main__':
unittest.main()
|
// ... existing code ...
import unittest
from test_env import DATA_DIR, SNABB_CMD, BaseTestCase, nic_names, ENC
SNABB_PCI0, SNABB_PCI1 = nic_names()
// ... modified code ...
self.assertIn(b'link report', output,
b'\n'.join((b'OUTPUT', output)))
def test_run_on_a_stick_migration(self):
# The LwAFTR should be abel to migrate from non-on-a-stick -> on-a-stick
run_cmd = list(self.cmd_args)[:-4]
run_cmd.extend((
"--on-a-stick",
SNABB_PCI0
))
# The best way to check is to see if it's what it's saying it'll do.
output = self.run_cmd(run_cmd).decode(ENC)
self.assertIn("Migrating instance", output)
migration_line = [l for l in output.split("\n") if "Migrating" in l][0]
self.assertIn(SNABB_PCI0, migration_line)
if __name__ == '__main__':
unittest.main()
// ... rest of the code ...
|
2a7d28573d1e4f07250da1d30209304fdb6de90d
|
sqlobject/tests/test_blob.py
|
sqlobject/tests/test_blob.py
|
import pytest
from sqlobject import BLOBCol, SQLObject
from sqlobject.compat import PY2
from sqlobject.tests.dbtest import setupClass, supports
########################################
# BLOB columns
########################################
class ImageData(SQLObject):
image = BLOBCol(default=b'emptydata', length=256)
def test_BLOBCol():
if not supports('blobData'):
pytest.skip("blobData isn't supported")
setupClass(ImageData)
if PY2:
data = ''.join([chr(x) for x in range(256)])
else:
data = bytes(range(256))
prof = ImageData()
prof.image = data
iid = prof.id
ImageData._connection.cache.clear()
prof2 = ImageData.get(iid)
assert prof2.image == data
ImageData(image='string')
assert ImageData.selectBy(image='string').count() == 1
|
import pytest
from sqlobject import BLOBCol, SQLObject
from sqlobject.compat import PY2
from sqlobject.tests.dbtest import setupClass, supports
########################################
# BLOB columns
########################################
class ImageData(SQLObject):
image = BLOBCol(default=b'emptydata', length=256)
def test_BLOBCol():
if not supports('blobData'):
pytest.skip("blobData isn't supported")
setupClass(ImageData)
if PY2:
data = ''.join([chr(x) for x in range(256)])
else:
data = bytes(range(256))
prof = ImageData(image=data)
iid = prof.id
ImageData._connection.cache.clear()
prof2 = ImageData.get(iid)
assert prof2.image == data
ImageData(image=b'string')
assert ImageData.selectBy(image=b'string').count() == 1
|
Use byte string for test
|
Tests(blob): Use byte string for test
|
Python
|
lgpl-2.1
|
sqlobject/sqlobject,drnlm/sqlobject,sqlobject/sqlobject,drnlm/sqlobject
|
python
|
## Code Before:
import pytest
from sqlobject import BLOBCol, SQLObject
from sqlobject.compat import PY2
from sqlobject.tests.dbtest import setupClass, supports
########################################
# BLOB columns
########################################
class ImageData(SQLObject):
image = BLOBCol(default=b'emptydata', length=256)
def test_BLOBCol():
if not supports('blobData'):
pytest.skip("blobData isn't supported")
setupClass(ImageData)
if PY2:
data = ''.join([chr(x) for x in range(256)])
else:
data = bytes(range(256))
prof = ImageData()
prof.image = data
iid = prof.id
ImageData._connection.cache.clear()
prof2 = ImageData.get(iid)
assert prof2.image == data
ImageData(image='string')
assert ImageData.selectBy(image='string').count() == 1
## Instruction:
Tests(blob): Use byte string for test
## Code After:
import pytest
from sqlobject import BLOBCol, SQLObject
from sqlobject.compat import PY2
from sqlobject.tests.dbtest import setupClass, supports
########################################
# BLOB columns
########################################
class ImageData(SQLObject):
image = BLOBCol(default=b'emptydata', length=256)
def test_BLOBCol():
if not supports('blobData'):
pytest.skip("blobData isn't supported")
setupClass(ImageData)
if PY2:
data = ''.join([chr(x) for x in range(256)])
else:
data = bytes(range(256))
prof = ImageData(image=data)
iid = prof.id
ImageData._connection.cache.clear()
prof2 = ImageData.get(iid)
assert prof2.image == data
ImageData(image=b'string')
assert ImageData.selectBy(image=b'string').count() == 1
|
// ... existing code ...
else:
data = bytes(range(256))
prof = ImageData(image=data)
iid = prof.id
ImageData._connection.cache.clear()
// ... modified code ...
prof2 = ImageData.get(iid)
assert prof2.image == data
ImageData(image=b'string')
assert ImageData.selectBy(image=b'string').count() == 1
// ... rest of the code ...
|
931b6a0f7a83b495b52506f25b15a92b5c7a26af
|
setup.py
|
setup.py
|
import os
from setuptools import setup, find_packages
def read(fname):
with open(os.path.join(os.path.dirname(__file__), fname)) as f:
return f.read()
setup(
name='pynubank',
version='2.17.0',
url='https://github.com/andreroggeri/pynubank',
author='André Roggeri Campos',
author_email='[email protected]',
license='MIT',
packages=find_packages(),
package_data={'pynubank': ['queries/*.gql', 'utils/mocked_responses/*.json']},
install_requires=['requests', 'qrcode', 'pyOpenSSL', 'colorama', 'requests-pkcs12'],
setup_requires=['pytest-runner'],
long_description=read("README.md"),
long_description_content_type="text/markdown",
entry_points={
'console_scripts': [
'pynubank = pynubank.cli:main'
]
},
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
]
)
|
import os
from setuptools import setup, find_packages
def read(fname):
with open(os.path.join(os.path.dirname(__file__), fname)) as f:
return f.read()
setup(
name='pynubank',
version='2.17.0',
url='https://github.com/andreroggeri/pynubank',
author='André Roggeri Campos',
author_email='[email protected]',
license='MIT',
packages=find_packages(),
package_data={'pynubank': ['queries/*.gql', 'utils/mocked_responses/*.json']},
install_requires=['requests', 'qrcode', 'pyOpenSSL', 'colorama', 'requests-pkcs12'],
setup_requires=['pytest-runner'],
long_description=read("README.md"),
long_description_content_type="text/markdown",
entry_points={
'console_scripts': [
'pynubank = pynubank.cli:main'
]
},
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
]
)
|
Update classifiers to include python 3.10
|
chore: Update classifiers to include python 3.10
|
Python
|
mit
|
andreroggeri/pynubank
|
python
|
## Code Before:
import os
from setuptools import setup, find_packages
def read(fname):
with open(os.path.join(os.path.dirname(__file__), fname)) as f:
return f.read()
setup(
name='pynubank',
version='2.17.0',
url='https://github.com/andreroggeri/pynubank',
author='André Roggeri Campos',
author_email='[email protected]',
license='MIT',
packages=find_packages(),
package_data={'pynubank': ['queries/*.gql', 'utils/mocked_responses/*.json']},
install_requires=['requests', 'qrcode', 'pyOpenSSL', 'colorama', 'requests-pkcs12'],
setup_requires=['pytest-runner'],
long_description=read("README.md"),
long_description_content_type="text/markdown",
entry_points={
'console_scripts': [
'pynubank = pynubank.cli:main'
]
},
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
]
)
## Instruction:
chore: Update classifiers to include python 3.10
## Code After:
import os
from setuptools import setup, find_packages
def read(fname):
with open(os.path.join(os.path.dirname(__file__), fname)) as f:
return f.read()
setup(
name='pynubank',
version='2.17.0',
url='https://github.com/andreroggeri/pynubank',
author='André Roggeri Campos',
author_email='[email protected]',
license='MIT',
packages=find_packages(),
package_data={'pynubank': ['queries/*.gql', 'utils/mocked_responses/*.json']},
install_requires=['requests', 'qrcode', 'pyOpenSSL', 'colorama', 'requests-pkcs12'],
setup_requires=['pytest-runner'],
long_description=read("README.md"),
long_description_content_type="text/markdown",
entry_points={
'console_scripts': [
'pynubank = pynubank.cli:main'
]
},
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
]
)
|
// ... existing code ...
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
]
)
// ... rest of the code ...
|
74bb8764fbeb65cb4a5b67597f3af4e8c2773794
|
dataportal/replay/core.py
|
dataportal/replay/core.py
|
"""Module for Enaml widgets that are generally useful"""
from enaml.widgets.api import PushButton, Timer
from atom.api import Typed, observe, Event
from enaml.core.declarative import d_
from enaml.layout.api import (grid, align)
class ProgrammaticButton(PushButton):
clicked = d_(Event(bool), writable=True)
toggled = d_(Event(bool), writable=True)
class TimerButton(ProgrammaticButton):
timer = d_(Typed(Timer))
checkable = True
@observe('checked')
def checked_changed(self, changed):
if self.checked:
self.timer.start()
else:
self.timer.stop()
def generate_grid(container, num_cols):
""" Generate grid constraints with given number of columns.
Notes
-----
Shamelessly copied from enaml/examples/layout/advanced/factory_func.enaml
"""
rows = []
widgets = container.visible_widgets()
row_iters = (iter(widgets),) * num_cols
rows = list(zip(*row_iters))
return [grid(*rows, row_spacing=0, column_spacing=0, row_align='v_center',
column_align='h_center'),
align('width', *widgets)]
|
"""Module for Enaml widgets that are generally useful"""
from enaml.widgets.api import PushButton, Timer
from atom.api import Typed, observe, Event
from enaml.core.declarative import d_
from enaml.layout.api import (grid, align)
class ProgrammaticButton(PushButton):
clicked = d_(Event(bool), writable=True)
toggled = d_(Event(bool), writable=True)
class TimerButton(ProgrammaticButton):
timer = d_(Typed(Timer))
checkable = True
@observe('checked')
def checked_changed(self, changed):
if self.checked:
self.timer.start()
else:
self.timer.stop()
def generate_grid(container, num_cols):
""" Generate grid constraints with given number of columns.
Notes
-----
Shamelessly copied from enaml/examples/layout/advanced/factory_func.enaml
"""
rows = []
widgets = container.visible_widgets()
row_iters = (iter(widgets),) * num_cols
rows = list(zip(*row_iters))
return [grid(*rows, row_spacing=0, column_spacing=0, row_align='v_center',
column_align='h_center'),
align('width', *widgets)]
non_stateful_attrs = ['history']
def save_state(history, history_key, state):
for key, val in state.items():
if key in non_stateful_attrs:
state[key] = None
history.put(history_key, state)
|
Add helper function to save state
|
ENH: Add helper function to save state
Sanitizes the Atom.__getstate() function for any blacklisted
Atom members that are unimportant for state saving or
non-JSON-serializable
|
Python
|
bsd-3-clause
|
ericdill/datamuxer,NSLS-II/dataportal,danielballan/datamuxer,NSLS-II/dataportal,ericdill/databroker,ericdill/databroker,ericdill/datamuxer,danielballan/datamuxer,danielballan/dataportal,tacaswell/dataportal,tacaswell/dataportal,NSLS-II/datamuxer,danielballan/dataportal
|
python
|
## Code Before:
"""Module for Enaml widgets that are generally useful"""
from enaml.widgets.api import PushButton, Timer
from atom.api import Typed, observe, Event
from enaml.core.declarative import d_
from enaml.layout.api import (grid, align)
class ProgrammaticButton(PushButton):
clicked = d_(Event(bool), writable=True)
toggled = d_(Event(bool), writable=True)
class TimerButton(ProgrammaticButton):
timer = d_(Typed(Timer))
checkable = True
@observe('checked')
def checked_changed(self, changed):
if self.checked:
self.timer.start()
else:
self.timer.stop()
def generate_grid(container, num_cols):
""" Generate grid constraints with given number of columns.
Notes
-----
Shamelessly copied from enaml/examples/layout/advanced/factory_func.enaml
"""
rows = []
widgets = container.visible_widgets()
row_iters = (iter(widgets),) * num_cols
rows = list(zip(*row_iters))
return [grid(*rows, row_spacing=0, column_spacing=0, row_align='v_center',
column_align='h_center'),
align('width', *widgets)]
## Instruction:
ENH: Add helper function to save state
Sanitizes the Atom.__getstate() function for any blacklisted
Atom members that are unimportant for state saving or
non-JSON-serializable
## Code After:
"""Module for Enaml widgets that are generally useful"""
from enaml.widgets.api import PushButton, Timer
from atom.api import Typed, observe, Event
from enaml.core.declarative import d_
from enaml.layout.api import (grid, align)
class ProgrammaticButton(PushButton):
clicked = d_(Event(bool), writable=True)
toggled = d_(Event(bool), writable=True)
class TimerButton(ProgrammaticButton):
timer = d_(Typed(Timer))
checkable = True
@observe('checked')
def checked_changed(self, changed):
if self.checked:
self.timer.start()
else:
self.timer.stop()
def generate_grid(container, num_cols):
""" Generate grid constraints with given number of columns.
Notes
-----
Shamelessly copied from enaml/examples/layout/advanced/factory_func.enaml
"""
rows = []
widgets = container.visible_widgets()
row_iters = (iter(widgets),) * num_cols
rows = list(zip(*row_iters))
return [grid(*rows, row_spacing=0, column_spacing=0, row_align='v_center',
column_align='h_center'),
align('width', *widgets)]
non_stateful_attrs = ['history']
def save_state(history, history_key, state):
for key, val in state.items():
if key in non_stateful_attrs:
state[key] = None
history.put(history_key, state)
|
# ... existing code ...
return [grid(*rows, row_spacing=0, column_spacing=0, row_align='v_center',
column_align='h_center'),
align('width', *widgets)]
non_stateful_attrs = ['history']
def save_state(history, history_key, state):
for key, val in state.items():
if key in non_stateful_attrs:
state[key] = None
history.put(history_key, state)
# ... rest of the code ...
|
bd70ef56d95958b8f105bdff31b675d66c40bca8
|
serfnode/handler/supervisor.py
|
serfnode/handler/supervisor.py
|
import os
import subprocess
import docker_utils
import jinja2
env = jinja2.Environment(loader=jinja2.FileSystemLoader('/programs'))
def supervisor_install(block, **kwargs):
"""Update supervisor with `block` config.
- `block` is the name to a .conf template file (in directory
`/programs`)
- `kwargs` are the key/values to use in the template
"""
conf_filename = '{}.conf'.format(kwargs['target'])
template = env.get_template(block)
kwargs.update({
'DOCKER': docker_utils.DOCKER,
'DOCKER_SOCKET': docker_utils.DOCKER_SOCKET,
'DOCKER_RUN': docker_utils.DOCKER_RUN})
conf = template.render(kwargs)
with open(os.path.join(
'/etc/supervisor/conf.d', conf_filename), 'w') as f:
f.write(conf)
def supervisor_exec(*args):
return subprocess.check_output(
['supervisorctl'] + list(args))
def supervisor_update():
supervisor_exec('reread')
supervisor_exec('update')
def start(block, **kwargs):
supervisor_install(block, **kwargs)
supervisor_update()
supervisor_exec('start', '{}:*'.format(kwargs['target']))
def stop(block):
supervisor_exec('stop', '{}:*'.format(block))
|
import os
import subprocess
import docker_utils
import docker
import jinja2
env = jinja2.Environment(loader=jinja2.FileSystemLoader('/programs'))
def supervisor_install(block, **kwargs):
"""Update supervisor with `block` config.
- `block` is the name to a .conf template file (in directory
`/programs`)
- `kwargs` are the key/values to use in the template
"""
conf_filename = '{}.conf'.format(kwargs['target'])
template = env.get_template(block)
kwargs.update({
'DOCKER': docker_utils.DOCKER,
'DOCKER_SOCKET': docker_utils.DOCKER_SOCKET,
'DOCKER_RUN': docker_utils.DOCKER_RUN})
conf = template.render(kwargs)
with open(os.path.join(
'/etc/supervisor/conf.d', conf_filename), 'w') as f:
f.write(conf)
def supervisor_exec(*args):
return subprocess.check_output(
['supervisorctl'] + list(args))
def supervisor_update():
supervisor_exec('reread')
supervisor_exec('update')
def start(block, **kwargs):
supervisor_install(block, **kwargs)
supervisor_update()
supervisor_exec('start', '{}:*'.format(kwargs['target']))
def start_docker(target, name, cmdline):
start('app.conf', target=target,
ARGS='--cidfile=/app --name={} {}'.format(name, cmdline),
NAME=name)
def stop(block):
supervisor_exec('stop', '{}:*'.format(block))
|
Add convenience function to start docker
|
Add convenience function to start docker
Mainly to be used from supervisor.
|
Python
|
mit
|
waltermoreira/serfnode,waltermoreira/serfnode,waltermoreira/serfnode
|
python
|
## Code Before:
import os
import subprocess
import docker_utils
import jinja2
env = jinja2.Environment(loader=jinja2.FileSystemLoader('/programs'))
def supervisor_install(block, **kwargs):
"""Update supervisor with `block` config.
- `block` is the name to a .conf template file (in directory
`/programs`)
- `kwargs` are the key/values to use in the template
"""
conf_filename = '{}.conf'.format(kwargs['target'])
template = env.get_template(block)
kwargs.update({
'DOCKER': docker_utils.DOCKER,
'DOCKER_SOCKET': docker_utils.DOCKER_SOCKET,
'DOCKER_RUN': docker_utils.DOCKER_RUN})
conf = template.render(kwargs)
with open(os.path.join(
'/etc/supervisor/conf.d', conf_filename), 'w') as f:
f.write(conf)
def supervisor_exec(*args):
return subprocess.check_output(
['supervisorctl'] + list(args))
def supervisor_update():
supervisor_exec('reread')
supervisor_exec('update')
def start(block, **kwargs):
supervisor_install(block, **kwargs)
supervisor_update()
supervisor_exec('start', '{}:*'.format(kwargs['target']))
def stop(block):
supervisor_exec('stop', '{}:*'.format(block))
## Instruction:
Add convenience function to start docker
Mainly to be used from supervisor.
## Code After:
import os
import subprocess
import docker_utils
import docker
import jinja2
env = jinja2.Environment(loader=jinja2.FileSystemLoader('/programs'))
def supervisor_install(block, **kwargs):
"""Update supervisor with `block` config.
- `block` is the name to a .conf template file (in directory
`/programs`)
- `kwargs` are the key/values to use in the template
"""
conf_filename = '{}.conf'.format(kwargs['target'])
template = env.get_template(block)
kwargs.update({
'DOCKER': docker_utils.DOCKER,
'DOCKER_SOCKET': docker_utils.DOCKER_SOCKET,
'DOCKER_RUN': docker_utils.DOCKER_RUN})
conf = template.render(kwargs)
with open(os.path.join(
'/etc/supervisor/conf.d', conf_filename), 'w') as f:
f.write(conf)
def supervisor_exec(*args):
return subprocess.check_output(
['supervisorctl'] + list(args))
def supervisor_update():
supervisor_exec('reread')
supervisor_exec('update')
def start(block, **kwargs):
supervisor_install(block, **kwargs)
supervisor_update()
supervisor_exec('start', '{}:*'.format(kwargs['target']))
def start_docker(target, name, cmdline):
start('app.conf', target=target,
ARGS='--cidfile=/app --name={} {}'.format(name, cmdline),
NAME=name)
def stop(block):
supervisor_exec('stop', '{}:*'.format(block))
|
// ... existing code ...
import subprocess
import docker_utils
import docker
import jinja2
env = jinja2.Environment(loader=jinja2.FileSystemLoader('/programs'))
// ... modified code ...
supervisor_exec('start', '{}:*'.format(kwargs['target']))
def start_docker(target, name, cmdline):
start('app.conf', target=target,
ARGS='--cidfile=/app --name={} {}'.format(name, cmdline),
NAME=name)
def stop(block):
supervisor_exec('stop', '{}:*'.format(block))
// ... rest of the code ...
|
f849961e75dc956d669813fddb5b13627b224e1e
|
pyang/plugins/name.py
|
pyang/plugins/name.py
|
import optparse
from pyang import plugin
def pyang_plugin_init():
plugin.register_plugin(NamePlugin())
class NamePlugin(plugin.PyangPlugin):
def add_output_format(self, fmts):
self.multiple_modules = True
fmts['name'] = self
def add_opts(self, optparser):
optlist = [
optparse.make_option("--name-print-revision",
dest="print_revision",
action="store_true",
help="Print the name and revision in name@revision format"),
]
g = optparser.add_option_group("Name output specific options")
g.add_options(optlist)
def setup_fmt(self, ctx):
ctx.implicit_errors = False
def emit(self, ctx, modules, fd):
emit_name(ctx, modules, fd)
def emit_name(ctx, modules, fd):
for module in modules:
bstr = ""
rstr = ""
if ctx.opts.print_revision:
r = module.search_one('revision')
if r is not None:
rstr = '@%s' % r.arg
b = module.search_one('belongs-to')
if b is not None:
bstr = " (belongs-to %s)" % b.arg
fd.write("%s%s%s\n" % (module.arg, rstr, bstr))
|
import optparse
from pyang import plugin
def pyang_plugin_init():
plugin.register_plugin(NamePlugin())
class NamePlugin(plugin.PyangPlugin):
def add_output_format(self, fmts):
self.multiple_modules = True
fmts['name'] = self
def add_opts(self, optparser):
optlist = [
optparse.make_option("--name-print-revision",
dest="print_revision",
action="store_true",
help="Print the name and revision in name@revision format"),
]
g = optparser.add_option_group("Name output specific options")
g.add_options(optlist)
def setup_fmt(self, ctx):
ctx.implicit_errors = False
def emit(self, ctx, modules, fd):
emit_name(ctx, modules, fd)
def emit_name(ctx, modules, fd):
for module in modules:
bstr = ""
rstr = ""
if ctx.opts.print_revision:
rs = module.i_latest_revision
if rs is None:
r = module.search_one('revision')
if r is not None:
rs = r.arg
if rs is not None:
rstr = '@%s' % rs
b = module.search_one('belongs-to')
if b is not None:
bstr = " (belongs-to %s)" % b.arg
fd.write("%s%s%s\n" % (module.arg, rstr, bstr))
|
Use i_latest_revision to ensure we get the latest revision.
|
Use i_latest_revision to ensure we get the latest revision.
|
Python
|
isc
|
mbj4668/pyang,mbj4668/pyang
|
python
|
## Code Before:
import optparse
from pyang import plugin
def pyang_plugin_init():
plugin.register_plugin(NamePlugin())
class NamePlugin(plugin.PyangPlugin):
def add_output_format(self, fmts):
self.multiple_modules = True
fmts['name'] = self
def add_opts(self, optparser):
optlist = [
optparse.make_option("--name-print-revision",
dest="print_revision",
action="store_true",
help="Print the name and revision in name@revision format"),
]
g = optparser.add_option_group("Name output specific options")
g.add_options(optlist)
def setup_fmt(self, ctx):
ctx.implicit_errors = False
def emit(self, ctx, modules, fd):
emit_name(ctx, modules, fd)
def emit_name(ctx, modules, fd):
for module in modules:
bstr = ""
rstr = ""
if ctx.opts.print_revision:
r = module.search_one('revision')
if r is not None:
rstr = '@%s' % r.arg
b = module.search_one('belongs-to')
if b is not None:
bstr = " (belongs-to %s)" % b.arg
fd.write("%s%s%s\n" % (module.arg, rstr, bstr))
## Instruction:
Use i_latest_revision to ensure we get the latest revision.
## Code After:
import optparse
from pyang import plugin
def pyang_plugin_init():
plugin.register_plugin(NamePlugin())
class NamePlugin(plugin.PyangPlugin):
def add_output_format(self, fmts):
self.multiple_modules = True
fmts['name'] = self
def add_opts(self, optparser):
optlist = [
optparse.make_option("--name-print-revision",
dest="print_revision",
action="store_true",
help="Print the name and revision in name@revision format"),
]
g = optparser.add_option_group("Name output specific options")
g.add_options(optlist)
def setup_fmt(self, ctx):
ctx.implicit_errors = False
def emit(self, ctx, modules, fd):
emit_name(ctx, modules, fd)
def emit_name(ctx, modules, fd):
for module in modules:
bstr = ""
rstr = ""
if ctx.opts.print_revision:
rs = module.i_latest_revision
if rs is None:
r = module.search_one('revision')
if r is not None:
rs = r.arg
if rs is not None:
rstr = '@%s' % rs
b = module.search_one('belongs-to')
if b is not None:
bstr = " (belongs-to %s)" % b.arg
fd.write("%s%s%s\n" % (module.arg, rstr, bstr))
|
// ... existing code ...
bstr = ""
rstr = ""
if ctx.opts.print_revision:
rs = module.i_latest_revision
if rs is None:
r = module.search_one('revision')
if r is not None:
rs = r.arg
if rs is not None:
rstr = '@%s' % rs
b = module.search_one('belongs-to')
if b is not None:
bstr = " (belongs-to %s)" % b.arg
// ... rest of the code ...
|
b254afaea67e08d83611423a3a1ec7afcee849b3
|
setup.py
|
setup.py
|
"""Install Wallace as a command line utility."""
from setuptools import setup
setup_args = dict(
name='wallace-platform',
packages=['wallace'],
version="0.11.2",
description='Wallace, a platform for experimental cultural evolution',
url='http://github.com/berkeley-cocosci/Wallace',
author='Berkeley CoCoSci',
author_email='[email protected]',
license='MIT',
keywords=['science', 'cultural evolution', 'experiments', 'psychology'],
classifiers=[],
zip_safe=False,
entry_points={
'console_scripts': [
'wallace = wallace.command_line:wallace',
],
}
)
# Read in requirements.txt for dependencies.
setup_args['install_requires'] = install_requires = []
setup_args['dependency_links'] = dependency_links = []
with open('requirements.txt') as f:
for line in f.readlines():
req = line.strip()
if not req or req.startswith('#'):
continue
if req.startswith('-e '):
dependency_links.append(req[3:])
else:
install_requires.append(req)
setup(**setup_args)
|
"""Install Wallace as a command line utility."""
from setuptools import setup
setup_args = dict(
name='wallace-platform',
packages=['wallace'],
version="0.11.2",
description='Wallace, a platform for experimental cultural evolution',
url='http://github.com/berkeley-cocosci/Wallace',
author='Berkeley CoCoSci',
author_email='[email protected]',
license='MIT',
keywords=['science', 'cultural evolution', 'experiments', 'psychology'],
classifiers=[],
zip_safe=False,
entry_points={
'console_scripts': [
'wallace = wallace.command_line:wallace',
],
},
dependency_links=['-e git+git://github.com/berkeley-cocosci/psiTurk.git@wallace3#egg=psiturk']
)
# Read in requirements.txt for dependencies.
setup_args['install_requires'] = install_requires = []
setup_args['dependency_links'] = dependency_links = []
with open('requirements.txt') as f:
for line in f.readlines():
req = line.strip()
if not req or req.startswith('#'):
continue
if req.startswith('-e '):
dependency_links.append(req[3:])
else:
install_requires.append(req)
setup(**setup_args)
|
Add custom psiTurk as dependency link
|
Add custom psiTurk as dependency link
|
Python
|
mit
|
jcpeterson/Dallinger,jcpeterson/Dallinger,berkeley-cocosci/Wallace,Dallinger/Dallinger,jcpeterson/Dallinger,suchow/Wallace,Dallinger/Dallinger,suchow/Wallace,Dallinger/Dallinger,berkeley-cocosci/Wallace,jcpeterson/Dallinger,Dallinger/Dallinger,suchow/Wallace,jcpeterson/Dallinger,Dallinger/Dallinger,berkeley-cocosci/Wallace
|
python
|
## Code Before:
"""Install Wallace as a command line utility."""
from setuptools import setup
setup_args = dict(
name='wallace-platform',
packages=['wallace'],
version="0.11.2",
description='Wallace, a platform for experimental cultural evolution',
url='http://github.com/berkeley-cocosci/Wallace',
author='Berkeley CoCoSci',
author_email='[email protected]',
license='MIT',
keywords=['science', 'cultural evolution', 'experiments', 'psychology'],
classifiers=[],
zip_safe=False,
entry_points={
'console_scripts': [
'wallace = wallace.command_line:wallace',
],
}
)
# Read in requirements.txt for dependencies.
setup_args['install_requires'] = install_requires = []
setup_args['dependency_links'] = dependency_links = []
with open('requirements.txt') as f:
for line in f.readlines():
req = line.strip()
if not req or req.startswith('#'):
continue
if req.startswith('-e '):
dependency_links.append(req[3:])
else:
install_requires.append(req)
setup(**setup_args)
## Instruction:
Add custom psiTurk as dependency link
## Code After:
"""Install Wallace as a command line utility."""
from setuptools import setup
setup_args = dict(
name='wallace-platform',
packages=['wallace'],
version="0.11.2",
description='Wallace, a platform for experimental cultural evolution',
url='http://github.com/berkeley-cocosci/Wallace',
author='Berkeley CoCoSci',
author_email='[email protected]',
license='MIT',
keywords=['science', 'cultural evolution', 'experiments', 'psychology'],
classifiers=[],
zip_safe=False,
entry_points={
'console_scripts': [
'wallace = wallace.command_line:wallace',
],
},
dependency_links=['-e git+git://github.com/berkeley-cocosci/psiTurk.git@wallace3#egg=psiturk']
)
# Read in requirements.txt for dependencies.
setup_args['install_requires'] = install_requires = []
setup_args['dependency_links'] = dependency_links = []
with open('requirements.txt') as f:
for line in f.readlines():
req = line.strip()
if not req or req.startswith('#'):
continue
if req.startswith('-e '):
dependency_links.append(req[3:])
else:
install_requires.append(req)
setup(**setup_args)
|
// ... existing code ...
'console_scripts': [
'wallace = wallace.command_line:wallace',
],
},
dependency_links=['-e git+git://github.com/berkeley-cocosci/psiTurk.git@wallace3#egg=psiturk']
)
# Read in requirements.txt for dependencies.
// ... rest of the code ...
|
3bbfc62cb194c1c68ce24ffe9fa0732a0f00fd9c
|
test/664-raceway.py
|
test/664-raceway.py
|
assert_has_feature(
16, 10476, 25242, 'roads',
{ 'id': 28825404, 'kind': 'minor_road', 'highway': 'raceway' })
# https://www.openstreetmap.org/way/59440900
# Thunderoad Speedway Go-carts
assert_has_feature(
16, 10516, 25247, 'roads',
{ 'id': 59440900, 'kind': 'minor_road', 'highway': 'raceway' })
|
assert_has_feature(
16, 10476, 25242, 'roads',
{ 'id': 28825404, 'kind': 'minor_road', 'highway': 'raceway', 'sort_key': 375 })
# https://www.openstreetmap.org/way/59440900
# Thunderoad Speedway Go-carts
assert_has_feature(
16, 10516, 25247, 'roads',
{ 'id': 59440900, 'kind': 'minor_road', 'highway': 'raceway', 'sort_key': 375 })
|
Add sort_key assertion to raceway tests
|
Add sort_key assertion to raceway tests
|
Python
|
mit
|
mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource
|
python
|
## Code Before:
assert_has_feature(
16, 10476, 25242, 'roads',
{ 'id': 28825404, 'kind': 'minor_road', 'highway': 'raceway' })
# https://www.openstreetmap.org/way/59440900
# Thunderoad Speedway Go-carts
assert_has_feature(
16, 10516, 25247, 'roads',
{ 'id': 59440900, 'kind': 'minor_road', 'highway': 'raceway' })
## Instruction:
Add sort_key assertion to raceway tests
## Code After:
assert_has_feature(
16, 10476, 25242, 'roads',
{ 'id': 28825404, 'kind': 'minor_road', 'highway': 'raceway', 'sort_key': 375 })
# https://www.openstreetmap.org/way/59440900
# Thunderoad Speedway Go-carts
assert_has_feature(
16, 10516, 25247, 'roads',
{ 'id': 59440900, 'kind': 'minor_road', 'highway': 'raceway', 'sort_key': 375 })
|
// ... existing code ...
assert_has_feature(
16, 10476, 25242, 'roads',
{ 'id': 28825404, 'kind': 'minor_road', 'highway': 'raceway', 'sort_key': 375 })
# https://www.openstreetmap.org/way/59440900
# Thunderoad Speedway Go-carts
assert_has_feature(
16, 10516, 25247, 'roads',
{ 'id': 59440900, 'kind': 'minor_road', 'highway': 'raceway', 'sort_key': 375 })
// ... rest of the code ...
|
c0e99ae16eeeca28db4cb9cbc0a40008d36f36fd
|
liquibase-core/src/main/java/liquibase/sqlgenerator/core/GetViewDefinitionGeneratorDB2.java
|
liquibase-core/src/main/java/liquibase/sqlgenerator/core/GetViewDefinitionGeneratorDB2.java
|
package liquibase.sqlgenerator.core;
import liquibase.CatalogAndSchema;
import liquibase.database.Database;
import liquibase.database.core.AbstractDb2Database;
import liquibase.database.core.Db2zDatabase;
import liquibase.sql.Sql;
import liquibase.sql.UnparsedSql;
import liquibase.sqlgenerator.SqlGenerator;
import liquibase.sqlgenerator.SqlGeneratorChain;
import liquibase.statement.core.GetViewDefinitionStatement;
public class GetViewDefinitionGeneratorDB2 extends GetViewDefinitionGenerator {
@Override
public int getPriority() {
return SqlGenerator.PRIORITY_DATABASE;
}
@Override
public boolean supports(GetViewDefinitionStatement statement, Database database) {
return database instanceof AbstractDb2Database;
}
@Override
public Sql[] generateSql(GetViewDefinitionStatement statement, Database database, SqlGeneratorChain sqlGeneratorChain) {
CatalogAndSchema schema = new CatalogAndSchema(statement.getCatalogName(), statement.getSchemaName()).customize(database);
if (database instanceof Db2zDatabase) {
return new Sql[] {
new UnparsedSql("select STATEMENT AS view_definition from SYSIBM.SYSVIEWS where NAME='" + statement.getViewName() + "' and (PATHSCHEMAS LIKE '%" + schema.getSchemaName() + "%' OR CREATOR = '" + schema.getSchemaName() + "')")
};
}
return new Sql[] {
new UnparsedSql("select view_definition from SYSIBM.VIEWS where TABLE_NAME='" + statement.getViewName() + "' and TABLE_SCHEMA='" + schema.getSchemaName() + "'")
};
}
}
|
package liquibase.sqlgenerator.core;
import liquibase.CatalogAndSchema;
import liquibase.database.Database;
import liquibase.database.core.AbstractDb2Database;
import liquibase.database.core.Db2zDatabase;
import liquibase.sql.Sql;
import liquibase.sql.UnparsedSql;
import liquibase.sqlgenerator.SqlGenerator;
import liquibase.sqlgenerator.SqlGeneratorChain;
import liquibase.statement.core.GetViewDefinitionStatement;
public class GetViewDefinitionGeneratorDB2 extends GetViewDefinitionGenerator {
@Override
public int getPriority() {
return SqlGenerator.PRIORITY_DATABASE;
}
@Override
public boolean supports(GetViewDefinitionStatement statement, Database database) {
return database instanceof AbstractDb2Database;
}
@Override
public Sql[] generateSql(GetViewDefinitionStatement statement, Database database, SqlGeneratorChain sqlGeneratorChain) {
CatalogAndSchema schema = new CatalogAndSchema(statement.getCatalogName(), statement.getSchemaName()).customize(database);
if (database instanceof Db2zDatabase) {
return new Sql[] {
new UnparsedSql("select STATEMENT AS view_definition from SYSIBM.SYSVIEWS where NAME='" + statement.getViewName() + "' and CREATOR = '" + schema.getSchemaName() + "'")
};
}
return new Sql[] {
new UnparsedSql("select view_definition from SYSIBM.VIEWS where TABLE_NAME='" + statement.getViewName() + "' and TABLE_SCHEMA='" + schema.getSchemaName() + "'")
};
}
}
|
Remove redundant PATHSCHEMAS from where part of SQL statement looking for a view definition to make query return unique only results
|
[DAT-12296] Remove redundant PATHSCHEMAS from where part of SQL statement looking for a view definition to make query return unique only results
|
Java
|
apache-2.0
|
Datical/liquibase,Datical/liquibase,Datical/liquibase,Datical/liquibase
|
java
|
## Code Before:
package liquibase.sqlgenerator.core;
import liquibase.CatalogAndSchema;
import liquibase.database.Database;
import liquibase.database.core.AbstractDb2Database;
import liquibase.database.core.Db2zDatabase;
import liquibase.sql.Sql;
import liquibase.sql.UnparsedSql;
import liquibase.sqlgenerator.SqlGenerator;
import liquibase.sqlgenerator.SqlGeneratorChain;
import liquibase.statement.core.GetViewDefinitionStatement;
public class GetViewDefinitionGeneratorDB2 extends GetViewDefinitionGenerator {
@Override
public int getPriority() {
return SqlGenerator.PRIORITY_DATABASE;
}
@Override
public boolean supports(GetViewDefinitionStatement statement, Database database) {
return database instanceof AbstractDb2Database;
}
@Override
public Sql[] generateSql(GetViewDefinitionStatement statement, Database database, SqlGeneratorChain sqlGeneratorChain) {
CatalogAndSchema schema = new CatalogAndSchema(statement.getCatalogName(), statement.getSchemaName()).customize(database);
if (database instanceof Db2zDatabase) {
return new Sql[] {
new UnparsedSql("select STATEMENT AS view_definition from SYSIBM.SYSVIEWS where NAME='" + statement.getViewName() + "' and (PATHSCHEMAS LIKE '%" + schema.getSchemaName() + "%' OR CREATOR = '" + schema.getSchemaName() + "')")
};
}
return new Sql[] {
new UnparsedSql("select view_definition from SYSIBM.VIEWS where TABLE_NAME='" + statement.getViewName() + "' and TABLE_SCHEMA='" + schema.getSchemaName() + "'")
};
}
}
## Instruction:
[DAT-12296] Remove redundant PATHSCHEMAS from where part of SQL statement looking for a view definition to make query return unique only results
## Code After:
package liquibase.sqlgenerator.core;
import liquibase.CatalogAndSchema;
import liquibase.database.Database;
import liquibase.database.core.AbstractDb2Database;
import liquibase.database.core.Db2zDatabase;
import liquibase.sql.Sql;
import liquibase.sql.UnparsedSql;
import liquibase.sqlgenerator.SqlGenerator;
import liquibase.sqlgenerator.SqlGeneratorChain;
import liquibase.statement.core.GetViewDefinitionStatement;
public class GetViewDefinitionGeneratorDB2 extends GetViewDefinitionGenerator {
@Override
public int getPriority() {
return SqlGenerator.PRIORITY_DATABASE;
}
@Override
public boolean supports(GetViewDefinitionStatement statement, Database database) {
return database instanceof AbstractDb2Database;
}
@Override
public Sql[] generateSql(GetViewDefinitionStatement statement, Database database, SqlGeneratorChain sqlGeneratorChain) {
CatalogAndSchema schema = new CatalogAndSchema(statement.getCatalogName(), statement.getSchemaName()).customize(database);
if (database instanceof Db2zDatabase) {
return new Sql[] {
new UnparsedSql("select STATEMENT AS view_definition from SYSIBM.SYSVIEWS where NAME='" + statement.getViewName() + "' and CREATOR = '" + schema.getSchemaName() + "'")
};
}
return new Sql[] {
new UnparsedSql("select view_definition from SYSIBM.VIEWS where TABLE_NAME='" + statement.getViewName() + "' and TABLE_SCHEMA='" + schema.getSchemaName() + "'")
};
}
}
|
// ... existing code ...
if (database instanceof Db2zDatabase) {
return new Sql[] {
new UnparsedSql("select STATEMENT AS view_definition from SYSIBM.SYSVIEWS where NAME='" + statement.getViewName() + "' and CREATOR = '" + schema.getSchemaName() + "'")
};
}
return new Sql[] {
// ... rest of the code ...
|
4a6449b806dc755fe3f9d18966c0420da2a4d0fc
|
devito/dle/manipulation.py
|
devito/dle/manipulation.py
|
import cgen as c
from devito.codeprinter import ccode
from devito.nodes import Element, Iteration
from devito.visitors import MergeOuterIterations
__all__ = ['compose_nodes', 'copy_arrays']
def compose_nodes(nodes):
"""Build an Iteration/Expression tree by nesting the nodes in ``nodes``."""
l = list(nodes)
body = l.pop(-1)
while l:
handle = l.pop(-1)
body = handle._rebuild(body, **handle.args_frozen)
return body
def copy_arrays(mapper):
"""Build an Iteration/Expression tree performing the copy ``k = v`` for each
(k, v) in mapper. (k, v) are expected to be of type :class:`IndexedData`."""
# Build the Iteration tree for the copy
iterations = []
for k, v in mapper.items():
handle = []
indices = k.function.indices
for i, j in zip(k.shape, indices):
handle.append(Iteration([], dimension=j, limits=j.symbolic_size))
handle.append(Element(c.Assign(ccode(k[indices]), ccode(v[indices]))))
iterations.append(compose_nodes(handle))
# Maybe some Iterations are mergeable
iterations = MergeOuterIterations().visit(iterations)
return iterations
|
from sympy import Eq
from devito.codeprinter import ccode
from devito.nodes import Expression, Iteration
from devito.visitors import MergeOuterIterations
__all__ = ['compose_nodes', 'copy_arrays']
def compose_nodes(nodes):
"""Build an Iteration/Expression tree by nesting the nodes in ``nodes``."""
l = list(nodes)
body = l.pop(-1)
while l:
handle = l.pop(-1)
body = handle._rebuild(body, **handle.args_frozen)
return body
def copy_arrays(mapper):
"""Build an Iteration/Expression tree performing the copy ``k = v`` for each
(k, v) in mapper. (k, v) are expected to be of type :class:`IndexedData`."""
# Build the Iteration tree for the copy
iterations = []
for k, v in mapper.items():
handle = []
indices = k.function.indices
for i, j in zip(k.shape, indices):
handle.append(Iteration([], dimension=j, limits=j.symbolic_size))
handle.append(Expression(Eq(k[indices], v[indices]), dtype=k.function.dtype))
iterations.append(compose_nodes(handle))
# Maybe some Iterations are mergeable
iterations = MergeOuterIterations().visit(iterations)
return iterations
|
Use Expression, not Element, in copy_arrays
|
dle: Use Expression, not Element, in copy_arrays
|
Python
|
mit
|
opesci/devito,opesci/devito
|
python
|
## Code Before:
import cgen as c
from devito.codeprinter import ccode
from devito.nodes import Element, Iteration
from devito.visitors import MergeOuterIterations
__all__ = ['compose_nodes', 'copy_arrays']
def compose_nodes(nodes):
"""Build an Iteration/Expression tree by nesting the nodes in ``nodes``."""
l = list(nodes)
body = l.pop(-1)
while l:
handle = l.pop(-1)
body = handle._rebuild(body, **handle.args_frozen)
return body
def copy_arrays(mapper):
"""Build an Iteration/Expression tree performing the copy ``k = v`` for each
(k, v) in mapper. (k, v) are expected to be of type :class:`IndexedData`."""
# Build the Iteration tree for the copy
iterations = []
for k, v in mapper.items():
handle = []
indices = k.function.indices
for i, j in zip(k.shape, indices):
handle.append(Iteration([], dimension=j, limits=j.symbolic_size))
handle.append(Element(c.Assign(ccode(k[indices]), ccode(v[indices]))))
iterations.append(compose_nodes(handle))
# Maybe some Iterations are mergeable
iterations = MergeOuterIterations().visit(iterations)
return iterations
## Instruction:
dle: Use Expression, not Element, in copy_arrays
## Code After:
from sympy import Eq
from devito.codeprinter import ccode
from devito.nodes import Expression, Iteration
from devito.visitors import MergeOuterIterations
__all__ = ['compose_nodes', 'copy_arrays']
def compose_nodes(nodes):
"""Build an Iteration/Expression tree by nesting the nodes in ``nodes``."""
l = list(nodes)
body = l.pop(-1)
while l:
handle = l.pop(-1)
body = handle._rebuild(body, **handle.args_frozen)
return body
def copy_arrays(mapper):
"""Build an Iteration/Expression tree performing the copy ``k = v`` for each
(k, v) in mapper. (k, v) are expected to be of type :class:`IndexedData`."""
# Build the Iteration tree for the copy
iterations = []
for k, v in mapper.items():
handle = []
indices = k.function.indices
for i, j in zip(k.shape, indices):
handle.append(Iteration([], dimension=j, limits=j.symbolic_size))
handle.append(Expression(Eq(k[indices], v[indices]), dtype=k.function.dtype))
iterations.append(compose_nodes(handle))
# Maybe some Iterations are mergeable
iterations = MergeOuterIterations().visit(iterations)
return iterations
|
# ... existing code ...
from sympy import Eq
from devito.codeprinter import ccode
from devito.nodes import Expression, Iteration
from devito.visitors import MergeOuterIterations
__all__ = ['compose_nodes', 'copy_arrays']
# ... modified code ...
indices = k.function.indices
for i, j in zip(k.shape, indices):
handle.append(Iteration([], dimension=j, limits=j.symbolic_size))
handle.append(Expression(Eq(k[indices], v[indices]), dtype=k.function.dtype))
iterations.append(compose_nodes(handle))
# Maybe some Iterations are mergeable
# ... rest of the code ...
|
9ce26dfb42753570ad7a2c89e51638aa5d49df2b
|
fedora/__init__.py
|
fedora/__init__.py
|
'''
Python Fedora
Modules to communicate with and help implement Fedora Services.
'''
import gettext
translation = gettext.translation('python-fedora', '/usr/share/locale',
fallback=True)
_ = translation.ugettext
from fedora import release
__version__ = release.VERSION
__all__ = ('_', 'release', '__version__',
'accounts', 'client', 'tg')
|
'''
Python Fedora
Modules to communicate with and help implement Fedora Services.
'''
import kitchen
# Setup gettext for all of kitchen.
# Remember -- _() is for marking most messages
# b_() is for marking messages that are used in exceptions
(_, N_) = kitchen.i18n.easy_gettext_setup('python-fedora')
(b_, bN_) = kitchen.i18n.eay_gettext_setup('python-fedora', use_unicode=False)
from fedora import release
__version__ = release.VERSION
__all__ = ('__version__', 'accounts', 'client', 'release', 'tg')
|
Use kitchen.i18n to setup gettext. Setup b_() for exceptions.
|
Use kitchen.i18n to setup gettext. Setup b_() for exceptions.
|
Python
|
lgpl-2.1
|
fedora-infra/python-fedora
|
python
|
## Code Before:
'''
Python Fedora
Modules to communicate with and help implement Fedora Services.
'''
import gettext
translation = gettext.translation('python-fedora', '/usr/share/locale',
fallback=True)
_ = translation.ugettext
from fedora import release
__version__ = release.VERSION
__all__ = ('_', 'release', '__version__',
'accounts', 'client', 'tg')
## Instruction:
Use kitchen.i18n to setup gettext. Setup b_() for exceptions.
## Code After:
'''
Python Fedora
Modules to communicate with and help implement Fedora Services.
'''
import kitchen
# Setup gettext for all of kitchen.
# Remember -- _() is for marking most messages
# b_() is for marking messages that are used in exceptions
(_, N_) = kitchen.i18n.easy_gettext_setup('python-fedora')
(b_, bN_) = kitchen.i18n.eay_gettext_setup('python-fedora', use_unicode=False)
from fedora import release
__version__ = release.VERSION
__all__ = ('__version__', 'accounts', 'client', 'release', 'tg')
|
// ... existing code ...
Modules to communicate with and help implement Fedora Services.
'''
import kitchen
# Setup gettext for all of kitchen.
# Remember -- _() is for marking most messages
# b_() is for marking messages that are used in exceptions
(_, N_) = kitchen.i18n.easy_gettext_setup('python-fedora')
(b_, bN_) = kitchen.i18n.eay_gettext_setup('python-fedora', use_unicode=False)
from fedora import release
__version__ = release.VERSION
__all__ = ('__version__', 'accounts', 'client', 'release', 'tg')
// ... rest of the code ...
|
9937823496db32a4d3eac3eee0fa4d310ea07e11
|
dvhb_hybrid/files/models.py
|
dvhb_hybrid/files/models.py
|
import uuid
from django.conf import settings
from django.contrib.postgres.fields import JSONField
from django.db import models
from django.utils.translation import ugettext_lazy as _
from ..models import UpdatedMixin
from .storages import image_storage
class Image(UpdatedMixin, models.Model):
author = models.ForeignKey(
settings.AUTH_USER_MODEL, related_name='images',
verbose_name=_('Author'), on_delete=models.PROTECT)
uuid = models.UUIDField(_('UUID'), primary_key=True)
image = models.ImageField(storage=image_storage)
mime_type = models.CharField(_('content type'), max_length=99, blank=True)
meta = JSONField(_('meta-information'), default={}, blank=True)
class Meta:
verbose_name = _('image')
verbose_name_plural = _('images')
ordering = ('-created_at',)
def __str__(self):
return self.image.name
def save(self, force_insert=False, force_update=False, using=None,
update_fields=None):
if not self.uuid:
uid = uuid.uuid4()
self.uuid = uid
self.image.name = image_storage.get_available_name(
self.image.name, uuid=uid)
super(Image, self).save(force_insert=force_insert,
force_update=force_update,
using=using,
update_fields=update_fields)
|
import uuid
from django.conf import settings
from django.contrib.postgres.fields import JSONField
from django.contrib.postgres.indexes import GinIndex
from django.db import models
from django.utils.translation import ugettext_lazy as _
from ..models import UpdatedMixin
from .storages import image_storage
class Image(UpdatedMixin, models.Model):
author = models.ForeignKey(
settings.AUTH_USER_MODEL, related_name='images',
verbose_name=_('Author'), on_delete=models.PROTECT)
uuid = models.UUIDField(_('UUID'), primary_key=True)
image = models.ImageField(storage=image_storage)
mime_type = models.CharField(_('content type'), max_length=99, blank=True)
meta = JSONField(_('meta-information'), default={}, blank=True)
class Meta:
verbose_name = _('image')
verbose_name_plural = _('images')
ordering = ('-created_at',)
indexes = [GinIndex(fields=['meta'])]
def __str__(self):
return self.image.name
def save(self, force_insert=False, force_update=False, using=None,
update_fields=None):
if not self.uuid:
uid = uuid.uuid4()
self.uuid = uid
self.image.name = image_storage.get_available_name(
self.image.name, uuid=uid)
super(Image, self).save(force_insert=force_insert,
force_update=force_update,
using=using,
update_fields=update_fields)
|
Add index for image metainformation
|
Add index for image metainformation
|
Python
|
mit
|
dvhbru/dvhb-hybrid
|
python
|
## Code Before:
import uuid
from django.conf import settings
from django.contrib.postgres.fields import JSONField
from django.db import models
from django.utils.translation import ugettext_lazy as _
from ..models import UpdatedMixin
from .storages import image_storage
class Image(UpdatedMixin, models.Model):
author = models.ForeignKey(
settings.AUTH_USER_MODEL, related_name='images',
verbose_name=_('Author'), on_delete=models.PROTECT)
uuid = models.UUIDField(_('UUID'), primary_key=True)
image = models.ImageField(storage=image_storage)
mime_type = models.CharField(_('content type'), max_length=99, blank=True)
meta = JSONField(_('meta-information'), default={}, blank=True)
class Meta:
verbose_name = _('image')
verbose_name_plural = _('images')
ordering = ('-created_at',)
def __str__(self):
return self.image.name
def save(self, force_insert=False, force_update=False, using=None,
update_fields=None):
if not self.uuid:
uid = uuid.uuid4()
self.uuid = uid
self.image.name = image_storage.get_available_name(
self.image.name, uuid=uid)
super(Image, self).save(force_insert=force_insert,
force_update=force_update,
using=using,
update_fields=update_fields)
## Instruction:
Add index for image metainformation
## Code After:
import uuid
from django.conf import settings
from django.contrib.postgres.fields import JSONField
from django.contrib.postgres.indexes import GinIndex
from django.db import models
from django.utils.translation import ugettext_lazy as _
from ..models import UpdatedMixin
from .storages import image_storage
class Image(UpdatedMixin, models.Model):
author = models.ForeignKey(
settings.AUTH_USER_MODEL, related_name='images',
verbose_name=_('Author'), on_delete=models.PROTECT)
uuid = models.UUIDField(_('UUID'), primary_key=True)
image = models.ImageField(storage=image_storage)
mime_type = models.CharField(_('content type'), max_length=99, blank=True)
meta = JSONField(_('meta-information'), default={}, blank=True)
class Meta:
verbose_name = _('image')
verbose_name_plural = _('images')
ordering = ('-created_at',)
indexes = [GinIndex(fields=['meta'])]
def __str__(self):
return self.image.name
def save(self, force_insert=False, force_update=False, using=None,
update_fields=None):
if not self.uuid:
uid = uuid.uuid4()
self.uuid = uid
self.image.name = image_storage.get_available_name(
self.image.name, uuid=uid)
super(Image, self).save(force_insert=force_insert,
force_update=force_update,
using=using,
update_fields=update_fields)
|
...
from django.conf import settings
from django.contrib.postgres.fields import JSONField
from django.contrib.postgres.indexes import GinIndex
from django.db import models
from django.utils.translation import ugettext_lazy as _
...
verbose_name = _('image')
verbose_name_plural = _('images')
ordering = ('-created_at',)
indexes = [GinIndex(fields=['meta'])]
def __str__(self):
return self.image.name
...
|
33fd4bba1f2c44e871051862db8071fadb0e9825
|
core-plugins/shared/1/dss/reporting-plugins/shared_create_metaproject/shared_create_metaproject.py
|
core-plugins/shared/1/dss/reporting-plugins/shared_create_metaproject/shared_create_metaproject.py
|
def process(transaction, parameters, tableBuilder):
"""Create a project with user-defined name in given space.
"""
# Prepare the return table
tableBuilder.addHeader("success")
tableBuilder.addHeader("message")
# Add a row for the results
row = tableBuilder.addRow()
# Retrieve parameters from client
metaprojectCode = parameters.get("metaprojectCode")
username = parameters.get("userName")
# Try retrieving the metaproject (tag)
metaproject = transaction.getMetaproject(metaprojectCode, username)
if metaproject is None:
# Create the metaproject (tag)
metaproject = transaction.createNewMetaproject(metaprojectCode,
"Test",
username)
# Check that creation was succcessful
if metaproject is None:
success = "false"
message = "Could not create metaproject " + metaprojectCode + "."
else:
success = "true"
message = "Tag " + metaprojectCode + " successfully created."
else:
success = "false"
message = "Tag " + metaprojectCode + " exists already."
# Add the results to current row
row.setCell("success", success)
row.setCell("message", message)
|
def process(transaction, parameters, tableBuilder):
"""Create a project with user-defined name in given space.
"""
# Prepare the return table
tableBuilder.addHeader("success")
tableBuilder.addHeader("message")
# Add a row for the results
row = tableBuilder.addRow()
# Retrieve parameters from client
username = parameters.get("userName")
metaprojectCode = parameters.get("metaprojectCode")
metaprojectDescr = parameters.get("metaprojectDescr")
if metaprojectDescr is None:
metaprojectDescr = ""
# Try retrieving the metaproject (tag)
metaproject = transaction.getMetaproject(metaprojectCode, username)
if metaproject is None:
# Create the metaproject (tag)
metaproject = transaction.createNewMetaproject(metaprojectCode,
metaprojectDescr,
username)
# Check that creation was succcessful
if metaproject is None:
success = "false"
message = "Could not create metaproject " + metaprojectCode + "."
else:
success = "true"
message = "Tag " + metaprojectCode + " successfully created."
else:
success = "false"
message = "Tag " + metaprojectCode + " exists already."
# Add the results to current row
row.setCell("success", success)
row.setCell("message", message)
|
Create metaproject with user-provided description.
|
Create metaproject with user-provided description.
|
Python
|
apache-2.0
|
aarpon/obit_shared_core_technology,aarpon/obit_shared_core_technology,aarpon/obit_shared_core_technology
|
python
|
## Code Before:
def process(transaction, parameters, tableBuilder):
"""Create a project with user-defined name in given space.
"""
# Prepare the return table
tableBuilder.addHeader("success")
tableBuilder.addHeader("message")
# Add a row for the results
row = tableBuilder.addRow()
# Retrieve parameters from client
metaprojectCode = parameters.get("metaprojectCode")
username = parameters.get("userName")
# Try retrieving the metaproject (tag)
metaproject = transaction.getMetaproject(metaprojectCode, username)
if metaproject is None:
# Create the metaproject (tag)
metaproject = transaction.createNewMetaproject(metaprojectCode,
"Test",
username)
# Check that creation was succcessful
if metaproject is None:
success = "false"
message = "Could not create metaproject " + metaprojectCode + "."
else:
success = "true"
message = "Tag " + metaprojectCode + " successfully created."
else:
success = "false"
message = "Tag " + metaprojectCode + " exists already."
# Add the results to current row
row.setCell("success", success)
row.setCell("message", message)
## Instruction:
Create metaproject with user-provided description.
## Code After:
def process(transaction, parameters, tableBuilder):
    """Report-plugin entry point: create a metaproject (tag) for a user.

    Looks up the tag named by the "metaprojectCode" parameter for the user
    given by "userName"; if it does not exist yet it is created with the
    optional "metaprojectDescr" description.  The outcome is reported in a
    one-row result table with "success" and "message" columns.
    """
    # Result table layout: one row, two columns.
    tableBuilder.addHeader("success")
    tableBuilder.addHeader("message")
    result_row = tableBuilder.addRow()

    # Client-supplied parameters; description defaults to empty.
    user = parameters.get("userName")
    code = parameters.get("metaprojectCode")
    descr = parameters.get("metaprojectDescr")
    descr = "" if descr is None else descr

    existing = transaction.getMetaproject(code, user)
    if existing is not None:
        outcome = ("false", "Tag " + code + " exists already.")
    elif transaction.createNewMetaproject(code, descr, user) is None:
        # Creation was attempted but the server returned nothing.
        outcome = ("false", "Could not create metaproject " + code + ".")
    else:
        outcome = ("true", "Tag " + code + " successfully created.")

    success, message = outcome
    result_row.setCell("success", success)
    result_row.setCell("message", message)
|
# ... existing code ...
row = tableBuilder.addRow()
# Retrieve parameters from client
username = parameters.get("userName")
metaprojectCode = parameters.get("metaprojectCode")
metaprojectDescr = parameters.get("metaprojectDescr")
if metaprojectDescr is None:
metaprojectDescr = ""
# Try retrieving the metaproject (tag)
metaproject = transaction.getMetaproject(metaprojectCode, username)
# ... modified code ...
# Create the metaproject (tag)
metaproject = transaction.createNewMetaproject(metaprojectCode,
metaprojectDescr,
username)
# Check that creation was succcessful
# ... rest of the code ...
|
f0b6c5101e4b1c88410e8e9d227266500497e031
|
list.c
|
list.c
|
struct ListNode
{
ListNode* prev;
ListNode* next;
void* k;
};
struct List
{
ListNode* head;
};
List* List_Create(void)
{
List* l = (List *)malloc(sizeof(List));
l->head = NULL;
return l;
}
ListNode* ListNode_Create(void* k)
{
ListNode* n = (ListNode *)malloc(sizeof(ListNode));
n->prev = NULL;
n->next = NULL;
n->k = k;
return n;
}
|
struct ListNode
{
ListNode* prev;
ListNode* next;
void* k;
};
struct List
{
ListNode* head;
};
List* List_Create(void)
{
List* l = (List *)malloc(sizeof(List));
l->head = NULL;
return l;
}
ListNode* ListNode_Create(void* k)
{
ListNode* n = (ListNode *)malloc(sizeof(ListNode));
n->prev = NULL;
n->next = NULL;
n->k = k;
return n;
}
ListNode* List_Search(List* l, void* k, int (f)(void*, void*))
{
ListNode* n = l->head;
while (n != NULL && !f(n->k, k))
{
n = n->next;
}
return n;
}
|
Add List Search function implementation
|
Add List Search function implementation
|
C
|
mit
|
MaxLikelihood/CADT
|
c
|
## Code Before:
struct ListNode
{
ListNode* prev;
ListNode* next;
void* k;
};
struct List
{
ListNode* head;
};
List* List_Create(void)
{
List* l = (List *)malloc(sizeof(List));
l->head = NULL;
return l;
}
ListNode* ListNode_Create(void* k)
{
ListNode* n = (ListNode *)malloc(sizeof(ListNode));
n->prev = NULL;
n->next = NULL;
n->k = k;
return n;
}
## Instruction:
Add List Search function implementation
## Code After:
/* Doubly-linked list node holding an opaque, caller-owned key pointer. */
struct ListNode
{
    ListNode* prev;   /* previous node, or NULL at the head */
    ListNode* next;   /* next node, or NULL at the tail */
    void* k;          /* key payload; ownership stays with the caller */
};

/* List handle; only the head pointer is tracked (no tail, no size). */
struct List
{
    ListNode* head;   /* first node, or NULL when the list is empty */
};
/* Allocate an empty list.
 * Returns the new list, or NULL if allocation fails (the original code
 * dereferenced the malloc result unchecked and would crash on OOM). */
List* List_Create(void)
{
    List* l = (List *)malloc(sizeof(List));
    if (l == NULL)
    {
        return NULL;
    }
    l->head = NULL;
    return l;
}
/* Allocate a detached node wrapping key *k* (prev/next start as NULL).
 * Returns the new node, or NULL if allocation fails, matching the
 * NULL-checked behaviour of List_Create. */
ListNode* ListNode_Create(void* k)
{
    ListNode* n = (ListNode *)malloc(sizeof(ListNode));
    if (n == NULL)
    {
        return NULL;
    }
    n->prev = NULL;
    n->next = NULL;
    n->k = k;
    return n;
}
/* Linear scan for the first node whose key matches *k* under the
 * caller-supplied equality predicate f(node_key, k).
 * Returns the matching node, or NULL when no node matches. */
ListNode* List_Search(List* l, void* k, int (f)(void*, void*))
{
    ListNode* cursor;
    for (cursor = l->head; cursor != NULL; cursor = cursor->next)
    {
        if (f(cursor->k, k))
        {
            return cursor;
        }
    }
    return cursor;  /* cursor is NULL here: key not found */
}
|
...
n->k = k;
return n;
}
ListNode* List_Search(List* l, void* k, int (f)(void*, void*))
{
ListNode* n = l->head;
while (n != NULL && !f(n->k, k))
{
n = n->next;
}
return n;
}
...
|
66cc5b8ecae568c3a20948718ef2d4f162cfd786
|
test/test_pycompile.py
|
test/test_pycompile.py
|
import unittest
from six import StringIO
from lesscpy import compile
class TestCompileFunction(unittest.TestCase):
"""
Unit tests for compile
"""
def test_compile(self):
"""
It can compile input from a file-like object
"""
output = compile(StringIO("a { border-width: 2px * 3; }"), minify=True)
self.assertEqual(output, "a{border-width:6px;}");
|
import unittest
from six import StringIO
from lesscpy import compile
class TestCompileFunction(unittest.TestCase):
"""
Unit tests for compile
"""
def test_compile(self):
"""
It can compile input from a file-like object
"""
output = compile(StringIO("a { border-width: 2px * 3; }"), minify=True)
self.assertEqual(output, "a{border-width:6px;}");
def test_raises_exception(self):
"""
Test if a syntax error raises an exception
"""
from lesscpy.exceptions import CompilationError
fail_func = lambda: compile(StringIO("a }"), minify=True)
self.assertRaises(CompilationError, fail_func)
|
Add test if compile() raises an CompilationError
|
Add test if compile() raises an CompilationError
|
Python
|
mit
|
lesscpy/lesscpy,fivethreeo/lesscpy,joequery/lesscpy
|
python
|
## Code Before:
import unittest
from six import StringIO
from lesscpy import compile
class TestCompileFunction(unittest.TestCase):
"""
Unit tests for compile
"""
def test_compile(self):
"""
It can compile input from a file-like object
"""
output = compile(StringIO("a { border-width: 2px * 3; }"), minify=True)
self.assertEqual(output, "a{border-width:6px;}");
## Instruction:
Add test if compile() raises an CompilationError
## Code After:
import unittest
from six import StringIO
from lesscpy import compile
class TestCompileFunction(unittest.TestCase):
    """Unit tests for the top-level ``lesscpy.compile`` helper."""

    def test_compile(self):
        """It can compile input from a file-like object."""
        output = compile(StringIO("a { border-width: 2px * 3; }"), minify=True)
        self.assertEqual(output, "a{border-width:6px;}")

    def test_raises_exception(self):
        """A syntax error in the input raises CompilationError."""
        from lesscpy.exceptions import CompilationError
        # assertRaises as a context manager is the idiomatic form; it
        # replaces wrapping the call in a lambda and passing it as a
        # callable, which obscured the call under test.
        with self.assertRaises(CompilationError):
            compile(StringIO("a }"), minify=True)
|
// ... existing code ...
output = compile(StringIO("a { border-width: 2px * 3; }"), minify=True)
self.assertEqual(output, "a{border-width:6px;}");
def test_raises_exception(self):
"""
Test if a syntax error raises an exception
"""
from lesscpy.exceptions import CompilationError
fail_func = lambda: compile(StringIO("a }"), minify=True)
self.assertRaises(CompilationError, fail_func)
// ... rest of the code ...
|
91ad56ea892d2f2fdb2af97f81ec70a7b9f9305c
|
analysis/sanity-check-velocity.py
|
analysis/sanity-check-velocity.py
|
import climate
import joblib
import lmj.cubes
import numpy as np
def _check(t):
t.load()
t.add_velocities(smooth=0)
vel = abs(t.df[t.marker_velocity_columns].values).flatten()
vel = vel[np.isfinite(vel)]
pct = np.percentile(vel, [1, 2, 5, 10, 20, 50, 80, 90, 95, 98, 99])
print(t.subject.key, t.block.key, t.key, *pct)
def main(root):
trials = lmj.cubes.Experiment(root).trials_matching('*')
check = joblib.delayed(_check)
joblib.Parallel(-1)(check(t) for t in trials)
if __name__ == '__main__':
climate.call(main)
|
import climate
import joblib
import lmj.cubes
import numpy as np
def _check(t):
t.load()
t.add_velocities(smooth=0)
t.add_accelerations(smooth=0)
vel = abs(t.df[t.marker_velocity_columns].values).flatten()
vel = vel[np.isfinite(vel)]
pct = np.percentile(vel, [1, 2, 5, 10, 20, 50, 80, 90, 95, 98, 99])
np.set_printoptions(suppress=True, linewidth=1000, precision=2)
t.log('%s', pct)
def main(root):
trials = lmj.cubes.Experiment(root).trials_matching('*')
check = joblib.delayed(_check)
joblib.Parallel(-1)(check(t) for t in trials)
if __name__ == '__main__':
climate.call(main)
|
Use trial logging. Tweak numpy logging output.
|
Use trial logging. Tweak numpy logging output.
|
Python
|
mit
|
lmjohns3/cube-experiment,lmjohns3/cube-experiment,lmjohns3/cube-experiment
|
python
|
## Code Before:
import climate
import joblib
import lmj.cubes
import numpy as np
def _check(t):
t.load()
t.add_velocities(smooth=0)
vel = abs(t.df[t.marker_velocity_columns].values).flatten()
vel = vel[np.isfinite(vel)]
pct = np.percentile(vel, [1, 2, 5, 10, 20, 50, 80, 90, 95, 98, 99])
print(t.subject.key, t.block.key, t.key, *pct)
def main(root):
trials = lmj.cubes.Experiment(root).trials_matching('*')
check = joblib.delayed(_check)
joblib.Parallel(-1)(check(t) for t in trials)
if __name__ == '__main__':
climate.call(main)
## Instruction:
Use trial logging. Tweak numpy logging output.
## Code After:
import climate
import joblib
import lmj.cubes
import numpy as np
def _check(t):
    """Load one trial, add velocity/acceleration channels, and log
    percentiles of the absolute marker velocities (smoothing disabled)."""
    t.load()
    t.add_velocities(smooth=0)
    t.add_accelerations(smooth=0)
    # Absolute speeds flattened across all markers/frames; drop NaN/inf
    # entries so the percentiles are computed over finite values only.
    vel = abs(t.df[t.marker_velocity_columns].values).flatten()
    vel = vel[np.isfinite(vel)]
    pct = np.percentile(vel, [1, 2, 5, 10, 20, 50, 80, 90, 95, 98, 99])
    # Set print options here because this runs inside a joblib worker
    # process, where module-level settings from the parent do not apply.
    np.set_printoptions(suppress=True, linewidth=1000, precision=2)
    t.log('%s', pct)
def main(root):
    """Run _check over every trial found below *root*, fanning the work
    out across all available cores via joblib."""
    trials = lmj.cubes.Experiment(root).trials_matching('*')
    check = joblib.delayed(_check)
    # Parallel(-1) uses one worker per CPU core.
    joblib.Parallel(-1)(check(t) for t in trials)


if __name__ == '__main__':
    climate.call(main)
|
...
def _check(t):
t.load()
t.add_velocities(smooth=0)
t.add_accelerations(smooth=0)
vel = abs(t.df[t.marker_velocity_columns].values).flatten()
vel = vel[np.isfinite(vel)]
pct = np.percentile(vel, [1, 2, 5, 10, 20, 50, 80, 90, 95, 98, 99])
np.set_printoptions(suppress=True, linewidth=1000, precision=2)
t.log('%s', pct)
def main(root):
...
|
188bc4fb408d60fa637e2b5eddd9ddcf201abf34
|
WEB-INF/src/com/krishagni/catissueplus/core/biospecimen/services/impl/StagedParticipantsCleanupTask.java
|
WEB-INF/src/com/krishagni/catissueplus/core/biospecimen/services/impl/StagedParticipantsCleanupTask.java
|
package com.krishagni.catissueplus.core.biospecimen.services.impl;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Configurable;
import com.krishagni.catissueplus.core.administrative.domain.ScheduledJobRun;
import com.krishagni.catissueplus.core.administrative.services.ScheduledTask;
import com.krishagni.catissueplus.core.biospecimen.ConfigParams;
import com.krishagni.catissueplus.core.biospecimen.repository.DaoFactory;
import com.krishagni.catissueplus.core.common.PlusTransactional;
import com.krishagni.catissueplus.core.common.util.ConfigUtil;
@Configurable
public class StagedParticipantsCleanupTask implements ScheduledTask {
private static final int DEF_CLEANUP_INT = 90;
@Autowired
private DaoFactory daoFactory;
@Override
@PlusTransactional
public void doJob(ScheduledJobRun jobRun) throws Exception {
int olderThanDays = ConfigUtil.getInstance().getIntSetting(
ConfigParams.MODULE,
ConfigParams.STAGED_PART_CLEANUP_INT,
DEF_CLEANUP_INT);
daoFactory.getStagedParticipantDao().cleanupOldParticipants(olderThanDays);
}
}
|
package com.krishagni.catissueplus.core.biospecimen.services.impl;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Configurable;
import com.krishagni.catissueplus.core.administrative.domain.ScheduledJobRun;
import com.krishagni.catissueplus.core.administrative.services.ScheduledTask;
import com.krishagni.catissueplus.core.biospecimen.ConfigParams;
import com.krishagni.catissueplus.core.biospecimen.repository.DaoFactory;
import com.krishagni.catissueplus.core.common.PlusTransactional;
import com.krishagni.catissueplus.core.common.util.ConfigUtil;
@Configurable
public class StagedParticipantsCleanupTask implements ScheduledTask {
private static final int DEF_CLEANUP_INT = 90;
@Autowired
private DaoFactory daoFactory;
@Override
@PlusTransactional
public void doJob(ScheduledJobRun jobRun) throws Exception {
int olderThanDays = ConfigUtil.getInstance().getIntSetting(
ConfigParams.MODULE, ConfigParams.STAGED_PART_CLEANUP_INT, DEF_CLEANUP_INT);
if (olderThanDays > 0) {
daoFactory.getStagedParticipantDao().cleanupOldParticipants(olderThanDays);
}
}
}
|
Disable cleaning of staged participants when retain interval specified is 0 days.
|
Disable cleaning of staged participants when retain interval specified is 0 days.
|
Java
|
bsd-3-clause
|
krishagni/openspecimen,krishagni/openspecimen,krishagni/openspecimen
|
java
|
## Code Before:
package com.krishagni.catissueplus.core.biospecimen.services.impl;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Configurable;
import com.krishagni.catissueplus.core.administrative.domain.ScheduledJobRun;
import com.krishagni.catissueplus.core.administrative.services.ScheduledTask;
import com.krishagni.catissueplus.core.biospecimen.ConfigParams;
import com.krishagni.catissueplus.core.biospecimen.repository.DaoFactory;
import com.krishagni.catissueplus.core.common.PlusTransactional;
import com.krishagni.catissueplus.core.common.util.ConfigUtil;
@Configurable
public class StagedParticipantsCleanupTask implements ScheduledTask {
private static final int DEF_CLEANUP_INT = 90;
@Autowired
private DaoFactory daoFactory;
@Override
@PlusTransactional
public void doJob(ScheduledJobRun jobRun) throws Exception {
int olderThanDays = ConfigUtil.getInstance().getIntSetting(
ConfigParams.MODULE,
ConfigParams.STAGED_PART_CLEANUP_INT,
DEF_CLEANUP_INT);
daoFactory.getStagedParticipantDao().cleanupOldParticipants(olderThanDays);
}
}
## Instruction:
Disable cleaning of staged participants when retain interval specified is 0 days.
## Code After:
package com.krishagni.catissueplus.core.biospecimen.services.impl;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Configurable;
import com.krishagni.catissueplus.core.administrative.domain.ScheduledJobRun;
import com.krishagni.catissueplus.core.administrative.services.ScheduledTask;
import com.krishagni.catissueplus.core.biospecimen.ConfigParams;
import com.krishagni.catissueplus.core.biospecimen.repository.DaoFactory;
import com.krishagni.catissueplus.core.common.PlusTransactional;
import com.krishagni.catissueplus.core.common.util.ConfigUtil;
/**
 * Scheduled job that purges staged participants older than the configured
 * retention interval (ConfigParams.STAGED_PART_CLEANUP_INT, in days).
 * A non-positive interval disables the cleanup entirely.
 */
@Configurable
public class StagedParticipantsCleanupTask implements ScheduledTask {
    /** Fallback retention interval, in days, when no setting is configured. */
    private static final int DEF_CLEANUP_INT = 90;

    @Autowired
    private DaoFactory daoFactory;

    @Override
    @PlusTransactional
    public void doJob(ScheduledJobRun jobRun) throws Exception {
        int olderThanDays = ConfigUtil.getInstance().getIntSetting(
            ConfigParams.MODULE, ConfigParams.STAGED_PART_CLEANUP_INT, DEF_CLEANUP_INT);
        // olderThanDays <= 0 means "retain forever": skip the delete.
        if (olderThanDays > 0) {
            daoFactory.getStagedParticipantDao().cleanupOldParticipants(olderThanDays);
        }
    }
}
|
# ... existing code ...
@PlusTransactional
public void doJob(ScheduledJobRun jobRun) throws Exception {
int olderThanDays = ConfigUtil.getInstance().getIntSetting(
ConfigParams.MODULE, ConfigParams.STAGED_PART_CLEANUP_INT, DEF_CLEANUP_INT);
if (olderThanDays > 0) {
daoFactory.getStagedParticipantDao().cleanupOldParticipants(olderThanDays);
}
}
}
# ... rest of the code ...
|
98cbd5207bd25fb0fafd25f18870c771479255e1
|
run-tests.py
|
run-tests.py
|
import os
import subprocess
import sys
args = [
sys.executable or 'python', # Python interpreter to call for testing.
'-B', # Don't write .pyc files on import.
'-m', 'unittest', # Run the unittest module as a script.
'discover', # Use test discovery.
'-s', 'tests', # Start discovery in 'tests' directory.
'-t', os.path.dirname(__file__), # Set top-level of project.
]
args.extend(sys.argv[1:]) # Append any arguments passed to script (-v, etc.).
sys.exit(subprocess.call(args))
|
import os
import subprocess
import sys
args = [
sys.executable or 'python', # Python interpreter to call for testing.
'-B', # Don't write .pyc files on import.
'-W', 'default', # Enable default handling for all warnings.
'-m', 'unittest', # Run the unittest module as a script.
'discover', # Use test discovery.
'-s', 'tests', # Start discovery in 'tests' directory.
'-t', os.path.dirname(__file__), # Set top-level of project.
]
args.extend(sys.argv[1:]) # Append any arguments passed to script (-v, etc.).
sys.exit(subprocess.call(args))
|
Enable default warnings while testing.
|
Enable default warnings while testing.
|
Python
|
mit
|
shawnbrown/gpn,shawnbrown/gpn
|
python
|
## Code Before:
import os
import subprocess
import sys
args = [
sys.executable or 'python', # Python interpreter to call for testing.
'-B', # Don't write .pyc files on import.
'-m', 'unittest', # Run the unittest module as a script.
'discover', # Use test discovery.
'-s', 'tests', # Start discovery in 'tests' directory.
'-t', os.path.dirname(__file__), # Set top-level of project.
]
args.extend(sys.argv[1:]) # Append any arguments passed to script (-v, etc.).
sys.exit(subprocess.call(args))
## Instruction:
Enable default warnings while testing.
## Code After:
import os
import subprocess
import sys
# Re-invoke the current interpreter to run unittest discovery under the
# 'tests/' directory, forwarding any extra command-line flags (-v, ...)
# to the child process untouched, and exit with its return code.
interpreter = sys.executable or 'python'
command = [
    interpreter,
    '-B',                             # don't write .pyc files on import
    '-W', 'default',                  # enable default handling for warnings
    '-m', 'unittest', 'discover',     # unittest module in discovery mode
    '-s', 'tests',                    # start discovery in 'tests'
    '-t', os.path.dirname(__file__),  # top level of the project
] + sys.argv[1:]
sys.exit(subprocess.call(command))
|
// ... existing code ...
args = [
sys.executable or 'python', # Python interpreter to call for testing.
'-B', # Don't write .pyc files on import.
'-W', 'default', # Enable default handling for all warnings.
'-m', 'unittest', # Run the unittest module as a script.
'discover', # Use test discovery.
'-s', 'tests', # Start discovery in 'tests' directory.
// ... rest of the code ...
|
687f48ca94b67321a1576a1dbb1d7ae89fe6f0b7
|
tests/test_pubannotation.py
|
tests/test_pubannotation.py
|
import kindred
def test_pubannotation_groST():
corpus = kindred.pubannotation.load('bionlp-st-gro-2013-development')
assert isinstance(corpus,kindred.Corpus)
fileCount = len(corpus.documents)
entityCount = sum([ len(d.entities) for d in corpus.documents ])
relationCount = sum([ len(d.relations) for d in corpus.documents ])
assert fileCount == 50
assert relationCount == 1454
assert entityCount == 2657
def test_pubannotation_wikiPain():
corpus = kindred.pubannotation.load('WikiPainGoldStandard')
assert isinstance(corpus,kindred.Corpus)
fileCount = len(corpus.documents)
entityCount = sum([ len(d.entities) for d in corpus.documents ])
relationCount = sum([ len(d.relations) for d in corpus.documents ])
assert fileCount == 49
assert relationCount == 715
assert entityCount == 878
if __name__ == '__main__':
test_pubannotation()
|
import kindred
def test_pubannotation():
corpus = kindred.pubannotation.load('bionlp-st-gro-2013-development')
assert isinstance(corpus,kindred.Corpus)
fileCount = len(corpus.documents)
entityCount = sum([ len(d.entities) for d in corpus.documents ])
relationCount = sum([ len(d.relations) for d in corpus.documents ])
assert fileCount == 50
assert relationCount == 1454
assert entityCount == 2657
if __name__ == '__main__':
test_pubannotation()
|
Remove one of the pubannotation tests as their data seems to change
|
Remove one of the pubannotation tests as their data seems to change
|
Python
|
mit
|
jakelever/kindred,jakelever/kindred
|
python
|
## Code Before:
import kindred
def test_pubannotation_groST():
corpus = kindred.pubannotation.load('bionlp-st-gro-2013-development')
assert isinstance(corpus,kindred.Corpus)
fileCount = len(corpus.documents)
entityCount = sum([ len(d.entities) for d in corpus.documents ])
relationCount = sum([ len(d.relations) for d in corpus.documents ])
assert fileCount == 50
assert relationCount == 1454
assert entityCount == 2657
def test_pubannotation_wikiPain():
corpus = kindred.pubannotation.load('WikiPainGoldStandard')
assert isinstance(corpus,kindred.Corpus)
fileCount = len(corpus.documents)
entityCount = sum([ len(d.entities) for d in corpus.documents ])
relationCount = sum([ len(d.relations) for d in corpus.documents ])
assert fileCount == 49
assert relationCount == 715
assert entityCount == 878
if __name__ == '__main__':
test_pubannotation()
## Instruction:
Remove one of the pubannotation tests as their data seems to change
## Code After:
import kindred
def test_pubannotation():
corpus = kindred.pubannotation.load('bionlp-st-gro-2013-development')
assert isinstance(corpus,kindred.Corpus)
fileCount = len(corpus.documents)
entityCount = sum([ len(d.entities) for d in corpus.documents ])
relationCount = sum([ len(d.relations) for d in corpus.documents ])
assert fileCount == 50
assert relationCount == 1454
assert entityCount == 2657
if __name__ == '__main__':
test_pubannotation()
|
# ... existing code ...
import kindred
def test_pubannotation():
corpus = kindred.pubannotation.load('bionlp-st-gro-2013-development')
assert isinstance(corpus,kindred.Corpus)
# ... modified code ...
assert relationCount == 1454
assert entityCount == 2657
if __name__ == '__main__':
test_pubannotation()
# ... rest of the code ...
|
43e118ccc68bcbfd91a56a6572e8543d2172a79c
|
bot/logger/message_sender/reusable/__init__.py
|
bot/logger/message_sender/reusable/__init__.py
|
from bot.api.api import Api
from bot.logger.message_sender import MessageSender
class ReusableMessageSender(MessageSender):
def __init__(self, api: Api, separator):
self.api = api
self.separator = separator
def send(self, text):
if self._is_new():
self._send_new(text)
else:
self._send_edit(text)
def _is_new(self):
raise NotImplementedError()
def _send_new(self, text):
raise NotImplementedError()
def _send_edit(self, text):
raise NotImplementedError()
def new(self):
raise NotImplementedError()
|
from bot.api.domain import Message
from bot.logger.message_sender import MessageSender
from bot.logger.message_sender.api import ApiMessageSender
from bot.logger.message_sender.message_builder import MessageBuilder
class ReusableMessageSender(MessageSender):
def __init__(self, sender: ApiMessageSender, builder: MessageBuilder, max_length: int = 4000):
self.sender = sender
self.builder = builder
self.max_length = max_length
self.message_id = None
def send(self, text):
message = self._get_message_for(text)
self._get_send_func()(message)
def _get_message_for(self, text):
self.builder.add(text)
self.__check_length(text)
return self.builder.get_message()
def __check_length(self, text):
if self.builder.get_length() > self.max_length:
self.new()
# if length is still greater than max_length, let it fail, otherwise we would enter on infinite loop
self.builder.add(text)
def _get_send_func(self):
return self.__send_standalone_message if self.message_id is None else self.__send_edited_message
def __send_standalone_message(self, message: Message):
try:
self.message_id = self.sender.send(message)
finally:
if self.message_id is None:
# Discard current message, as there has been a problem with the message_id retrieval and we
# don't know if it was properly sent or not, so we threat it as corrupt and start a new one.
# That way, the next send:
# - Will not fail if the problem was with this message content
# - Won't have repeated content if this message was really sent but the request was interrupted
# by some event (like a KeyboardInterrupt)
self.new()
def __send_edited_message(self, message: Message):
self.sender.edit(message, self.message_id)
def new(self):
self.builder.clear()
self.message_id = None
|
Refactor ReusableMessageSender to be resilient against errors on first message api call, whose result is needed to get the message_id to edit further.
|
Refactor ReusableMessageSender to be resilient against errors on first message api call, whose result is needed to get the message_id to edit further.
Also, an upper limit has been added to avoid errors because of too long messages.
|
Python
|
agpl-3.0
|
alvarogzp/telegram-bot,alvarogzp/telegram-bot
|
python
|
## Code Before:
from bot.api.api import Api
from bot.logger.message_sender import MessageSender
class ReusableMessageSender(MessageSender):
def __init__(self, api: Api, separator):
self.api = api
self.separator = separator
def send(self, text):
if self._is_new():
self._send_new(text)
else:
self._send_edit(text)
def _is_new(self):
raise NotImplementedError()
def _send_new(self, text):
raise NotImplementedError()
def _send_edit(self, text):
raise NotImplementedError()
def new(self):
raise NotImplementedError()
## Instruction:
Refactor ReusableMessageSender to be resilient against errors on first message api call, whose result is needed to get the message_id to edit further.
Also, an upper limit has been added to avoid errors because of too long messages.
## Code After:
from bot.api.domain import Message
from bot.logger.message_sender import MessageSender
from bot.logger.message_sender.api import ApiMessageSender
from bot.logger.message_sender.message_builder import MessageBuilder
class ReusableMessageSender(MessageSender):
    """MessageSender that appends to one message by editing it in place,
    starting a fresh message once the accumulated text exceeds max_length."""

    def __init__(self, sender: ApiMessageSender, builder: MessageBuilder, max_length: int = 4000):
        self.sender = sender
        self.builder = builder
        # presumably kept under Telegram's message-size limit -- confirm.
        self.max_length = max_length
        # message_id of the message being edited; None means "start a new one".
        self.message_id = None

    def send(self, text):
        """Append *text* to the current message, sending or editing as needed."""
        message = self._get_message_for(text)
        self._get_send_func()(message)

    def _get_message_for(self, text):
        # Build the combined message, rolling over to a new one if too long.
        self.builder.add(text)
        self.__check_length(text)
        return self.builder.get_message()

    def __check_length(self, text):
        if self.builder.get_length() > self.max_length:
            self.new()
            # if length is still greater than max_length, let it fail,
            # otherwise we would enter on infinite loop
            self.builder.add(text)

    def _get_send_func(self):
        # Standalone send for a fresh message, edit for an ongoing one.
        return self.__send_standalone_message if self.message_id is None else self.__send_edited_message

    def __send_standalone_message(self, message: Message):
        try:
            self.message_id = self.sender.send(message)
        finally:
            if self.message_id is None:
                # Discard current message, as there has been a problem with the message_id retrieval and we
                # don't know if it was properly sent or not, so we treat it as corrupt and start a new one.
                # That way, the next send:
                #  - Will not fail if the problem was with this message content
                #  - Won't have repeated content if this message was really sent but the request was interrupted
                #    by some event (like a KeyboardInterrupt)
                self.new()

    def __send_edited_message(self, message: Message):
        self.sender.edit(message, self.message_id)

    def new(self):
        """Forget the current message so the next send starts a standalone one."""
        self.builder.clear()
        self.message_id = None
|
# ... existing code ...
from bot.api.domain import Message
from bot.logger.message_sender import MessageSender
from bot.logger.message_sender.api import ApiMessageSender
from bot.logger.message_sender.message_builder import MessageBuilder
class ReusableMessageSender(MessageSender):
def __init__(self, sender: ApiMessageSender, builder: MessageBuilder, max_length: int = 4000):
self.sender = sender
self.builder = builder
self.max_length = max_length
self.message_id = None
def send(self, text):
message = self._get_message_for(text)
self._get_send_func()(message)
def _get_message_for(self, text):
self.builder.add(text)
self.__check_length(text)
return self.builder.get_message()
def __check_length(self, text):
if self.builder.get_length() > self.max_length:
self.new()
# if length is still greater than max_length, let it fail, otherwise we would enter on infinite loop
self.builder.add(text)
def _get_send_func(self):
return self.__send_standalone_message if self.message_id is None else self.__send_edited_message
def __send_standalone_message(self, message: Message):
try:
self.message_id = self.sender.send(message)
finally:
if self.message_id is None:
# Discard current message, as there has been a problem with the message_id retrieval and we
# don't know if it was properly sent or not, so we threat it as corrupt and start a new one.
# That way, the next send:
# - Will not fail if the problem was with this message content
# - Won't have repeated content if this message was really sent but the request was interrupted
# by some event (like a KeyboardInterrupt)
self.new()
def __send_edited_message(self, message: Message):
self.sender.edit(message, self.message_id)
def new(self):
self.builder.clear()
self.message_id = None
# ... rest of the code ...
|
83928f77fb82da01b9521646ffc6b965f70e1a95
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name='chainer',
version='1.0.0',
description='A flexible framework of neural networks',
author='Seiya Tokui',
author_email='[email protected]',
url='http://chainer.org/',
packages=['chainer',
'chainer.cudnn',
'chainer.functions',
'chainer.optimizers',
'chainer.utils'],
install_requires=['numpy'],
scripts=['scripts/chainer-cuda-requirements'],
)
|
from distutils.core import setup
setup(
name='chainer',
version='1.0.0',
description='A flexible framework of neural networks',
author='Seiya Tokui',
author_email='[email protected]',
url='http://chainer.org/',
packages=['chainer',
'chainer.cudnn',
'chainer.functions',
'chainer.optimizers',
'chainer.requirements',
'chainer.utils'],
install_requires=['numpy'],
scripts=['scripts/chainer-cuda-requirements'],
)
|
Add cuda.requirements to packages to install
|
Add cuda.requirements to packages to install
|
Python
|
mit
|
niboshi/chainer,benob/chainer,jnishi/chainer,jnishi/chainer,muupan/chainer,woodshop/complex-chainer,chainer/chainer,keisuke-umezawa/chainer,wkentaro/chainer,tscohen/chainer,kikusu/chainer,ikasumi/chainer,jnishi/chainer,ktnyt/chainer,cemoody/chainer,ktnyt/chainer,ysekky/chainer,tkerola/chainer,wkentaro/chainer,nushio3/chainer,pfnet/chainer,kuwa32/chainer,elviswf/chainer,masia02/chainer,bayerj/chainer,okuta/chainer,hvy/chainer,benob/chainer,delta2323/chainer,hvy/chainer,truongdq/chainer,minhpqn/chainer,kashif/chainer,chainer/chainer,sinhrks/chainer,woodshop/chainer,t-abe/chainer,chainer/chainer,umitanuki/chainer,AlpacaDB/chainer,rezoo/chainer,okuta/chainer,anaruse/chainer,kiyukuta/chainer,sou81821/chainer,okuta/chainer,hvy/chainer,AlpacaDB/chainer,chainer/chainer,1986ks/chainer,jnishi/chainer,hidenori-t/chainer,kikusu/chainer,ktnyt/chainer,niboshi/chainer,keisuke-umezawa/chainer,cupy/cupy,nushio3/chainer,keisuke-umezawa/chainer,niboshi/chainer,aonotas/chainer,ttakamura/chainer,ytoyama/yans_chainer_hackathon,muupan/chainer,tereka114/chainer,laysakura/chainer,ttakamura/chainer,jfsantos/chainer,ktnyt/chainer,hvy/chainer,truongdq/chainer,sinhrks/chainer,ronekko/chainer,okuta/chainer,keisuke-umezawa/chainer,Kaisuke5/chainer,tigerneil/chainer,niboshi/chainer,cupy/cupy,cupy/cupy,wkentaro/chainer,yanweifu/chainer,t-abe/chainer,wavelets/chainer,wkentaro/chainer,cupy/cupy
|
python
|
## Code Before:
from distutils.core import setup
setup(
name='chainer',
version='1.0.0',
description='A flexible framework of neural networks',
author='Seiya Tokui',
author_email='[email protected]',
url='http://chainer.org/',
packages=['chainer',
'chainer.cudnn',
'chainer.functions',
'chainer.optimizers',
'chainer.utils'],
install_requires=['numpy'],
scripts=['scripts/chainer-cuda-requirements'],
)
## Instruction:
Add cuda.requirements to packages to install
## Code After:
from distutils.core import setup
setup(
name='chainer',
version='1.0.0',
description='A flexible framework of neural networks',
author='Seiya Tokui',
author_email='[email protected]',
url='http://chainer.org/',
packages=['chainer',
'chainer.cudnn',
'chainer.functions',
'chainer.optimizers',
'chainer.requirements',
'chainer.utils'],
install_requires=['numpy'],
scripts=['scripts/chainer-cuda-requirements'],
)
|
# ... existing code ...
'chainer.cudnn',
'chainer.functions',
'chainer.optimizers',
'chainer.requirements',
'chainer.utils'],
install_requires=['numpy'],
scripts=['scripts/chainer-cuda-requirements'],
# ... rest of the code ...
|
fb665cf8d6c0eb6c794a41eaf312c35473d1bdf0
|
tests/settings_complex.py
|
tests/settings_complex.py
|
from settings import *
INSTALLED_APPS.append('complex')
INSTALLED_APPS.append('django.contrib.comments')
ROOT_URLCONF = 'complex.urls'
|
from settings import *
INSTALLED_APPS += [
'complex',
'django.contrib.comments',
'django.contrib.sites',
]
ROOT_URLCONF = 'complex.urls'
|
Add sites app, change how installed_apps are edited.
|
Add sites app, change how installed_apps are edited.
|
Python
|
bsd-3-clause
|
esatterwhite/django-tastypie,beni55/django-tastypie,Eksmo/django-tastypie,SeanHayes/django-tastypie,cbxcube/bezrealitky.py,ywarezk/nerdeez-tastypie,mohabusama/django-tastypie,ocadotechnology/django-tastypie,ocadotechnology/django-tastypie,waveaccounting/django-tastypie,SeanHayes/django-tastypie,Eksmo/django-tastypie,beedesk/django-tastypie,yfli/django-tastypie,shownomercy/django-tastypie,coxmediagroup/django-tastypie,Eksmo/django-tastypie,guilhermegm/django-tastypie,backslash112/django-tastypie,grischa/django-tastypie,mthornhill/django-tastypie,glencoates/django-tastypie,loftywaif002/django-tastypie,marcosleonefilho/hoop-tastypie,backslash112/django-tastypie,beni55/django-tastypie,SiggyF/django-tastypie,VishvajitP/django-tastypie,VishvajitP/django-tastypie,ipsosante/django-tastypie,waveaccounting/django-tastypie,doselect/django-tastypie,ipsosante/django-tastypie,beedesk/django-tastypie,yfli/django-tastypie,mjschultz/django-tastefulpy,SiggyF/django-tastypie,doselect/django-tastypie,beni55/django-tastypie,frifri/django-tastypie,mitar/django-tastypie,nomadjourney/django-tastypie,ipsosante/django-tastypie,wlanslovenija/django-tastypie,igavrilov/django-tastypie,strets123/django-tastypie,mohabusama/django-tastypie,Perkville/django-tastypie,annacorobco/django-tastypie,ywarezk/nerdeez-tastypie,Perkville/django-tastypie,strets123/django-tastypie-tweaks,igavrilov/django-tastypie,cbxcube/bezrealitky.py,ocadotechnology/django-tastypie,nomadjourney/django-tastypie,annacorobco/django-tastypie,frifri/django-tastypie,strets123/django-tastypie,guilhermegm/django-tastypie,mthornhill/django-tastypie,tyaslab/django-tastypie,strets123/django-tastypie,loftywaif002/django-tastypie,pveglia/django-tastypie,rbraley/django-tastypie,beedesk/django-tastypie,doselect/django-tastypie,backslash112/django-tastypie,esatterwhite/django-tastypie,shownomercy/django-tastypie,pveglia/django-tastypie,strets123/django-tastypie-tweaks,guilhermegm/django-tastypie,wlanslovenija/django-tastypie,mthornhill/djan
go-tastypie,nomadjourney/django-tastypie,marcosleonefilho/hoop-tastypie,esatterwhite/django-tastypie,akvo/django-tastypie,igavrilov/django-tastypie,wlanslovenija/django-tastypie,mjschultz/django-tastefulpy,cbxcube/bezrealitky.py,loftywaif002/django-tastypie,SiggyF/django-tastypie,sideffect0/django-tastypie,akvo/django-tastypie,coxmediagroup/django-tastypie,Perkville/django-tastypie,pveglia/django-tastypie,VishvajitP/django-tastypie,strets123/django-tastypie-tweaks,mohabusama/django-tastypie,coxmediagroup/django-tastypie,glencoates/django-tastypie,grischa/django-tastypie,sideffect0/django-tastypie,mjschultz/django-tastefulpy,mitar/django-tastypie,sideffect0/django-tastypie,rbraley/django-tastypie,waveaccounting/django-tastypie,SeanHayes/django-tastypie,tyaslab/django-tastypie,shownomercy/django-tastypie,yfli/django-tastypie,annacorobco/django-tastypie
|
python
|
## Code Before:
from settings import *
INSTALLED_APPS.append('complex')
INSTALLED_APPS.append('django.contrib.comments')
ROOT_URLCONF = 'complex.urls'
## Instruction:
Add sites app, change how installed_apps are edited.
## Code After:
from settings import *
INSTALLED_APPS += [
'complex',
'django.contrib.comments',
'django.contrib.sites',
]
ROOT_URLCONF = 'complex.urls'
|
...
from settings import *
INSTALLED_APPS += [
'complex',
'django.contrib.comments',
'django.contrib.sites',
]
ROOT_URLCONF = 'complex.urls'
...
|
339c3f76cd8cab33734b0f5aad9713cf0140ce57
|
core/src/test/java/com/github/jsonldjava/core/JsonLdFramingTest.java
|
core/src/test/java/com/github/jsonldjava/core/JsonLdFramingTest.java
|
package com.github.jsonldjava.core;
import com.github.jsonldjava.utils.JsonUtils;
import java.io.IOException;
import org.junit.Test;
import static org.junit.Assert.*;
public class JsonLdFramingTest {
@Test
public void testFrame0001() throws IOException, JsonLdError {
try {
Object frame = JsonUtils.fromInputStream(
getClass().getResourceAsStream("/custom/frame-0001-frame.jsonld"));
Object in = JsonUtils.fromInputStream(
getClass().getResourceAsStream("/custom/frame-0001-in.jsonld"));
JsonLdProcessor.frame(in, frame, new JsonLdOptions());
} catch (Throwable t) {
t.printStackTrace();
fail();
}
}
}
|
package com.github.jsonldjava.core;
import com.github.jsonldjava.utils.JsonUtils;
import java.io.IOException;
import java.util.Map;
import org.junit.Test;
import static org.junit.Assert.*;
public class JsonLdFramingTest {
@Test
public void testFrame0001() throws IOException, JsonLdError {
Object frame = JsonUtils.fromInputStream(
getClass().getResourceAsStream("/custom/frame-0001-frame.jsonld"));
Object in = JsonUtils.fromInputStream(
getClass().getResourceAsStream("/custom/frame-0001-in.jsonld"));
Map<String, Object> frame2 = JsonLdProcessor.frame(in, frame, new JsonLdOptions());
assertEquals(2, frame2.size());
}
}
|
Add positive test for completion in the framing test
|
Add positive test for completion in the framing test
|
Java
|
bsd-3-clause
|
jsonld-java/jsonld-java
|
java
|
## Code Before:
package com.github.jsonldjava.core;
import com.github.jsonldjava.utils.JsonUtils;
import java.io.IOException;
import org.junit.Test;
import static org.junit.Assert.*;
public class JsonLdFramingTest {
@Test
public void testFrame0001() throws IOException, JsonLdError {
try {
Object frame = JsonUtils.fromInputStream(
getClass().getResourceAsStream("/custom/frame-0001-frame.jsonld"));
Object in = JsonUtils.fromInputStream(
getClass().getResourceAsStream("/custom/frame-0001-in.jsonld"));
JsonLdProcessor.frame(in, frame, new JsonLdOptions());
} catch (Throwable t) {
t.printStackTrace();
fail();
}
}
}
## Instruction:
Add positive test for completion in the framing test
## Code After:
package com.github.jsonldjava.core;
import com.github.jsonldjava.utils.JsonUtils;
import java.io.IOException;
import java.util.Map;
import org.junit.Test;
import static org.junit.Assert.*;
public class JsonLdFramingTest {
@Test
public void testFrame0001() throws IOException, JsonLdError {
Object frame = JsonUtils.fromInputStream(
getClass().getResourceAsStream("/custom/frame-0001-frame.jsonld"));
Object in = JsonUtils.fromInputStream(
getClass().getResourceAsStream("/custom/frame-0001-in.jsonld"));
Map<String, Object> frame2 = JsonLdProcessor.frame(in, frame, new JsonLdOptions());
assertEquals(2, frame2.size());
}
}
|
...
import com.github.jsonldjava.utils.JsonUtils;
import java.io.IOException;
import java.util.Map;
import org.junit.Test;
import static org.junit.Assert.*;
...
@Test
public void testFrame0001() throws IOException, JsonLdError {
Object frame = JsonUtils.fromInputStream(
getClass().getResourceAsStream("/custom/frame-0001-frame.jsonld"));
Object in = JsonUtils.fromInputStream(
getClass().getResourceAsStream("/custom/frame-0001-in.jsonld"));
Map<String, Object> frame2 = JsonLdProcessor.frame(in, frame, new JsonLdOptions());
assertEquals(2, frame2.size());
}
}
...
|
f83282b1747e255d35e18e9fecad1750d1564f9e
|
do_record/record.py
|
do_record/record.py
|
"""DigitalOcean DNS Records."""
from certbot_dns_auth.printer import printer
from do_record import http
class Record(object):
"""Handle DigitalOcean DNS records."""
def __init__(self, api_key, domain, hostname):
self._number = None
self.domain = domain
self.hostname = hostname
self.api_key = api_key
def create(self, value):
"""Create this record on DigitalOcean with the supplied value."""
self._number = http.create(self, value)
return self.number
def delete(self, record_id=None):
"""Delete this record on DigitalOcean, identified by record_id."""
if record_id is None:
record_id = self.number
http.delete(self, record_id)
def printer(self):
printer(self.number)
@property
def number(self):
return self._number
@number.setter
def number(self, value):
if self.number is None:
self._number = value
else:
raise ValueError(
'Cannot externally reset a record\'s number identifier.')
|
"""DigitalOcean DNS Records."""
from certbot_dns_auth.printer import printer
from do_record import http
class Record(object):
"""Handle DigitalOcean DNS records."""
def __init__(self, api_key, domain, hostname):
self._number = None
self.domain = domain
self.hostname = hostname
self.api_key = api_key
def create(self, value):
"""Create this record on DigitalOcean with the supplied value."""
self._number = http.create(self, value)
return self.number
def delete(self, record_id=None):
"""Delete this record on DigitalOcean, identified by record_id."""
if record_id is None:
record_id = self.number
http.delete(self, record_id)
def printer(self):
printer(self.number)
@property
def number(self):
return self._number
@number.setter
def number(self, value):
self._number = value
|
Remove Code That Doesn't Have a Test
|
Remove Code That Doesn't Have a Test
|
Python
|
apache-2.0
|
Jitsusama/lets-do-dns
|
python
|
## Code Before:
"""DigitalOcean DNS Records."""
from certbot_dns_auth.printer import printer
from do_record import http
class Record(object):
"""Handle DigitalOcean DNS records."""
def __init__(self, api_key, domain, hostname):
self._number = None
self.domain = domain
self.hostname = hostname
self.api_key = api_key
def create(self, value):
"""Create this record on DigitalOcean with the supplied value."""
self._number = http.create(self, value)
return self.number
def delete(self, record_id=None):
"""Delete this record on DigitalOcean, identified by record_id."""
if record_id is None:
record_id = self.number
http.delete(self, record_id)
def printer(self):
printer(self.number)
@property
def number(self):
return self._number
@number.setter
def number(self, value):
if self.number is None:
self._number = value
else:
raise ValueError(
'Cannot externally reset a record\'s number identifier.')
## Instruction:
Remove Code That Doesn't Have a Test
## Code After:
"""DigitalOcean DNS Records."""
from certbot_dns_auth.printer import printer
from do_record import http
class Record(object):
"""Handle DigitalOcean DNS records."""
def __init__(self, api_key, domain, hostname):
self._number = None
self.domain = domain
self.hostname = hostname
self.api_key = api_key
def create(self, value):
"""Create this record on DigitalOcean with the supplied value."""
self._number = http.create(self, value)
return self.number
def delete(self, record_id=None):
"""Delete this record on DigitalOcean, identified by record_id."""
if record_id is None:
record_id = self.number
http.delete(self, record_id)
def printer(self):
printer(self.number)
@property
def number(self):
return self._number
@number.setter
def number(self, value):
self._number = value
|
// ... existing code ...
@number.setter
def number(self, value):
self._number = value
// ... rest of the code ...
|
fe7ab3060c43d509f995cc64998139a623b21a4a
|
bot/cogs/owner.py
|
bot/cogs/owner.py
|
import discord
from discord.ext import commands
class Owner:
"""Admin-only commands that make the bot dynamic."""
def __init__(self, bot):
self.bot = bot
@commands.command()
@commands.is_owner()
async def close(self, ctx: commands.Context):
"""Closes the bot safely. Can only be used by the owner."""
await self.bot.logout()
@commands.command()
@commands.is_owner()
async def status(self, ctx: commands.Context, *, status: str):
"""Changes the bot's status. Can only be used by the owner."""
await self.bot.change_presence(activity=discord.Game(name=status))
@commands.command(name="reload")
@commands.is_owner()
async def _reload(self, ctx, *, ext: str = None):
"""Reloads a module. Can only be used by the owner."""
if ext:
self.bot.unload_extension(ext)
self.bot.load_extension(ext)
else:
for m in self.bot.initial_extensions:
self.bot.unload_extension(m)
self.bot.load_extension(m)
def setup(bot):
bot.add_cog(Owner(bot))
|
import discord
from discord.ext import commands
class Owner:
"""Admin-only commands that make the bot dynamic."""
def __init__(self, bot):
self.bot = bot
@commands.command()
@commands.is_owner()
async def close(self, ctx: commands.Context):
"""Closes the bot safely. Can only be used by the owner."""
await self.bot.logout()
@commands.command()
@commands.is_owner()
async def status(self, ctx: commands.Context, *, status: str):
"""Changes the bot's status. Can only be used by the owner."""
await self.bot.change_presence(activity=discord.Game(name=status))
@commands.command(name="reload")
@commands.is_owner()
async def _reload(self, ctx, *, ext: str = None):
"""Reloads a module. Can only be used by the owner."""
if ext:
self.bot.unload_extension(ext)
self.bot.load_extension(ext)
else:
for m in self.bot.initial_extensions:
self.bot.unload_extension(m)
self.bot.load_extension(m)
await ctx.message.add_reaction(self.bot.emoji_rustok)
def setup(bot):
bot.add_cog(Owner(bot))
|
Add OK reaction to reload command
|
Add OK reaction to reload command
|
Python
|
mit
|
ivandardi/RustbotPython,ivandardi/RustbotPython
|
python
|
## Code Before:
import discord
from discord.ext import commands
class Owner:
"""Admin-only commands that make the bot dynamic."""
def __init__(self, bot):
self.bot = bot
@commands.command()
@commands.is_owner()
async def close(self, ctx: commands.Context):
"""Closes the bot safely. Can only be used by the owner."""
await self.bot.logout()
@commands.command()
@commands.is_owner()
async def status(self, ctx: commands.Context, *, status: str):
"""Changes the bot's status. Can only be used by the owner."""
await self.bot.change_presence(activity=discord.Game(name=status))
@commands.command(name="reload")
@commands.is_owner()
async def _reload(self, ctx, *, ext: str = None):
"""Reloads a module. Can only be used by the owner."""
if ext:
self.bot.unload_extension(ext)
self.bot.load_extension(ext)
else:
for m in self.bot.initial_extensions:
self.bot.unload_extension(m)
self.bot.load_extension(m)
def setup(bot):
bot.add_cog(Owner(bot))
## Instruction:
Add OK reaction to reload command
## Code After:
import discord
from discord.ext import commands
class Owner:
"""Admin-only commands that make the bot dynamic."""
def __init__(self, bot):
self.bot = bot
@commands.command()
@commands.is_owner()
async def close(self, ctx: commands.Context):
"""Closes the bot safely. Can only be used by the owner."""
await self.bot.logout()
@commands.command()
@commands.is_owner()
async def status(self, ctx: commands.Context, *, status: str):
"""Changes the bot's status. Can only be used by the owner."""
await self.bot.change_presence(activity=discord.Game(name=status))
@commands.command(name="reload")
@commands.is_owner()
async def _reload(self, ctx, *, ext: str = None):
"""Reloads a module. Can only be used by the owner."""
if ext:
self.bot.unload_extension(ext)
self.bot.load_extension(ext)
else:
for m in self.bot.initial_extensions:
self.bot.unload_extension(m)
self.bot.load_extension(m)
await ctx.message.add_reaction(self.bot.emoji_rustok)
def setup(bot):
bot.add_cog(Owner(bot))
|
# ... existing code ...
self.bot.unload_extension(m)
self.bot.load_extension(m)
await ctx.message.add_reaction(self.bot.emoji_rustok)
def setup(bot):
bot.add_cog(Owner(bot))
# ... rest of the code ...
|
1ce0d9898fc31f08bbf5765b3a687eaa8067a465
|
flaskext/flask_scss.py
|
flaskext/flask_scss.py
|
from .scss import Scss
|
from .scss import Scss
from warnings import warn
warn(DeprecationWarning('Deprecated import method. '
'Please use:\n '
'from flask.ext.scss import Scss'), stacklevel=2)
|
Raise a DeprecationWarning when using pre-Flask-0.8 import scheme
|
Raise a DeprecationWarning when using pre-Flask-0.8 import scheme
|
Python
|
mit
|
bcarlin/flask-scss
|
python
|
## Code Before:
from .scss import Scss
## Instruction:
Raise a DeprecationWarning when using pre-Flask-0.8 import scheme
## Code After:
from .scss import Scss
from warnings import warn
warn(DeprecationWarning('Deprecated import method. '
'Please use:\n '
'from flask.ext.scss import Scss'), stacklevel=2)
|
// ... existing code ...
from .scss import Scss
from warnings import warn
warn(DeprecationWarning('Deprecated import method. '
'Please use:\n '
'from flask.ext.scss import Scss'), stacklevel=2)
// ... rest of the code ...
|
32126085f361489bb5c9c18972479b0c313c7d10
|
bash_runner/tasks.py
|
bash_runner/tasks.py
|
from celery import task
from cosmo.events import send_event as send_riemann_event
from cloudify.utils import get_local_ip
get_ip = get_local_ip
send_event = send_riemann_event
@task
def start(__cloudify_id, port=8080, **kwargs):
print 'HELLO BASH! %s' % port
send_event(__cloudify_id, get_ip(), "hello_bash status", "state", "running")
|
from celery import task
from cosmo.events import send_event as send_riemann_event
from cloudify.utils import get_local_ip
get_ip = get_local_ip
send_event = send_riemann_event
@task
def start(__cloudify_id, port=8080, **kwargs):
with open('/tmp/HELLO', 'w') as f:
print >> f, 'HELLO BASH! %s' % port
send_event(__cloudify_id, get_ip(), "hello_bash status", "state", "running")
|
Send the output to a tmp file
|
Send the output to a tmp file
|
Python
|
apache-2.0
|
rantav/cosmo-plugin-bash-runner
|
python
|
## Code Before:
from celery import task
from cosmo.events import send_event as send_riemann_event
from cloudify.utils import get_local_ip
get_ip = get_local_ip
send_event = send_riemann_event
@task
def start(__cloudify_id, port=8080, **kwargs):
print 'HELLO BASH! %s' % port
send_event(__cloudify_id, get_ip(), "hello_bash status", "state", "running")
## Instruction:
Send the output to a tmp file
## Code After:
from celery import task
from cosmo.events import send_event as send_riemann_event
from cloudify.utils import get_local_ip
get_ip = get_local_ip
send_event = send_riemann_event
@task
def start(__cloudify_id, port=8080, **kwargs):
with open('/tmp/HELLO', 'w') as f:
print >> f, 'HELLO BASH! %s' % port
send_event(__cloudify_id, get_ip(), "hello_bash status", "state", "running")
|
# ... existing code ...
@task
def start(__cloudify_id, port=8080, **kwargs):
with open('/tmp/HELLO', 'w') as f:
print >> f, 'HELLO BASH! %s' % port
send_event(__cloudify_id, get_ip(), "hello_bash status", "state", "running")
# ... rest of the code ...
|
0f1b95128b5efc4ca82204828921a75f694b4cb6
|
platform/vcs-impl/src/com/intellij/util/progress/StepsProgressIndicator.kt
|
platform/vcs-impl/src/com/intellij/util/progress/StepsProgressIndicator.kt
|
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.util.progress
import com.intellij.openapi.progress.ProgressIndicator
class StepsProgressIndicator(private val indicator: ProgressIndicator, private val totalSteps: Int) : ProgressIndicator by indicator {
private var finishedTasks = 0
fun nextStep() {
finishedTasks++
fraction = 0.0
}
override fun setFraction(fraction: Double) {
indicator.fraction = (finishedTasks + fraction) / totalSteps.toDouble()
}
}
|
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.util.progress
import com.intellij.ide.util.DelegatingProgressIndicator
import com.intellij.openapi.progress.ProgressIndicator
class StepsProgressIndicator(private val indicator: ProgressIndicator, private val totalSteps: Int) :
DelegatingProgressIndicator(indicator) {
private var finishedTasks = 0
fun nextStep() {
finishedTasks++
fraction = 0.0
}
override fun setFraction(fraction: Double) {
indicator.fraction = (finishedTasks + fraction) / totalSteps.toDouble()
}
}
|
Use DelegatingProgressIndicator instead of kotlin's delegation to properly implement StandardProgressIndicator
|
[vcs] Use DelegatingProgressIndicator instead of kotlin's delegation to properly implement StandardProgressIndicator
GitOrigin-RevId: ef59cf70c02668e62b5f26541a470c738ba8d520
|
Kotlin
|
apache-2.0
|
allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community
|
kotlin
|
## Code Before:
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.util.progress
import com.intellij.openapi.progress.ProgressIndicator
class StepsProgressIndicator(private val indicator: ProgressIndicator, private val totalSteps: Int) : ProgressIndicator by indicator {
private var finishedTasks = 0
fun nextStep() {
finishedTasks++
fraction = 0.0
}
override fun setFraction(fraction: Double) {
indicator.fraction = (finishedTasks + fraction) / totalSteps.toDouble()
}
}
## Instruction:
[vcs] Use DelegatingProgressIndicator instead of kotlin's delegation to properly implement StandardProgressIndicator
GitOrigin-RevId: ef59cf70c02668e62b5f26541a470c738ba8d520
## Code After:
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.util.progress
import com.intellij.ide.util.DelegatingProgressIndicator
import com.intellij.openapi.progress.ProgressIndicator
class StepsProgressIndicator(private val indicator: ProgressIndicator, private val totalSteps: Int) :
DelegatingProgressIndicator(indicator) {
private var finishedTasks = 0
fun nextStep() {
finishedTasks++
fraction = 0.0
}
override fun setFraction(fraction: Double) {
indicator.fraction = (finishedTasks + fraction) / totalSteps.toDouble()
}
}
|
// ... existing code ...
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.util.progress
import com.intellij.ide.util.DelegatingProgressIndicator
import com.intellij.openapi.progress.ProgressIndicator
class StepsProgressIndicator(private val indicator: ProgressIndicator, private val totalSteps: Int) :
DelegatingProgressIndicator(indicator) {
private var finishedTasks = 0
fun nextStep() {
// ... rest of the code ...
|
4a49ddfafd353bec50bfd706035f0202d6ff18b0
|
modules/vrjuggler/vrjconfig/org/vrjuggler/vrjconfig/ui/ConfigFileFilter.java
|
modules/vrjuggler/vrjconfig/org/vrjuggler/vrjconfig/ui/ConfigFileFilter.java
|
package org.vrjuggler.vrjconfig.ui;
import java.io.File;
import javax.swing.filechooser.*;
/**
* FileFilter for VR Juggler configuration files.
*/
public class ConfigFileFilter extends FileFilter
{
/**
* Verify that the given file is a configuration file.
*/
public boolean accept(File f)
{
String ext = null;
String s = f.getName();
int i = s.lastIndexOf('.');
if (i > 0 && i < s.length() - 1)
{
ext = s.substring(i+1).toLowerCase();
if(ext.equals("jconf"))
{
return true;
}
}
return false;
}
/**
* Get a description of this filter to display in JFileChooser.
*/
public String getDescription()
{
return("VR Juggler Configuration File (*.jconf)");
}
}
|
package org.vrjuggler.vrjconfig.ui;
import java.io.File;
import javax.swing.filechooser.*;
/**
* FileFilter for VR Juggler configuration files.
*/
public class ConfigFileFilter extends FileFilter
{
/**
* Verify that the given file is a configuration file.
*/
public boolean accept(File f)
{
if(f.isDirectory())
{
return true;
}
String ext = null;
String s = f.getName();
int i = s.lastIndexOf('.');
if (i > 0 && i < s.length() - 1)
{
ext = s.substring(i+1).toLowerCase();
if(ext.equals("jconf"))
{
return true;
}
}
return false;
}
/**
* Get a description of this filter to display in JFileChooser.
*/
public String getDescription()
{
return("VR Juggler Configuration File (*.jconf)");
}
}
|
Allow the user to see directories also.
|
Allow the user to see directories also.
git-svn-id: 769d22dfa2d22aad706b9a451492fb87c0735f19@14187 08b38cba-cd3b-11de-854e-f91c5b6e4272
|
Java
|
lgpl-2.1
|
vrjuggler/vrjuggler,godbyk/vrjuggler-upstream-old,LiuKeHua/vrjuggler,vancegroup-mirrors/vrjuggler,vrjuggler/vrjuggler,vrjuggler/vrjuggler,godbyk/vrjuggler-upstream-old,LiuKeHua/vrjuggler,vancegroup-mirrors/vrjuggler,LiuKeHua/vrjuggler,MichaelMcDonnell/vrjuggler,vancegroup-mirrors/vrjuggler,LiuKeHua/vrjuggler,LiuKeHua/vrjuggler,godbyk/vrjuggler-upstream-old,vancegroup-mirrors/vrjuggler,vancegroup-mirrors/vrjuggler,LiuKeHua/vrjuggler,godbyk/vrjuggler-upstream-old,MichaelMcDonnell/vrjuggler,vrjuggler/vrjuggler,MichaelMcDonnell/vrjuggler,godbyk/vrjuggler-upstream-old,vrjuggler/vrjuggler,MichaelMcDonnell/vrjuggler,MichaelMcDonnell/vrjuggler,MichaelMcDonnell/vrjuggler,LiuKeHua/vrjuggler,vrjuggler/vrjuggler,vrjuggler/vrjuggler,MichaelMcDonnell/vrjuggler,vancegroup-mirrors/vrjuggler,godbyk/vrjuggler-upstream-old,vrjuggler/vrjuggler,LiuKeHua/vrjuggler,MichaelMcDonnell/vrjuggler
|
java
|
## Code Before:
package org.vrjuggler.vrjconfig.ui;
import java.io.File;
import javax.swing.filechooser.*;
/**
* FileFilter for VR Juggler configuration files.
*/
public class ConfigFileFilter extends FileFilter
{
/**
* Verify that the given file is a configuration file.
*/
public boolean accept(File f)
{
String ext = null;
String s = f.getName();
int i = s.lastIndexOf('.');
if (i > 0 && i < s.length() - 1)
{
ext = s.substring(i+1).toLowerCase();
if(ext.equals("jconf"))
{
return true;
}
}
return false;
}
/**
* Get a description of this filter to display in JFileChooser.
*/
public String getDescription()
{
return("VR Juggler Configuration File (*.jconf)");
}
}
## Instruction:
Allow the user to see directories also.
git-svn-id: 769d22dfa2d22aad706b9a451492fb87c0735f19@14187 08b38cba-cd3b-11de-854e-f91c5b6e4272
## Code After:
package org.vrjuggler.vrjconfig.ui;
import java.io.File;
import javax.swing.filechooser.*;
/**
* FileFilter for VR Juggler configuration files.
*/
public class ConfigFileFilter extends FileFilter
{
/**
* Verify that the given file is a configuration file.
*/
public boolean accept(File f)
{
if(f.isDirectory())
{
return true;
}
String ext = null;
String s = f.getName();
int i = s.lastIndexOf('.');
if (i > 0 && i < s.length() - 1)
{
ext = s.substring(i+1).toLowerCase();
if(ext.equals("jconf"))
{
return true;
}
}
return false;
}
/**
* Get a description of this filter to display in JFileChooser.
*/
public String getDescription()
{
return("VR Juggler Configuration File (*.jconf)");
}
}
|
// ... existing code ...
*/
public boolean accept(File f)
{
if(f.isDirectory())
{
return true;
}
String ext = null;
String s = f.getName();
int i = s.lastIndexOf('.');
// ... rest of the code ...
|
596f17efdac234193afba25bfed6035765fb1a74
|
ReactAndroid/src/main/java/com/facebook/react/fabric/FabricBinder.java
|
ReactAndroid/src/main/java/com/facebook/react/fabric/FabricBinder.java
|
/**
* Copyright (c) 2014-present, Facebook, Inc.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package com.facebook.react.fabric;
public interface FabricBinder {
void setBinding(FabricBinding binding);
}
|
/**
* Copyright (c) 2014-present, Facebook, Inc.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package com.facebook.react.fabric;
public interface FabricBinder<T extends FabricBinding> {
void setBinding(T binding);
}
|
Initialize FabricXX C++ code and register rootview
|
Initialize FabricXX C++ code and register rootview
Summary: This diff initializes Fabric C++ UIManager code from java, it also registers android RootViews inside C++.
Reviewed By: shergin
Differential Revision: D8878148
fbshipit-source-id: 8b7924f715b135eda37bb2683206d3e321a2b7b2
|
Java
|
mit
|
javache/react-native,myntra/react-native,pandiaraj44/react-native,janicduplessis/react-native,hoangpham95/react-native,exponentjs/react-native,hoangpham95/react-native,arthuralee/react-native,arthuralee/react-native,exponent/react-native,myntra/react-native,pandiaraj44/react-native,myntra/react-native,hammerandchisel/react-native,hammerandchisel/react-native,exponent/react-native,pandiaraj44/react-native,javache/react-native,facebook/react-native,pandiaraj44/react-native,hoangpham95/react-native,hoangpham95/react-native,hammerandchisel/react-native,facebook/react-native,myntra/react-native,exponent/react-native,hammerandchisel/react-native,exponent/react-native,hoangpham95/react-native,javache/react-native,janicduplessis/react-native,hoangpham95/react-native,janicduplessis/react-native,facebook/react-native,myntra/react-native,exponent/react-native,janicduplessis/react-native,myntra/react-native,facebook/react-native,javache/react-native,exponentjs/react-native,janicduplessis/react-native,janicduplessis/react-native,pandiaraj44/react-native,arthuralee/react-native,exponentjs/react-native,facebook/react-native,exponentjs/react-native,pandiaraj44/react-native,javache/react-native,hammerandchisel/react-native,exponent/react-native,hammerandchisel/react-native,exponent/react-native,hoangpham95/react-native,javache/react-native,pandiaraj44/react-native,exponentjs/react-native,facebook/react-native,facebook/react-native,arthuralee/react-native,facebook/react-native,janicduplessis/react-native,myntra/react-native,javache/react-native,hammerandchisel/react-native,javache/react-native,exponentjs/react-native,exponentjs/react-native,javache/react-native,myntra/react-native,exponentjs/react-native,hoangpham95/react-native,arthuralee/react-native,janicduplessis/react-native,facebook/react-native,pandiaraj44/react-native,myntra/react-native,hammerandchisel/react-native,exponent/react-native
|
java
|
## Code Before:
/**
* Copyright (c) 2014-present, Facebook, Inc.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package com.facebook.react.fabric;
public interface FabricBinder {
void setBinding(FabricBinding binding);
}
## Instruction:
Initialize FabricXX C++ code and register rootview
Summary: This diff initializes Fabric C++ UIManager code from java, it also registers android RootViews inside C++.
Reviewed By: shergin
Differential Revision: D8878148
fbshipit-source-id: 8b7924f715b135eda37bb2683206d3e321a2b7b2
## Code After:
/**
* Copyright (c) 2014-present, Facebook, Inc.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package com.facebook.react.fabric;
public interface FabricBinder<T extends FabricBinding> {
void setBinding(T binding);
}
|
# ... existing code ...
package com.facebook.react.fabric;
public interface FabricBinder<T extends FabricBinding> {
void setBinding(T binding);
}
# ... rest of the code ...
|
b98e86ad9b3120dce9f163236b5e28f564547c27
|
TWLight/resources/factories.py
|
TWLight/resources/factories.py
|
import factory
import random
from django.conf import settings
from TWLight.resources.models import Partner, Stream, Video, Suggestion
class PartnerFactory(factory.django.DjangoModelFactory):
class Meta:
model = Partner
strategy = factory.CREATE_STRATEGY
company_name = factory.Faker(
"company", locale=random.choice(settings.FAKER_LOCALES)
)
terms_of_use = factory.Faker("uri", locale=random.choice(settings.FAKER_LOCALES))
status = Partner.AVAILABLE # not the default, but usually wanted in tests
class StreamFactory(factory.django.DjangoModelFactory):
class Meta:
model = Stream
strategy = factory.CREATE_STRATEGY
partner = factory.SubFactory(PartnerFactory)
name = factory.Faker("bs", locale=random.choice(settings.FAKER_LOCALES))
class SuggestionFactory(factory.django.DjangoModelFactory):
class Meta:
model = Suggestion
strategy = factory.CREATE_STRATEGY
suggested_company_name = factory.Faker(
"company", locale=random.choice(settings.FAKER_LOCALES)
)
company_url = factory.Faker("url", locale=random.choice(settings.FAKER_LOCALES))
class VideoFactory(factory.django.DjangoModelFactory):
class Meta:
model = Video
strategy = factory.CREATE_STRATEGY
partner = factory.SubFactory(PartnerFactory)
|
import factory
import random
from django.conf import settings
from TWLight.resources.models import Partner, Stream, Video, Suggestion
class PartnerFactory(factory.django.DjangoModelFactory):
class Meta:
model = Partner
strategy = factory.CREATE_STRATEGY
company_name = factory.Faker(
"company", locale=random.choice(settings.FAKER_LOCALES)
)
terms_of_use = factory.Faker("uri", locale=random.choice(settings.FAKER_LOCALES))
status = Partner.AVAILABLE # not the default, but usually wanted in tests
class StreamFactory(factory.django.DjangoModelFactory):
class Meta:
model = Stream
strategy = factory.CREATE_STRATEGY
partner = factory.SubFactory(PartnerFactory)
name = factory.Faker("bs", locale=random.choice(settings.FAKER_LOCALES))
class SuggestionFactory(factory.django.DjangoModelFactory):
class Meta:
model = Suggestion
strategy = factory.CREATE_STRATEGY
suggested_company_name = factory.Faker("pystr", max_chars=40)
company_url = factory.Faker("url", locale=random.choice(settings.FAKER_LOCALES))
class VideoFactory(factory.django.DjangoModelFactory):
class Meta:
model = Video
strategy = factory.CREATE_STRATEGY
partner = factory.SubFactory(PartnerFactory)
|
Change suggested_company_name factory var to pystr
|
Change suggested_company_name factory var to pystr
|
Python
|
mit
|
WikipediaLibrary/TWLight,WikipediaLibrary/TWLight,WikipediaLibrary/TWLight,WikipediaLibrary/TWLight,WikipediaLibrary/TWLight
|
python
|
## Code Before:
import factory
import random
from django.conf import settings
from TWLight.resources.models import Partner, Stream, Video, Suggestion
class PartnerFactory(factory.django.DjangoModelFactory):
class Meta:
model = Partner
strategy = factory.CREATE_STRATEGY
company_name = factory.Faker(
"company", locale=random.choice(settings.FAKER_LOCALES)
)
terms_of_use = factory.Faker("uri", locale=random.choice(settings.FAKER_LOCALES))
status = Partner.AVAILABLE # not the default, but usually wanted in tests
class StreamFactory(factory.django.DjangoModelFactory):
class Meta:
model = Stream
strategy = factory.CREATE_STRATEGY
partner = factory.SubFactory(PartnerFactory)
name = factory.Faker("bs", locale=random.choice(settings.FAKER_LOCALES))
class SuggestionFactory(factory.django.DjangoModelFactory):
class Meta:
model = Suggestion
strategy = factory.CREATE_STRATEGY
suggested_company_name = factory.Faker(
"company", locale=random.choice(settings.FAKER_LOCALES)
)
company_url = factory.Faker("url", locale=random.choice(settings.FAKER_LOCALES))
class VideoFactory(factory.django.DjangoModelFactory):
class Meta:
model = Video
strategy = factory.CREATE_STRATEGY
partner = factory.SubFactory(PartnerFactory)
## Instruction:
Change suggested_company_name factory var to pystr
## Code After:
import factory
import random
from django.conf import settings
from TWLight.resources.models import Partner, Stream, Video, Suggestion
class PartnerFactory(factory.django.DjangoModelFactory):
class Meta:
model = Partner
strategy = factory.CREATE_STRATEGY
company_name = factory.Faker(
"company", locale=random.choice(settings.FAKER_LOCALES)
)
terms_of_use = factory.Faker("uri", locale=random.choice(settings.FAKER_LOCALES))
status = Partner.AVAILABLE # not the default, but usually wanted in tests
class StreamFactory(factory.django.DjangoModelFactory):
class Meta:
model = Stream
strategy = factory.CREATE_STRATEGY
partner = factory.SubFactory(PartnerFactory)
name = factory.Faker("bs", locale=random.choice(settings.FAKER_LOCALES))
class SuggestionFactory(factory.django.DjangoModelFactory):
class Meta:
model = Suggestion
strategy = factory.CREATE_STRATEGY
suggested_company_name = factory.Faker("pystr", max_chars=40)
company_url = factory.Faker("url", locale=random.choice(settings.FAKER_LOCALES))
class VideoFactory(factory.django.DjangoModelFactory):
class Meta:
model = Video
strategy = factory.CREATE_STRATEGY
partner = factory.SubFactory(PartnerFactory)
|
# ... existing code ...
model = Suggestion
strategy = factory.CREATE_STRATEGY
suggested_company_name = factory.Faker("pystr", max_chars=40)
company_url = factory.Faker("url", locale=random.choice(settings.FAKER_LOCALES))
# ... rest of the code ...
|
8b33e63ab84e2da2168259d8ce17c8afac964500
|
cacheops/management/commands/cleanfilecache.py
|
cacheops/management/commands/cleanfilecache.py
|
import os
from django.core.management.base import BaseCommand
from cacheops.conf import settings
class Command(BaseCommand):
help = 'Clean filebased cache'
def handle(self, **options):
os.system('find %s -type f \! -iname "\." -mmin +0 -delete' % settings.FILE_CACHE_DIR)
|
import os
from django.core.management.base import BaseCommand
from cacheops.conf import settings
class Command(BaseCommand):
help = 'Clean filebased cache'
def add_arguments(self, parser):
parser.add_argument('path', nargs='*', default=['default'])
def handle(self, **options):
for path in options['path']:
if path == 'default':
path = settings.FILE_CACHE_DIR
os.system('find %s -type f \! -iname "\." -mmin +0 -delete' % path)
|
Allow cleaning file cache in non default place
|
Allow cleaning file cache in non default place
|
Python
|
bsd-3-clause
|
LPgenerator/django-cacheops,Suor/django-cacheops
|
python
|
## Code Before:
import os
from django.core.management.base import BaseCommand
from cacheops.conf import settings
class Command(BaseCommand):
help = 'Clean filebased cache'
def handle(self, **options):
os.system('find %s -type f \! -iname "\." -mmin +0 -delete' % settings.FILE_CACHE_DIR)
## Instruction:
Allow cleaning file cache in non default place
## Code After:
import os
from django.core.management.base import BaseCommand
from cacheops.conf import settings
class Command(BaseCommand):
help = 'Clean filebased cache'
def add_arguments(self, parser):
parser.add_argument('path', nargs='*', default=['default'])
def handle(self, **options):
for path in options['path']:
if path == 'default':
path = settings.FILE_CACHE_DIR
os.system('find %s -type f \! -iname "\." -mmin +0 -delete' % path)
|
# ... existing code ...
class Command(BaseCommand):
help = 'Clean filebased cache'
def add_arguments(self, parser):
parser.add_argument('path', nargs='*', default=['default'])
def handle(self, **options):
for path in options['path']:
if path == 'default':
path = settings.FILE_CACHE_DIR
os.system('find %s -type f \! -iname "\." -mmin +0 -delete' % path)
# ... rest of the code ...
|
df650e952b13eca44d6896c52ff4b37cb8403e68
|
src/galileo/event/EventWithSynopsis.java
|
src/galileo/event/EventWithSynopsis.java
|
package galileo.event;
import java.io.IOException;
import galileo.serialization.SerializationInputStream;
import galileo.serialization.SerializationOutputStream;
/**
* Encapsulates a raw (byte[] based) event that includes a String representing
* the event synopsis. This can be used to essentially 'tag' particular blobs
* of data without writing specific events.
*
* @author malensek
*/
public class EventWithSynopsis implements Event {
private String synopsis;
private byte[] data;
private boolean compress = false;
public EventWithSynopsis(String synopsis, byte[] data) {
this.synopsis = synopsis;
this.data = data;
}
/**
* Enables compression when serializing this event. When deserializing,
* this setting has no effect.
*/
public void enableCompression() {
this.compress = true;
}
/**
* Disables compression when serializing this event. This is the default
* behavior. When deserializing, this setting has no effect.
*/
public void disableCompression() {
this.compress = false;
}
@Deserialize
public EventWithSynopsis(SerializationInputStream in)
throws IOException {
this.synopsis = in.readString();
this.data = in.readCompressableField();
}
@Override
public void serialize(SerializationOutputStream out)
throws IOException {
out.writeString(synopsis);
out.writeCompressableField(data, compress);
}
}
|
package galileo.event;
import java.io.IOException;
import galileo.serialization.SerializationInputStream;
import galileo.serialization.SerializationOutputStream;
/**
* Encapsulates a raw (byte[] based) event that includes a String representing
* the event synopsis. This can be used to essentially 'tag' particular blobs
* of data without writing specific events.
*
* @author malensek
*/
public class EventWithSynopsis implements Event {
private String synopsis;
private byte[] data;
private boolean compress = false;
public EventWithSynopsis(String synopsis, byte[] data) {
this.synopsis = synopsis;
this.data = data;
}
public String getSynopsis() {
return this.synopsis;
}
public byte[] getPayload() {
return this.data;
}
/**
* Enables compression when serializing this event. When deserializing,
* this setting has no effect.
*/
public void enableCompression() {
this.compress = true;
}
/**
* Disables compression when serializing this event. This is the default
* behavior. When deserializing, this setting has no effect.
*/
public void disableCompression() {
this.compress = false;
}
@Deserialize
public EventWithSynopsis(SerializationInputStream in)
throws IOException {
this.synopsis = in.readString();
this.data = in.readCompressableField();
}
@Override
public void serialize(SerializationOutputStream out)
throws IOException {
out.writeString(synopsis);
out.writeCompressableField(data, compress);
}
}
|
Add getters for synopsis and payload
|
Add getters for synopsis and payload
|
Java
|
bsd-2-clause
|
10000TB/galileo,10000TB/galileo
|
java
|
## Code Before:
package galileo.event;
import java.io.IOException;
import galileo.serialization.SerializationInputStream;
import galileo.serialization.SerializationOutputStream;
/**
* Encapsulates a raw (byte[] based) event that includes a String representing
* the event synopsis. This can be used to essentially 'tag' particular blobs
* of data without writing specific events.
*
* @author malensek
*/
public class EventWithSynopsis implements Event {
private String synopsis;
private byte[] data;
private boolean compress = false;
public EventWithSynopsis(String synopsis, byte[] data) {
this.synopsis = synopsis;
this.data = data;
}
/**
* Enables compression when serializing this event. When deserializing,
* this setting has no effect.
*/
public void enableCompression() {
this.compress = true;
}
/**
* Disables compression when serializing this event. This is the default
* behavior. When deserializing, this setting has no effect.
*/
public void disableCompression() {
this.compress = false;
}
@Deserialize
public EventWithSynopsis(SerializationInputStream in)
throws IOException {
this.synopsis = in.readString();
this.data = in.readCompressableField();
}
@Override
public void serialize(SerializationOutputStream out)
throws IOException {
out.writeString(synopsis);
out.writeCompressableField(data, compress);
}
}
## Instruction:
Add getters for synopsis and payload
## Code After:
package galileo.event;
import java.io.IOException;
import galileo.serialization.SerializationInputStream;
import galileo.serialization.SerializationOutputStream;
/**
* Encapsulates a raw (byte[] based) event that includes a String representing
* the event synopsis. This can be used to essentially 'tag' particular blobs
* of data without writing specific events.
*
* @author malensek
*/
public class EventWithSynopsis implements Event {
private String synopsis;
private byte[] data;
private boolean compress = false;
public EventWithSynopsis(String synopsis, byte[] data) {
this.synopsis = synopsis;
this.data = data;
}
public String getSynopsis() {
return this.synopsis;
}
public byte[] getPayload() {
return this.data;
}
/**
* Enables compression when serializing this event. When deserializing,
* this setting has no effect.
*/
public void enableCompression() {
this.compress = true;
}
/**
* Disables compression when serializing this event. This is the default
* behavior. When deserializing, this setting has no effect.
*/
public void disableCompression() {
this.compress = false;
}
@Deserialize
public EventWithSynopsis(SerializationInputStream in)
throws IOException {
this.synopsis = in.readString();
this.data = in.readCompressableField();
}
@Override
public void serialize(SerializationOutputStream out)
throws IOException {
out.writeString(synopsis);
out.writeCompressableField(data, compress);
}
}
|
...
public EventWithSynopsis(String synopsis, byte[] data) {
this.synopsis = synopsis;
this.data = data;
}
public String getSynopsis() {
return this.synopsis;
}
public byte[] getPayload() {
return this.data;
}
/**
...
|
ea58504e5d3e1f394f46798805cfb5950b8a4a9d
|
Cube.h
|
Cube.h
|
using namespace std;
class Cube
{
public:
typedef float * Array;
//static
int locAmbient, locDiffuse, locSpecular, locEyeLight, locLight, locTexture;
//static
int locMVP, locMV, locNM;
int numFrame;
GLuint shader, textureID;
static bool readTexture;//, readShader;
Array final_vert, final_text, norm_final;
GLuint vertbuffID[1], normbuffID[1], texbuffID[1];
int total, vsize, nsize, tsize;
Cube(void);
static GLuint loadShaderPair(char *, char *);
void init(float[]);
void bind(GLenum, GLenum);
void draw(GLGeometryTransform);
};
#endif
|
using namespace std;
class Cube
{
public:
typedef float * Array;
static int locAmbient, locDiffuse, locSpecular, locEyeLight, locLight, locTexture;
static int locMVP, locMV, locNM;
int numFrame;
static GLuint shader, textureID;
static bool readTexture, readShader;
Array final_vert, final_text, norm_final;
GLuint vertbuffID[1], normbuffID[1], texbuffID[1];
int total, vsize, nsize, tsize;
Cube(void);
static GLuint loadShaderPair(char *, char *);
void init(float[]);
void bind(GLenum, GLenum);
void draw(GLGeometryTransform);
};
#endif
|
Reduce Even More Memory Usage
|
Reduce Even More Memory Usage
|
C
|
mit
|
diwgan32/Maze,diwgan32/Maze
|
c
|
## Code Before:
using namespace std;
class Cube
{
public:
typedef float * Array;
//static
int locAmbient, locDiffuse, locSpecular, locEyeLight, locLight, locTexture;
//static
int locMVP, locMV, locNM;
int numFrame;
GLuint shader, textureID;
static bool readTexture;//, readShader;
Array final_vert, final_text, norm_final;
GLuint vertbuffID[1], normbuffID[1], texbuffID[1];
int total, vsize, nsize, tsize;
Cube(void);
static GLuint loadShaderPair(char *, char *);
void init(float[]);
void bind(GLenum, GLenum);
void draw(GLGeometryTransform);
};
#endif
## Instruction:
Reduce Even More Memory Usage
## Code After:
using namespace std;
class Cube
{
public:
typedef float * Array;
static int locAmbient, locDiffuse, locSpecular, locEyeLight, locLight, locTexture;
static int locMVP, locMV, locNM;
int numFrame;
static GLuint shader, textureID;
static bool readTexture, readShader;
Array final_vert, final_text, norm_final;
GLuint vertbuffID[1], normbuffID[1], texbuffID[1];
int total, vsize, nsize, tsize;
Cube(void);
static GLuint loadShaderPair(char *, char *);
void init(float[]);
void bind(GLenum, GLenum);
void draw(GLGeometryTransform);
};
#endif
|
# ... existing code ...
public:
typedef float * Array;
static int locAmbient, locDiffuse, locSpecular, locEyeLight, locLight, locTexture;
static int locMVP, locMV, locNM;
int numFrame;
static GLuint shader, textureID;
static bool readTexture, readShader;
Array final_vert, final_text, norm_final;
# ... rest of the code ...
|
a524cf9520b11bb55e3d7c74585aa171651f68bb
|
client/eu-client-service/src/main/java/com/mkl/eu/client/service/vo/EuObject.java
|
client/eu-client-service/src/main/java/com/mkl/eu/client/service/vo/EuObject.java
|
package com.mkl.eu.client.service.vo;
import javax.xml.bind.annotation.XmlID;
import java.io.Serializable;
/**
* Mother class of all VOs.
*
* @author MKL
*/
public abstract class EuObject implements Serializable {
/** Id of the object. */
private Long id;
/** @return the id. */
public Long getId() {
return id;
}
/** @param id the id to set. */
public void setId(Long id) {
this.id = id;
}
/**
* Method added because jaxb stores all the idRef in the same Map,
* whatever the class is. So if two different classes have the same id,
* there will be a collision.
*
* @return id for jaxb.
*/
@XmlID
public String getIdForJaxb() {
return getClass().toString() + "_" + getId();
}
/**
* So that jaxb works...
*/
public void setIdForJaxb(String id) {
}
}
|
package com.mkl.eu.client.service.vo;
import javax.xml.bind.annotation.XmlID;
import java.io.Serializable;
/**
* Mother class of all VOs.
*
* @author MKL
*/
public abstract class EuObject implements Serializable {
/** Id of the object. */
private Long id;
/** @return the id. */
public Long getId() {
return id;
}
/** @param id the id to set. */
public void setId(Long id) {
this.id = id;
}
/**
* Method added because jaxb stores all the idRef in the same Map,
* whatever the class is. So if two different classes have the same id,
* there will be a collision.
*
* @return id for jaxb.
*/
@XmlID
public String getIdForJaxb() {
return getClass().getSimpleName() + "_" + getId();
}
/**
* So that jaxb works...
*/
public void setIdForJaxb(String id) {
}
}
|
Fix Jaxb bug on XmlIdRef.
|
Fix Jaxb bug on XmlIdRef.
|
Java
|
mit
|
FogiaFr/eu
|
java
|
## Code Before:
package com.mkl.eu.client.service.vo;
import javax.xml.bind.annotation.XmlID;
import java.io.Serializable;
/**
* Mother class of all VOs.
*
* @author MKL
*/
public abstract class EuObject implements Serializable {
/** Id of the object. */
private Long id;
/** @return the id. */
public Long getId() {
return id;
}
/** @param id the id to set. */
public void setId(Long id) {
this.id = id;
}
/**
* Method added because jaxb stores all the idRef in the same Map,
* whatever the class is. So if two different classes have the same id,
* there will be a collision.
*
* @return id for jaxb.
*/
@XmlID
public String getIdForJaxb() {
return getClass().toString() + "_" + getId();
}
/**
* So that jaxb works...
*/
public void setIdForJaxb(String id) {
}
}
## Instruction:
Fix Jaxb bug on XmlIdRef.
## Code After:
package com.mkl.eu.client.service.vo;
import javax.xml.bind.annotation.XmlID;
import java.io.Serializable;
/**
* Mother class of all VOs.
*
* @author MKL
*/
public abstract class EuObject implements Serializable {
/** Id of the object. */
private Long id;
/** @return the id. */
public Long getId() {
return id;
}
/** @param id the id to set. */
public void setId(Long id) {
this.id = id;
}
/**
* Method added because jaxb stores all the idRef in the same Map,
* whatever the class is. So if two different classes have the same id,
* there will be a collision.
*
* @return id for jaxb.
*/
@XmlID
public String getIdForJaxb() {
return getClass().getSimpleName() + "_" + getId();
}
/**
* So that jaxb works...
*/
public void setIdForJaxb(String id) {
}
}
|
// ... existing code ...
*/
@XmlID
public String getIdForJaxb() {
return getClass().getSimpleName() + "_" + getId();
}
/**
// ... rest of the code ...
|
002dd6fa4af36bd722b3f194c93f1e2e628ad561
|
inboxen/app/model/email.py
|
inboxen/app/model/email.py
|
from inboxen.models import Alias, Attachment, Email, Header
from config.settings import datetime_format, recieved_header_name
from datetime import datetime
def make_email(message, alias, domain):
inbox = Alias.objects.filter(alias=alias, domain__domain=domain)[0]
user = inbox.user
body = message.base.body
recieved_date = datetime.strptime(message[recieved_header_name], datetime_format)
del message[recieved_header_name]
email = Email(inbox=inbox, user=user, body=body, recieved_date=recieved_date)
email.save()
for name in message.keys():
email.headers.create(name=name, data=message[name])
for part in message.walk():
if not part.body:
part.body = u''
email.attachments.create(
content_type=part.content_encoding['Content-Type'][0],
content_disposition=part.content_encoding['Content-Disposition'][0],
data=part.body
)
email.save()
|
from inboxen.models import Alias, Attachment, Email, Header
from config.settings import datetime_format, recieved_header_name
from datetime import datetime
def make_email(message, alias, domain):
inbox = Alias.objects.filter(alias=alias, domain__domain=domain)[0]
user = inbox.user
body = message.base.body
recieved_date = datetime.strptime(message[recieved_header_name], datetime_format)
del message[recieved_header_name]
email = Email(inbox=inbox, user=user, body=body, recieved_date=recieved_date)
email.save()
head_list = []
for name in message.keys():
header = Header(name=name, data=message[name])
header.save()
head_list.append(header)
# add all the headers at once should save us some queries
email.headers.add(*head_list)
attach_list = []
for part in message.walk():
if not part.body:
part.body = u''
attachment = Attachment(
content_type=part.content_encoding['Content-Type'][0],
content_disposition=part.content_encoding['Content-Disposition'][0],
data=part.body
)
attachment.save()
attach_list.append(attachment)
# as with headers above
email.attachments.add(*attach_list)
|
Reduce number of queries to DB
|
Reduce number of queries to DB
|
Python
|
agpl-3.0
|
Inboxen/router,Inboxen/Inboxen,Inboxen/Inboxen,Inboxen/Inboxen,Inboxen/Inboxen
|
python
|
## Code Before:
from inboxen.models import Alias, Attachment, Email, Header
from config.settings import datetime_format, recieved_header_name
from datetime import datetime
def make_email(message, alias, domain):
inbox = Alias.objects.filter(alias=alias, domain__domain=domain)[0]
user = inbox.user
body = message.base.body
recieved_date = datetime.strptime(message[recieved_header_name], datetime_format)
del message[recieved_header_name]
email = Email(inbox=inbox, user=user, body=body, recieved_date=recieved_date)
email.save()
for name in message.keys():
email.headers.create(name=name, data=message[name])
for part in message.walk():
if not part.body:
part.body = u''
email.attachments.create(
content_type=part.content_encoding['Content-Type'][0],
content_disposition=part.content_encoding['Content-Disposition'][0],
data=part.body
)
email.save()
## Instruction:
Reduce number of queries to DB
## Code After:
from inboxen.models import Alias, Attachment, Email, Header
from config.settings import datetime_format, recieved_header_name
from datetime import datetime
def make_email(message, alias, domain):
inbox = Alias.objects.filter(alias=alias, domain__domain=domain)[0]
user = inbox.user
body = message.base.body
recieved_date = datetime.strptime(message[recieved_header_name], datetime_format)
del message[recieved_header_name]
email = Email(inbox=inbox, user=user, body=body, recieved_date=recieved_date)
email.save()
head_list = []
for name in message.keys():
header = Header(name=name, data=message[name])
header.save()
head_list.append(header)
# add all the headers at once should save us some queries
email.headers.add(*head_list)
attach_list = []
for part in message.walk():
if not part.body:
part.body = u''
attachment = Attachment(
content_type=part.content_encoding['Content-Type'][0],
content_disposition=part.content_encoding['Content-Disposition'][0],
data=part.body
)
attachment.save()
attach_list.append(attachment)
# as with headers above
email.attachments.add(*attach_list)
|
# ... existing code ...
email = Email(inbox=inbox, user=user, body=body, recieved_date=recieved_date)
email.save()
head_list = []
for name in message.keys():
header = Header(name=name, data=message[name])
header.save()
head_list.append(header)
# add all the headers at once should save us some queries
email.headers.add(*head_list)
attach_list = []
for part in message.walk():
if not part.body:
part.body = u''
attachment = Attachment(
content_type=part.content_encoding['Content-Type'][0],
content_disposition=part.content_encoding['Content-Disposition'][0],
data=part.body
)
attachment.save()
attach_list.append(attachment)
# as with headers above
email.attachments.add(*attach_list)
# ... rest of the code ...
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.