commit (stringlengths 40-40) | old_file (stringlengths 4-234) | new_file (stringlengths 4-234) | old_contents (stringlengths 10-3.01k) | new_contents (stringlengths 19-3.38k) | subject (stringlengths 16-736) | message (stringlengths 17-2.63k) | lang (stringclasses, 4 values) | license (stringclasses, 13 values) | repos (stringlengths 5-82.6k) | config (stringclasses, 4 values) | content (stringlengths 134-4.41k) | fuzzy_diff (stringlengths 29-3.44k) |
---|---|---|---|---|---|---|---|---|---|---|---|---|
9547988a1a9ef8faf22d9bfa881f4e542637fd46
|
utils.py
|
utils.py
|
import xmlrpclib
import cPickle
import subprocess
from time import sleep
p = None
s = None
def start_plot_server():
    global p
    if p is None:
        p = subprocess.Popen(["python", "plot_server.py"])
def stop_plot_server():
    if p is not None:
        p.terminate()
        sleep(0.01)
        p.kill()
def plot_server_alive():
    global s
    try:
        s.alive()
    except Exception, e:
        if str(e).endswith("Connection refused"):
            return False
        else:
            raise
    return True
def establish_connection():
    global s
    s = xmlrpclib.ServerProxy("http://localhost:8000/", allow_none=True)
    if not plot_server_alive():
        start_plot_server()
        print "waiting for the plot server to start up..."
        while not plot_server_alive():
            sleep(0.05)
        print " done."
def plot(vert, triangles):
    print "plotting using mayavi..."
    v = cPickle.dumps(vert)
    t = cPickle.dumps(triangles)
    s.plot(v, t)
    print " done."
establish_connection()
|
import xmlrpclib
import cPickle
import subprocess
from time import sleep
p = None
s = None
def start_plot_server():
    global p
    if p is None:
        p = subprocess.Popen(["python", "plot_server.py"])
def stop_plot_server():
    if p is not None:
        p.terminate()
        sleep(0.01)
        p.kill()
def plot_server_alive():
    global s
    try:
        s.alive()
    except Exception, e:
        if str(e).endswith("Connection refused"):
            return False
        else:
            raise
    return True
def establish_connection():
    global s
    if s is not None:
        return
    s = xmlrpclib.ServerProxy("http://localhost:8000/", allow_none=True)
    if not plot_server_alive():
        start_plot_server()
        print "waiting for the plot server to start up..."
        while not plot_server_alive():
            sleep(0.05)
        print " done."
def plot(vert, triangles):
    establish_connection()
    print "plotting using mayavi..."
    v = cPickle.dumps(vert)
    t = cPickle.dumps(triangles)
    s.plot(v, t)
    print " done."
|
Establish connection only when needed
|
Establish connection only when needed
|
Python
|
bsd-3-clause
|
certik/mhd-hermes,certik/mhd-hermes
|
python
|
## Code Before:
import xmlrpclib
import cPickle
import subprocess
from time import sleep
p = None
s = None
def start_plot_server():
    global p
    if p is None:
        p = subprocess.Popen(["python", "plot_server.py"])
def stop_plot_server():
    if p is not None:
        p.terminate()
        sleep(0.01)
        p.kill()
def plot_server_alive():
    global s
    try:
        s.alive()
    except Exception, e:
        if str(e).endswith("Connection refused"):
            return False
        else:
            raise
    return True
def establish_connection():
    global s
    s = xmlrpclib.ServerProxy("http://localhost:8000/", allow_none=True)
    if not plot_server_alive():
        start_plot_server()
        print "waiting for the plot server to start up..."
        while not plot_server_alive():
            sleep(0.05)
        print " done."
def plot(vert, triangles):
    print "plotting using mayavi..."
    v = cPickle.dumps(vert)
    t = cPickle.dumps(triangles)
    s.plot(v, t)
    print " done."
establish_connection()
## Instruction:
Establish connection only when needed
## Code After:
import xmlrpclib
import cPickle
import subprocess
from time import sleep
p = None
s = None
def start_plot_server():
    global p
    if p is None:
        p = subprocess.Popen(["python", "plot_server.py"])
def stop_plot_server():
    if p is not None:
        p.terminate()
        sleep(0.01)
        p.kill()
def plot_server_alive():
    global s
    try:
        s.alive()
    except Exception, e:
        if str(e).endswith("Connection refused"):
            return False
        else:
            raise
    return True
def establish_connection():
    global s
    if s is not None:
        return
    s = xmlrpclib.ServerProxy("http://localhost:8000/", allow_none=True)
    if not plot_server_alive():
        start_plot_server()
        print "waiting for the plot server to start up..."
        while not plot_server_alive():
            sleep(0.05)
        print " done."
def plot(vert, triangles):
    establish_connection()
    print "plotting using mayavi..."
    v = cPickle.dumps(vert)
    t = cPickle.dumps(triangles)
    s.plot(v, t)
    print " done."
|
...
def establish_connection():
    global s
    if s is not None:
        return
    s = xmlrpclib.ServerProxy("http://localhost:8000/", allow_none=True)
    if not plot_server_alive():
        start_plot_server()
...
        print " done."
def plot(vert, triangles):
    establish_connection()
    print "plotting using mayavi..."
    v = cPickle.dumps(vert)
    t = cPickle.dumps(triangles)
...
    s.plot(v, t)
    print " done."
...
|
15db0f300b23693160872754a57cb3afc1944a07
|
setup.py
|
setup.py
|
import version
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup
with open('requirements.txt') as f:
    requirements = f.read().splitlines()
setup(name='robotframework-zoomba',
version=version.VERSION,
description='Robot Framework mini-framework.',
long_description='Zoomba',
url='https://github.com/Accruent/zoomba',
maintainer='Alex Calandra, Michael Hintz, Keith Smoland, Matthew Giardina, Brandon Wolfe, Neil Howell, Tommy Hoang',
maintainer_email='[email protected]',
license='GPL-3.0',
keywords='Robot Framework robot-framework selenium requests appium soap winappdriver appium robotframework'
'desktop windows zoomba python robotframework-library appium-windows appiumlibrary api-rest api '
'soap-api',
platforms='any',
install_requires=requirements,
extras_require={
'testing': [
'Appium-Python-Client'
]
},
classifiers="""
Development Status :: 5 - Production/Stable
Operating System :: OS Independent
Programming Language :: Python :: 3
Topic :: Software Development :: Testing
Framework :: Robot Framework :: Library
""".strip().splitlines(),
package_dir={'': 'src'},
packages=['Zoomba']
)
|
import version
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup
with open("docs/README.md", "r") as fh:
    long_description = fh.read()
with open('requirements.txt') as f:
    requirements = f.read().splitlines()
setup(name='robotframework-zoomba',
version=version.VERSION,
description='Robot Framework mini-framework.',
long_description=long_description,
url='https://github.com/Accruent/zoomba',
maintainer='Alex Calandra, Michael Hintz, Keith Smoland, Matthew Giardina, Brandon Wolfe, Neil Howell,'
' Tommy Hoang',
maintainer_email='[email protected]',
license='GPL-3.0',
keywords='Robot Framework robot-framework selenium requests appium soap winappdriver appium robotframework'
'desktop windows zoomba python robotframework-library appium-windows appiumlibrary api-rest api '
'soap-api',
platforms='any',
install_requires=requirements,
extras_require={
'testing': [
'Appium-Python-Client'
]
},
classifiers="""
Development Status :: 5 - Production/Stable
Operating System :: OS Independent
Programming Language :: Python :: 3
Topic :: Software Development :: Testing
Framework :: Robot Framework :: Library
""".strip().splitlines(),
package_dir={'': 'src'},
packages=['Zoomba']
)
|
Use readme as the long description in pypi
|
Use readme as the long description in pypi
|
Python
|
apache-2.0
|
Accruent/zoomba,Accruent/zoomba
|
python
|
## Code Before:
import version
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup
with open('requirements.txt') as f:
    requirements = f.read().splitlines()
setup(name='robotframework-zoomba',
version=version.VERSION,
description='Robot Framework mini-framework.',
long_description='Zoomba',
url='https://github.com/Accruent/zoomba',
maintainer='Alex Calandra, Michael Hintz, Keith Smoland, Matthew Giardina, Brandon Wolfe, Neil Howell, Tommy Hoang',
maintainer_email='[email protected]',
license='GPL-3.0',
keywords='Robot Framework robot-framework selenium requests appium soap winappdriver appium robotframework'
'desktop windows zoomba python robotframework-library appium-windows appiumlibrary api-rest api '
'soap-api',
platforms='any',
install_requires=requirements,
extras_require={
'testing': [
'Appium-Python-Client'
]
},
classifiers="""
Development Status :: 5 - Production/Stable
Operating System :: OS Independent
Programming Language :: Python :: 3
Topic :: Software Development :: Testing
Framework :: Robot Framework :: Library
""".strip().splitlines(),
package_dir={'': 'src'},
packages=['Zoomba']
)
## Instruction:
Use readme as the long description in pypi
## Code After:
import version
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup
with open("docs/README.md", "r") as fh:
    long_description = fh.read()
with open('requirements.txt') as f:
    requirements = f.read().splitlines()
setup(name='robotframework-zoomba',
version=version.VERSION,
description='Robot Framework mini-framework.',
long_description=long_description,
url='https://github.com/Accruent/zoomba',
maintainer='Alex Calandra, Michael Hintz, Keith Smoland, Matthew Giardina, Brandon Wolfe, Neil Howell,'
' Tommy Hoang',
maintainer_email='[email protected]',
license='GPL-3.0',
keywords='Robot Framework robot-framework selenium requests appium soap winappdriver appium robotframework'
'desktop windows zoomba python robotframework-library appium-windows appiumlibrary api-rest api '
'soap-api',
platforms='any',
install_requires=requirements,
extras_require={
'testing': [
'Appium-Python-Client'
]
},
classifiers="""
Development Status :: 5 - Production/Stable
Operating System :: OS Independent
Programming Language :: Python :: 3
Topic :: Software Development :: Testing
Framework :: Robot Framework :: Library
""".strip().splitlines(),
package_dir={'': 'src'},
packages=['Zoomba']
)
|
...
except ImportError:
    from distutils.core import setup
with open("docs/README.md", "r") as fh:
    long_description = fh.read()
with open('requirements.txt') as f:
    requirements = f.read().splitlines()
...
setup(name='robotframework-zoomba',
version=version.VERSION,
description='Robot Framework mini-framework.',
long_description=long_description,
url='https://github.com/Accruent/zoomba',
maintainer='Alex Calandra, Michael Hintz, Keith Smoland, Matthew Giardina, Brandon Wolfe, Neil Howell,'
' Tommy Hoang',
maintainer_email='[email protected]',
license='GPL-3.0',
keywords='Robot Framework robot-framework selenium requests appium soap winappdriver appium robotframework'
...
|
1eaae78c14b26378a606221eb61f97ec15134baa
|
src/gpl/test/simple01-td.py
|
src/gpl/test/simple01-td.py
|
from openroad import Design, Tech
import helpers
import gpl_aux
tech = Tech()
tech.readLiberty("./library/nangate45/NangateOpenCellLibrary_typical.lib")
tech.readLef("./nangate45.lef")
design = Design(tech)
design.readDef("./simple01-td.def")
design.evalTclString("create_clock -name core_clock -period 2 clk")
design.evalTclString("set_wire_rc -signal -layer metal3")
design.evalTclString("set_wire_rc -clock -layer metal5")
gpl_aux.global_placement(design, timing_driven=True)
design.evalTclString("estimate_parasitics -placement")
design.evalTclString("report_worst_slack")
def_file = helpers.make_result_file("simple01-td.def")
design.writeDef(def_file)
helpers.diff_files(def_file, "simple01-td.defok")
# source helpers.tcl
# set test_name simple01-td
# read_liberty ./library/nangate45/NangateOpenCellLibrary_typical.lib
# read_lef ./nangate45.lef
# read_def ./$test_name.def
# create_clock -name core_clock -period 2 clk
# set_wire_rc -signal -layer metal3
# set_wire_rc -clock -layer metal5
# global_placement -timing_driven
# # check reported wns
# estimate_parasitics -placement
# report_worst_slack
# set def_file [make_result_file $test_name.def]
# write_def $def_file
# diff_file $def_file $test_name.defok
|
from openroad import Design, Tech
import helpers
import gpl_aux
tech = Tech()
tech.readLiberty("./library/nangate45/NangateOpenCellLibrary_typical.lib")
tech.readLef("./nangate45.lef")
design = Design(tech)
design.readDef("./simple01-td.def")
design.evalTclString("create_clock -name core_clock -period 2 clk")
design.evalTclString("set_wire_rc -signal -layer metal3")
design.evalTclString("set_wire_rc -clock -layer metal5")
gpl_aux.global_placement(design, timing_driven=True)
design.evalTclString("estimate_parasitics -placement")
design.evalTclString("report_worst_slack")
def_file = helpers.make_result_file("simple01-td.def")
design.writeDef(def_file)
helpers.diff_files(def_file, "simple01-td.defok")
|
Remove dead code from test
|
Remove dead code from test
Signed-off-by: Don MacMillen <[email protected]>
|
Python
|
bsd-3-clause
|
The-OpenROAD-Project/OpenROAD,The-OpenROAD-Project/OpenROAD,The-OpenROAD-Project/OpenROAD,The-OpenROAD-Project/OpenROAD,QuantamHD/OpenROAD,The-OpenROAD-Project/OpenROAD,QuantamHD/OpenROAD,QuantamHD/OpenROAD,QuantamHD/OpenROAD,QuantamHD/OpenROAD
|
python
|
## Code Before:
from openroad import Design, Tech
import helpers
import gpl_aux
tech = Tech()
tech.readLiberty("./library/nangate45/NangateOpenCellLibrary_typical.lib")
tech.readLef("./nangate45.lef")
design = Design(tech)
design.readDef("./simple01-td.def")
design.evalTclString("create_clock -name core_clock -period 2 clk")
design.evalTclString("set_wire_rc -signal -layer metal3")
design.evalTclString("set_wire_rc -clock -layer metal5")
gpl_aux.global_placement(design, timing_driven=True)
design.evalTclString("estimate_parasitics -placement")
design.evalTclString("report_worst_slack")
def_file = helpers.make_result_file("simple01-td.def")
design.writeDef(def_file)
helpers.diff_files(def_file, "simple01-td.defok")
# source helpers.tcl
# set test_name simple01-td
# read_liberty ./library/nangate45/NangateOpenCellLibrary_typical.lib
# read_lef ./nangate45.lef
# read_def ./$test_name.def
# create_clock -name core_clock -period 2 clk
# set_wire_rc -signal -layer metal3
# set_wire_rc -clock -layer metal5
# global_placement -timing_driven
# # check reported wns
# estimate_parasitics -placement
# report_worst_slack
# set def_file [make_result_file $test_name.def]
# write_def $def_file
# diff_file $def_file $test_name.defok
## Instruction:
Remove dead code from test
Signed-off-by: Don MacMillen <[email protected]>
## Code After:
from openroad import Design, Tech
import helpers
import gpl_aux
tech = Tech()
tech.readLiberty("./library/nangate45/NangateOpenCellLibrary_typical.lib")
tech.readLef("./nangate45.lef")
design = Design(tech)
design.readDef("./simple01-td.def")
design.evalTclString("create_clock -name core_clock -period 2 clk")
design.evalTclString("set_wire_rc -signal -layer metal3")
design.evalTclString("set_wire_rc -clock -layer metal5")
gpl_aux.global_placement(design, timing_driven=True)
design.evalTclString("estimate_parasitics -placement")
design.evalTclString("report_worst_slack")
def_file = helpers.make_result_file("simple01-td.def")
design.writeDef(def_file)
helpers.diff_files(def_file, "simple01-td.defok")
|
# ... existing code ...
def_file = helpers.make_result_file("simple01-td.def")
design.writeDef(def_file)
helpers.diff_files(def_file, "simple01-td.defok")
# ... rest of the code ...
|
e40985c1ecba1529987ed9551210677ea93b9614
|
test/unit/builtins/test_install.py
|
test/unit/builtins/test_install.py
|
from .common import BuiltinTest
from bfg9000.builtins import default, install # noqa
from bfg9000 import file_types
from bfg9000.path import Path, Root
class TestInstall(BuiltinTest):
    def test_install_none(self):
        self.assertEqual(self.builtin_dict['install'](), None)
    def test_install_single(self):
        exe = file_types.Executable(Path('exe', Root.srcdir), None)
        self.assertEqual(self.builtin_dict['install'](exe), exe)
    def test_install_multiple(self):
        exe1 = file_types.Executable(Path('exe1', Root.srcdir), None)
        exe2 = file_types.Executable(Path('exe2', Root.srcdir), None)
        self.assertEqual(self.builtin_dict['install'](exe1, exe2),
                         (exe1, exe2))
    def test_invalid(self):
        phony = file_types.Phony('name')
        self.assertRaises(TypeError, self.builtin_dict['install'], phony)
        exe = file_types.Executable(Path('/path/to/exe', Root.absolute), None)
        self.assertRaises(ValueError, self.builtin_dict['install'], exe)
|
import mock
from .common import BuiltinTest
from bfg9000.builtins import default, install # noqa
from bfg9000 import file_types
from bfg9000.path import Path, Root
class TestInstall(BuiltinTest):
    def test_install_none(self):
        self.assertEqual(self.builtin_dict['install'](), None)
    def test_install_single(self):
        exe = file_types.Executable(Path('exe', Root.srcdir), None)
        self.assertEqual(self.builtin_dict['install'](exe), exe)
    def test_install_multiple(self):
        exe1 = file_types.Executable(Path('exe1', Root.srcdir), None)
        exe2 = file_types.Executable(Path('exe2', Root.srcdir), None)
        self.assertEqual(self.builtin_dict['install'](exe1, exe2),
                         (exe1, exe2))
    def test_invalid(self):
        phony = file_types.Phony('name')
        self.assertRaises(TypeError, self.builtin_dict['install'], phony)
        exe = file_types.Executable(Path('/path/to/exe', Root.absolute), None)
        self.assertRaises(ValueError, self.builtin_dict['install'], exe)
    def test_cant_install(self):
        with mock.patch('bfg9000.builtins.install.can_install',
                        return_value=False), \
             mock.patch('warnings.warn') as m: # noqa
            exe = file_types.Executable(Path('exe', Root.srcdir), None)
            self.assertEqual(self.builtin_dict['install'](exe), exe)
            m.assert_called_once()
|
Add tests for unset installation dirs
|
Add tests for unset installation dirs
|
Python
|
bsd-3-clause
|
jimporter/bfg9000,jimporter/bfg9000,jimporter/bfg9000,jimporter/bfg9000
|
python
|
## Code Before:
from .common import BuiltinTest
from bfg9000.builtins import default, install # noqa
from bfg9000 import file_types
from bfg9000.path import Path, Root
class TestInstall(BuiltinTest):
    def test_install_none(self):
        self.assertEqual(self.builtin_dict['install'](), None)
    def test_install_single(self):
        exe = file_types.Executable(Path('exe', Root.srcdir), None)
        self.assertEqual(self.builtin_dict['install'](exe), exe)
    def test_install_multiple(self):
        exe1 = file_types.Executable(Path('exe1', Root.srcdir), None)
        exe2 = file_types.Executable(Path('exe2', Root.srcdir), None)
        self.assertEqual(self.builtin_dict['install'](exe1, exe2),
                         (exe1, exe2))
    def test_invalid(self):
        phony = file_types.Phony('name')
        self.assertRaises(TypeError, self.builtin_dict['install'], phony)
        exe = file_types.Executable(Path('/path/to/exe', Root.absolute), None)
        self.assertRaises(ValueError, self.builtin_dict['install'], exe)
## Instruction:
Add tests for unset installation dirs
## Code After:
import mock
from .common import BuiltinTest
from bfg9000.builtins import default, install # noqa
from bfg9000 import file_types
from bfg9000.path import Path, Root
class TestInstall(BuiltinTest):
    def test_install_none(self):
        self.assertEqual(self.builtin_dict['install'](), None)
    def test_install_single(self):
        exe = file_types.Executable(Path('exe', Root.srcdir), None)
        self.assertEqual(self.builtin_dict['install'](exe), exe)
    def test_install_multiple(self):
        exe1 = file_types.Executable(Path('exe1', Root.srcdir), None)
        exe2 = file_types.Executable(Path('exe2', Root.srcdir), None)
        self.assertEqual(self.builtin_dict['install'](exe1, exe2),
                         (exe1, exe2))
    def test_invalid(self):
        phony = file_types.Phony('name')
        self.assertRaises(TypeError, self.builtin_dict['install'], phony)
        exe = file_types.Executable(Path('/path/to/exe', Root.absolute), None)
        self.assertRaises(ValueError, self.builtin_dict['install'], exe)
    def test_cant_install(self):
        with mock.patch('bfg9000.builtins.install.can_install',
                        return_value=False), \
             mock.patch('warnings.warn') as m: # noqa
            exe = file_types.Executable(Path('exe', Root.srcdir), None)
            self.assertEqual(self.builtin_dict['install'](exe), exe)
            m.assert_called_once()
|
// ... existing code ...
import mock
from .common import BuiltinTest
from bfg9000.builtins import default, install # noqa
// ... modified code ...
        exe = file_types.Executable(Path('/path/to/exe', Root.absolute), None)
        self.assertRaises(ValueError, self.builtin_dict['install'], exe)
    def test_cant_install(self):
        with mock.patch('bfg9000.builtins.install.can_install',
                        return_value=False), \
             mock.patch('warnings.warn') as m: # noqa
            exe = file_types.Executable(Path('exe', Root.srcdir), None)
            self.assertEqual(self.builtin_dict['install'](exe), exe)
            m.assert_called_once()
// ... rest of the code ...
|
d54854c11094e0ca8598e59de0bc0795dc8143c9
|
lib/recordclass/__init__.py
|
lib/recordclass/__init__.py
|
from .memoryslots import memoryslots, itemgetset
from .record import recordclass
__version__ = '0.4.2'
|
from .memoryslots import memoryslots, itemgetset
from .record import recordclass, RecordClass
__version__ = '0.4.2'
|
Add import of RecordClass to init
|
Add import of RecordClass to init
--HG--
branch : typing
|
Python
|
mit
|
vovanbo/trafaretrecord,vovanbo/trafaretrecord
|
python
|
## Code Before:
from .memoryslots import memoryslots, itemgetset
from .record import recordclass
__version__ = '0.4.2'
## Instruction:
Add import of RecordClass to init
--HG--
branch : typing
## Code After:
from .memoryslots import memoryslots, itemgetset
from .record import recordclass, RecordClass
__version__ = '0.4.2'
|
# ... existing code ...
from .memoryslots import memoryslots, itemgetset
from .record import recordclass, RecordClass
__version__ = '0.4.2'
# ... rest of the code ...
|
37bb334a1c59920d92649b0cedddf62863bf6da8
|
scipy/weave/tests/test_inline_tools.py
|
scipy/weave/tests/test_inline_tools.py
|
from numpy import *
from numpy.testing import *
from scipy.weave import inline_tools
class TestInline(TestCase):
    """ These are long running tests...
    I'd like to benchmark these things somehow.
    """
    @dec.slow
    def test_exceptions(self):
        a = 3
        code = """
               if (a < 2)
                   throw_error(PyExc_ValueError,
                               "the variable 'a' should not be less than 2");
               else
                   return_val = PyInt_FromLong(a+1);
               """
        result = inline_tools.inline(code,['a'])
        assert(result == 4)
        try:
            a = 1
            result = inline_tools.inline(code,['a'])
            assert(1) # should've thrown a ValueError
        except ValueError:
            pass
        from distutils.errors import DistutilsError, CompileError
        try:
            a = 'string'
            result = inline_tools.inline(code,['a'])
            assert(1) # should've gotten an error
        except:
            # ?CompileError is the error reported, but catching it doesn't work
            pass
if __name__ == "__main__":
    nose.run(argv=['', __file__])
|
from numpy import *
from numpy.testing import *
from scipy.weave import inline_tools
class TestInline(TestCase):
    """ These are long running tests...
    I'd like to benchmark these things somehow.
    """
    @dec.slow
    def test_exceptions(self):
        a = 3
        code = """
               if (a < 2)
                   throw_error(PyExc_ValueError,
                               "the variable 'a' should not be less than 2");
               else
                   return_val = PyInt_FromLong(a+1);
               """
        result = inline_tools.inline(code,['a'])
        assert(result == 4)
        ## Unfortunately, it is not always possible to catch distutils compiler
        ## errors, since SystemExit is used. Until that is fixed, these tests
        ## cannot be run in the same process as the test suite.
        ## try:
        ## a = 1
        ## result = inline_tools.inline(code,['a'])
        ## assert(1) # should've thrown a ValueError
        ## except ValueError:
        ## pass
        ## from distutils.errors import DistutilsError, CompileError
        ## try:
        ## a = 'string'
        ## result = inline_tools.inline(code,['a'])
        ## assert(1) # should've gotten an error
        ## except:
        ## # ?CompileError is the error reported, but catching it doesn't work
        ## pass
if __name__ == "__main__":
    nose.run(argv=['', __file__])
|
Disable weave tests that cause compilation failure, since this causes distutils to do a SystemExit, which break the test suite.
|
Disable weave tests that cause compilation failure, since this causes
distutils to do a SystemExit, which break the test suite.
|
Python
|
bsd-3-clause
|
kalvdans/scipy,chatcannon/scipy,trankmichael/scipy,ilayn/scipy,jsilter/scipy,tylerjereddy/scipy,mikebenfield/scipy,minhlongdo/scipy,trankmichael/scipy,WillieMaddox/scipy,piyush0609/scipy,surhudm/scipy,Dapid/scipy,chatcannon/scipy,grlee77/scipy,vanpact/scipy,Kamp9/scipy,Srisai85/scipy,ilayn/scipy,dch312/scipy,chatcannon/scipy,kleskjr/scipy,gef756/scipy,ndchorley/scipy,Shaswat27/scipy,woodscn/scipy,jjhelmus/scipy,cpaulik/scipy,anielsen001/scipy,mortada/scipy,njwilson23/scipy,lukauskas/scipy,anntzer/scipy,zerothi/scipy,aman-iitj/scipy,ChanderG/scipy,befelix/scipy,vhaasteren/scipy,arokem/scipy,pizzathief/scipy,trankmichael/scipy,sriki18/scipy,maniteja123/scipy,ChanderG/scipy,pyramania/scipy,sargas/scipy,e-q/scipy,rgommers/scipy,jakevdp/scipy,Dapid/scipy,maciejkula/scipy,gdooper/scipy,Newman101/scipy,ChanderG/scipy,larsmans/scipy,giorgiop/scipy,petebachant/scipy,WillieMaddox/scipy,jonycgn/scipy,WarrenWeckesser/scipy,fernand/scipy,kalvdans/scipy,Eric89GXL/scipy,perimosocordiae/scipy,pizzathief/scipy,sonnyhu/scipy,jonycgn/scipy,apbard/scipy,aeklant/scipy,sonnyhu/scipy,mhogg/scipy,niknow/scipy,anielsen001/scipy,matthew-brett/scipy,endolith/scipy,andim/scipy,ortylp/scipy,vberaudi/scipy,futurulus/scipy,jsilter/scipy,mortada/scipy,sauliusl/scipy,vberaudi/scipy,jjhelmus/scipy,felipebetancur/scipy,pnedunuri/scipy,Kamp9/scipy,ales-erjavec/scipy,zxsted/scipy,sriki18/scipy,vberaudi/scipy,FRidh/scipy,sargas/scipy,pnedunuri/scipy,mgaitan/scipy,haudren/scipy,hainm/scipy,tylerjereddy/scipy,nonhermitian/scipy,pbrod/scipy,gertingold/scipy,teoliphant/scipy,vhaasteren/scipy,kleskjr/scipy,woodscn/scipy,jor-/scipy,newemailjdm/scipy,grlee77/scipy,hainm/scipy,chatcannon/scipy,gdooper/scipy,sonnyhu/scipy,dominicelse/scipy,fernand/scipy,mhogg/scipy,nonhermitian/scipy,sonnyhu/scipy,rgommers/scipy,FRidh/scipy,piyush0609/scipy,cpaulik/scipy,cpaulik/scipy,arokem/scipy,gef756/scipy,bkendzior/scipy,vanpact/scipy,njwilson23/scipy,ogrisel/scipy,raoulbq/scipy,vanpact/scipy,mortonjt/scipy,mgaitan/scipy,jjhelmus/scipy,ilayn/scipy,pyramania/scipy,matthew-brett/scipy,surhudm/scipy,hainm/scipy,FRidh/scipy,jonycgn/scipy,endolith/scipy,behzadnouri/scipy,richardotis/scipy,e-q/scipy,gdooper/scipy,jseabold/scipy,pizzathief/scipy,WillieMaddox/scipy,aarchiba/scipy,dch312/scipy,futurulus/scipy,lukauskas/scipy,pyramania/scipy,aeklant/scipy,giorgiop/scipy,felipebetancur/scipy,gef756/scipy,pnedunuri/scipy,jamestwebber/scipy,mortonjt/scipy,witcxc/scipy,Eric89GXL/scipy,mtrbean/scipy,andyfaff/scipy,argriffing/scipy,fernand/scipy,fredrikw/scipy,haudren/scipy,surhudm/scipy,Shaswat27/scipy,kleskjr/scipy,person142/scipy,jor-/scipy,aeklant/scipy,perimosocordiae/scipy,newemailjdm/scipy,juliantaylor/scipy,Shaswat27/scipy,maniteja123/scipy,surhudm/scipy,mingwpy/scipy,zxsted/scipy,jamestwebber/scipy,perimosocordiae/scipy,ilayn/scipy,argriffing/scipy,zxsted/scipy,anielsen001/scipy,fredrikw/scipy,felipebetancur/scipy,Kamp9/scipy,gdooper/scipy,petebachant/scipy,chatcannon/scipy,Dapid/scipy,behzadnouri/scipy,witcxc/scipy,teoliphant/scipy,vigna/scipy,FRidh/scipy,scipy/scipy,josephcslater/scipy,zerothi/scipy,efiring/scipy,raoulbq/scipy,rmcgibbo/scipy,mtrbean/scipy,nonhermitian/scipy,chatcannon/scipy,anntzer/scipy,cpaulik/scipy,kleskjr/scipy,pnedunuri/scipy,mhogg/scipy,juliantaylor/scipy,woodscn/scipy,befelix/scipy,jakevdp/scipy,zaxliu/scipy,maciejkula/scipy,scipy/scipy,petebachant/scipy,scipy/scipy,giorgiop/scipy,larsmans/scipy,nvoron23/scipy,Shaswat27/scipy,mtrbean/scipy,Newman101/scipy,lhilt/scipy,Gillu13/scipy,endolith/scipy,felipebetancur/scipy,efiring/scipy
,raoulbq/scipy,aarchiba/scipy,ogrisel/scipy,trankmichael/scipy,niknow/scipy,richardotis/scipy,mortonjt/scipy,jonycgn/scipy,matthewalbani/scipy,Gillu13/scipy,WillieMaddox/scipy,nvoron23/scipy,zerothi/scipy,dominicelse/scipy,perimosocordiae/scipy,aman-iitj/scipy,kleskjr/scipy,vanpact/scipy,jor-/scipy,person142/scipy,befelix/scipy,Gillu13/scipy,vigna/scipy,aman-iitj/scipy,jjhelmus/scipy,pschella/scipy,argriffing/scipy,anielsen001/scipy,sauliusl/scipy,tylerjereddy/scipy,vanpact/scipy,aarchiba/scipy,Stefan-Endres/scipy,zxsted/scipy,Eric89GXL/scipy,pbrod/scipy,ogrisel/scipy,jsilter/scipy,scipy/scipy,vanpact/scipy,Newman101/scipy,sargas/scipy,rmcgibbo/scipy,vberaudi/scipy,jamestwebber/scipy,ndchorley/scipy,lukauskas/scipy,pnedunuri/scipy,larsmans/scipy,Kamp9/scipy,mtrbean/scipy,grlee77/scipy,andim/scipy,newemailjdm/scipy,gdooper/scipy,andim/scipy,FRidh/scipy,josephcslater/scipy,newemailjdm/scipy,Kamp9/scipy,haudren/scipy,anielsen001/scipy,jseabold/scipy,richardotis/scipy,arokem/scipy,gfyoung/scipy,anielsen001/scipy,vhaasteren/scipy,vhaasteren/scipy,dominicelse/scipy,ChanderG/scipy,nmayorov/scipy,tylerjereddy/scipy,matthew-brett/scipy,jseabold/scipy,mingwpy/scipy,apbard/scipy,Stefan-Endres/scipy,person142/scipy,Kamp9/scipy,jonycgn/scipy,arokem/scipy,mdhaber/scipy,witcxc/scipy,jsilter/scipy,hainm/scipy,hainm/scipy,mgaitan/scipy,pyramania/scipy,sonnyhu/scipy,WarrenWeckesser/scipy,perimosocordiae/scipy,ogrisel/scipy,mikebenfield/scipy,ndchorley/scipy,maniteja123/scipy,pbrod/scipy,mortonjt/scipy,andyfaff/scipy,Gillu13/scipy,matthewalbani/scipy,rmcgibbo/scipy,ChanderG/scipy,mgaitan/scipy,argriffing/scipy,zerothi/scipy,mdhaber/scipy,Stefan-Endres/scipy,richardotis/scipy,cpaulik/scipy,fredrikw/scipy,piyush0609/scipy,fredrikw/scipy,Gillu13/scipy,trankmichael/scipy,niknow/scipy,andyfaff/scipy,WillieMaddox/scipy,lukauskas/scipy,nvoron23/scipy,sriki18/scipy,minhlongdo/scipy,aeklant/scipy,e-q/scipy,rmcgibbo/scipy,pschella/scipy,ales-erjavec/scipy,lhilt/scipy,mdhaber/scipy,mingwpy/scipy,witcxc/scipy,niknow/scipy,piyush0609/scipy,anntzer/scipy,mortonjt/scipy,kalvdans/scipy,jor-/scipy,sauliusl/scipy,aman-iitj/scipy,jjhelmus/scipy,ortylp/scipy,behzadnouri/scipy,surhudm/scipy,njwilson23/scipy,mdhaber/scipy,lhilt/scipy,teoliphant/scipy,gertingold/scipy,gfyoung/scipy,pizzathief/scipy,josephcslater/scipy,dominicelse/scipy,anntzer/scipy,giorgiop/scipy,andim/scipy,andyfaff/scipy,mortada/scipy,e-q/scipy,vhaasteren/scipy,aarchiba/scipy,teoliphant/scipy,Eric89GXL/scipy,maniteja123/scipy,dch312/scipy,newemailjdm/scipy,efiring/scipy,gef756/scipy,lukauskas/scipy,zaxliu/scipy,Shaswat27/scipy,zaxliu/scipy,Dapid/scipy,surhudm/scipy,minhlongdo/scipy,ales-erjavec/scipy,sonnyhu/scipy,endolith/scipy,mtrbean/scipy,rmcgibbo/scipy,lukauskas/scipy,dominicelse/scipy,behzadnouri/scipy,Shaswat27/scipy,richardotis/scipy,endolith/scipy,Srisai85/scipy,argriffing/scipy,rgommers/scipy,ogrisel/scipy,woodscn/scipy,behzadnouri/scipy,mingwpy/scipy,aman-iitj/scipy,mhogg/scipy,jsilter/scipy,mingwpy/scipy,jonycgn/scipy,Eric89GXL/scipy,mortada/scipy,dch312/scipy,giorgiop/scipy,sriki18/scipy,Srisai85/scipy,jseabold/scipy,sargas/scipy,nmayorov/scipy,Srisai85/scipy,mikebenfield/scipy,efiring/scipy,apbard/scipy,zaxliu/scipy,pizzathief/scipy,grlee77/scipy,pbrod/scipy,minhlongdo/scipy,matthewalbani/scipy,Eric89GXL/scipy,woodscn/scipy,jakevdp/scipy,gef756/scipy,hainm/scipy,WillieMaddox/scipy,teoliphant/scipy,juliantaylor/scipy,newemailjdm/scipy,behzadnouri/scipy,vberaudi/scipy,jakevdp/scipy,kalvdans/scipy,vigna/scipy,andyfaff/scipy,njwilson23/scipy,ortylp/sci
py,argriffing/scipy,futurulus/scipy,WarrenWeckesser/scipy,maciejkula/scipy,mortada/scipy,richardotis/scipy,efiring/scipy,nvoron23/scipy,jakevdp/scipy,maciejkula/scipy,vigna/scipy,zaxliu/scipy,juliantaylor/scipy,maniteja123/scipy,ilayn/scipy,aeklant/scipy,befelix/scipy,nvoron23/scipy,Stefan-Endres/scipy,ortylp/scipy,mdhaber/scipy,njwilson23/scipy,Newman101/scipy,josephcslater/scipy,minhlongdo/scipy,sriki18/scipy,FRidh/scipy,Stefan-Endres/scipy,jor-/scipy,pnedunuri/scipy,WarrenWeckesser/scipy,kleskjr/scipy,befelix/scipy,sargas/scipy,sriki18/scipy,zerothi/scipy,gfyoung/scipy,tylerjereddy/scipy,gef756/scipy,Srisai85/scipy,mortada/scipy,ilayn/scipy,pschella/scipy,nmayorov/scipy,felipebetancur/scipy,nmayorov/scipy,mhogg/scipy,gertingold/scipy,matthewalbani/scipy,bkendzior/scipy,larsmans/scipy,arokem/scipy,jseabold/scipy,person142/scipy,bkendzior/scipy,futurulus/scipy,matthewalbani/scipy,anntzer/scipy,rgommers/scipy,mingwpy/scipy,Gillu13/scipy,larsmans/scipy,nonhermitian/scipy,vhaasteren/scipy,aman-iitj/scipy,larsmans/scipy,vberaudi/scipy,andim/scipy,Newman101/scipy,njwilson23/scipy,Srisai85/scipy,cpaulik/scipy,aarchiba/scipy,jseabold/scipy,haudren/scipy,WarrenWeckesser/scipy,kalvdans/scipy,ndchorley/scipy,zaxliu/scipy,gfyoung/scipy,fernand/scipy,mikebenfield/scipy,woodscn/scipy,maniteja123/scipy,person142/scipy,raoulbq/scipy,niknow/scipy,mgaitan/scipy,maciejkula/scipy,matthew-brett/scipy,Dapid/scipy,ndchorley/scipy,ales-erjavec/scipy,zerothi/scipy,endolith/scipy,fredrikw/scipy,vigna/scipy,zxsted/scipy,futurulus/scipy,gertingold/scipy,jamestwebber/scipy,anntzer/scipy,matthew-brett/scipy,perimosocordiae/scipy,efiring/scipy,andyfaff/scipy,pschella/scipy,pschella/scipy,sauliusl/scipy,fredrikw/scipy,gfyoung/scipy,petebachant/scipy,mikebenfield/scipy,rgommers/scipy,fernand/scipy,scipy/scipy,bkendzior/scipy,nonhermitian/scipy,fernand/scipy,zxsted/scipy,mgaitan/scipy,petebachant/scipy,mhogg/scipy,grlee77/scipy,mortonjt/scipy,futurulus/scipy,nvoron23/scipy,sauliusl/scipy,WarrenWeckesser/scipy,pyramania/scipy,bkendzior/scipy,lhilt/scipy,haudren/scipy,apbard/scipy,ales-erjavec/scipy,pbrod/scipy,ortylp/scipy,ChanderG/scipy,andim/scipy,Dapid/scipy,juliantaylor/scipy,apbard/scipy,ales-erjavec/scipy,lhilt/scipy,ndchorley/scipy,raoulbq/scipy,haudren/scipy,nmayorov/scipy,piyush0609/scipy,sauliusl/scipy,mdhaber/scipy,minhlongdo/scipy,piyush0609/scipy,Stefan-Endres/scipy,jamestwebber/scipy,witcxc/scipy,dch312/scipy,gertingold/scipy,raoulbq/scipy,felipebetancur/scipy,niknow/scipy,e-q/scipy,pbrod/scipy,josephcslater/scipy,ortylp/scipy,rmcgibbo/scipy,petebachant/scipy,scipy/scipy,trankmichael/scipy,Newman101/scipy,mtrbean/scipy,giorgiop/scipy
|
python
|
## Code Before:
from numpy import *
from numpy.testing import *
from scipy.weave import inline_tools
class TestInline(TestCase):
    """ These are long running tests...
    I'd like to benchmark these things somehow.
    """
    @dec.slow
    def test_exceptions(self):
        a = 3
        code = """
               if (a < 2)
                   throw_error(PyExc_ValueError,
                               "the variable 'a' should not be less than 2");
               else
                   return_val = PyInt_FromLong(a+1);
               """
        result = inline_tools.inline(code,['a'])
        assert(result == 4)
        try:
            a = 1
            result = inline_tools.inline(code,['a'])
            assert(1) # should've thrown a ValueError
        except ValueError:
            pass
        from distutils.errors import DistutilsError, CompileError
        try:
            a = 'string'
            result = inline_tools.inline(code,['a'])
            assert(1) # should've gotten an error
        except:
            # ?CompileError is the error reported, but catching it doesn't work
            pass
if __name__ == "__main__":
    nose.run(argv=['', __file__])
## Instruction:
Disable weave tests that cause compilation failure, since this causes
distutils to do a SystemExit, which break the test suite.
## Code After:
from numpy import *
from numpy.testing import *
from scipy.weave import inline_tools
class TestInline(TestCase):
    """ These are long running tests...
    I'd like to benchmark these things somehow.
    """
    @dec.slow
    def test_exceptions(self):
        a = 3
        code = """
               if (a < 2)
                   throw_error(PyExc_ValueError,
                               "the variable 'a' should not be less than 2");
               else
                   return_val = PyInt_FromLong(a+1);
               """
        result = inline_tools.inline(code,['a'])
        assert(result == 4)
        ## Unfortunately, it is not always possible to catch distutils compiler
        ## errors, since SystemExit is used. Until that is fixed, these tests
        ## cannot be run in the same process as the test suite.
        ## try:
        ## a = 1
        ## result = inline_tools.inline(code,['a'])
        ## assert(1) # should've thrown a ValueError
        ## except ValueError:
        ## pass
        ## from distutils.errors import DistutilsError, CompileError
        ## try:
        ## a = 'string'
        ## result = inline_tools.inline(code,['a'])
        ## assert(1) # should've gotten an error
        ## except:
        ## # ?CompileError is the error reported, but catching it doesn't work
        ## pass
if __name__ == "__main__":
    nose.run(argv=['', __file__])
|
...
        result = inline_tools.inline(code,['a'])
        assert(result == 4)
        ## Unfortunately, it is not always possible to catch distutils compiler
        ## errors, since SystemExit is used. Until that is fixed, these tests
        ## cannot be run in the same process as the test suite.
        ## try:
        ## a = 1
        ## result = inline_tools.inline(code,['a'])
        ## assert(1) # should've thrown a ValueError
        ## except ValueError:
        ## pass
        ## from distutils.errors import DistutilsError, CompileError
        ## try:
        ## a = 'string'
        ## result = inline_tools.inline(code,['a'])
        ## assert(1) # should've gotten an error
        ## except:
        ## # ?CompileError is the error reported, but catching it doesn't work
        ## pass
if __name__ == "__main__":
    nose.run(argv=['', __file__])
...
|
3800c095f58e9bc2ca8c580537ea576049bbfe2d
|
sell/urls.py
|
sell/urls.py
|
from django.conf.urls import url
from sell import views
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^personal/$', views.personal_data),
url(r'^books/$', views.books),
url(r'^summary/$', views.summary),
]
|
from django.conf.urls import url
from sell import views
urlpatterns = [
url(r'^$', views.index),
url(r'^personal/$', views.personal_data),
url(r'^books/$', views.books),
url(r'^summary/$', views.summary),
]
|
Remove unnecessary URL name in Sell app
|
Remove unnecessary URL name in Sell app
|
Python
|
agpl-3.0
|
m4tx/egielda,m4tx/egielda,m4tx/egielda
|
python
|
## Code Before:
from django.conf.urls import url
from sell import views
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^personal/$', views.personal_data),
url(r'^books/$', views.books),
url(r'^summary/$', views.summary),
]
## Instruction:
Remove unnecessary URL name in Sell app
## Code After:
from django.conf.urls import url
from sell import views
urlpatterns = [
url(r'^$', views.index),
url(r'^personal/$', views.personal_data),
url(r'^books/$', views.books),
url(r'^summary/$', views.summary),
]
|
...
urlpatterns = [
url(r'^$', views.index),
url(r'^personal/$', views.personal_data),
url(r'^books/$', views.books),
url(r'^summary/$', views.summary),
...
|
cedac36d38ff0bf70abc1c9193948a288e858a01
|
kitsune/lib/pipeline_compilers.py
|
kitsune/lib/pipeline_compilers.py
|
import re
from django.conf import settings
from django.utils.encoding import smart_bytes
from pipeline.compilers import CompilerBase
from pipeline.exceptions import CompilerError
class BrowserifyCompiler(CompilerBase):
    output_extension = 'browserified.js'
    def match_file(self, path):
        # Allow for cache busting hashes between ".browserify" and ".js"
        return re.search(r'\.browserify(\.[a-fA-F0-9]+)?\.js$', path) is not None
    def compile_file(self, infile, outfile, outdated=False, force=False):
        command = "%s %s %s > %s" % (
            getattr(settings, 'PIPELINE_BROWSERIFY_BINARY', '/usr/bin/env browserify'),
            getattr(settings, 'PIPELINE_BROWSERIFY_ARGUMENTS', ''),
            infile,
            outfile
        )
        return self.execute_command(command)
    def execute_command(self, command, content=None, cwd=None):
        """This is like the one in SubProcessCompiler, except it checks the exit code."""
        import subprocess
        pipe = subprocess.Popen(command, shell=True, cwd=cwd,
                                stdout=subprocess.PIPE, stdin=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        if content:
            content = smart_bytes(content)
        stdout, stderr = pipe.communicate(content)
        if self.verbose:
            print(stderr)
        if pipe.returncode != 0:
            raise CompilerError(stderr)
        return stdout
|
import re
from django.conf import settings
from django.utils.encoding import smart_bytes
from pipeline.compilers import CompilerBase
from pipeline.exceptions import CompilerError
class BrowserifyCompiler(CompilerBase):
    output_extension = 'browserified.js'
    def match_file(self, path):
        # Allow for cache busting hashes between ".browserify" and ".js"
        return re.search(r'\.browserify(\.[a-fA-F0-9]+)?\.js$', path) is not None
    def compile_file(self, infile, outfile, outdated=False, force=False):
        pipeline_settings = getattr(settings, 'PIPELINE', {})
        command = "%s %s %s > %s" % (
            pipeline_settings.get('BROWSERIFY_BINARY', '/usr/bin/env browserify'),
            pipeline_settings.get('BROWSERIFY_ARGUMENTS', ''),
            infile,
            outfile
        )
        return self.execute_command(command)
    def execute_command(self, command, content=None, cwd=None):
        """This is like the one in SubProcessCompiler, except it checks the exit code."""
        import subprocess
        pipe = subprocess.Popen(command, shell=True, cwd=cwd,
                                stdout=subprocess.PIPE, stdin=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        if content:
            content = smart_bytes(content)
        stdout, stderr = pipe.communicate(content)
        if self.verbose:
            print(stderr)
        if pipe.returncode != 0:
            raise CompilerError(stderr)
        return stdout
|
Update BrowserifyCompiler for n Pipeline settings.
|
Update BrowserifyCompiler for n Pipeline settings.
|
Python
|
bsd-3-clause
|
mythmon/kitsune,MikkCZ/kitsune,brittanystoroz/kitsune,anushbmx/kitsune,MikkCZ/kitsune,anushbmx/kitsune,safwanrahman/kitsune,brittanystoroz/kitsune,MikkCZ/kitsune,mozilla/kitsune,safwanrahman/kitsune,mythmon/kitsune,anushbmx/kitsune,mythmon/kitsune,safwanrahman/kitsune,mozilla/kitsune,brittanystoroz/kitsune,mythmon/kitsune,mozilla/kitsune,anushbmx/kitsune,MikkCZ/kitsune,mozilla/kitsune,brittanystoroz/kitsune,safwanrahman/kitsune
|
python
|
## Code Before:
import re
from django.conf import settings
from django.utils.encoding import smart_bytes
from pipeline.compilers import CompilerBase
from pipeline.exceptions import CompilerError
class BrowserifyCompiler(CompilerBase):
    output_extension = 'browserified.js'
    def match_file(self, path):
        # Allow for cache busting hashes between ".browserify" and ".js"
        return re.search(r'\.browserify(\.[a-fA-F0-9]+)?\.js$', path) is not None
    def compile_file(self, infile, outfile, outdated=False, force=False):
        command = "%s %s %s > %s" % (
            getattr(settings, 'PIPELINE_BROWSERIFY_BINARY', '/usr/bin/env browserify'),
            getattr(settings, 'PIPELINE_BROWSERIFY_ARGUMENTS', ''),
            infile,
            outfile
        )
        return self.execute_command(command)
    def execute_command(self, command, content=None, cwd=None):
        """This is like the one in SubProcessCompiler, except it checks the exit code."""
        import subprocess
        pipe = subprocess.Popen(command, shell=True, cwd=cwd,
                                stdout=subprocess.PIPE, stdin=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        if content:
            content = smart_bytes(content)
        stdout, stderr = pipe.communicate(content)
        if self.verbose:
            print(stderr)
        if pipe.returncode != 0:
            raise CompilerError(stderr)
        return stdout
## Instruction:
Update BrowserifyCompiler for n Pipeline settings.
## Code After:
import re
from django.conf import settings
from django.utils.encoding import smart_bytes
from pipeline.compilers import CompilerBase
from pipeline.exceptions import CompilerError
class BrowserifyCompiler(CompilerBase):
    output_extension = 'browserified.js'
    def match_file(self, path):
        # Allow for cache busting hashes between ".browserify" and ".js"
        return re.search(r'\.browserify(\.[a-fA-F0-9]+)?\.js$', path) is not None
    def compile_file(self, infile, outfile, outdated=False, force=False):
        pipeline_settings = getattr(settings, 'PIPELINE', {})
        command = "%s %s %s > %s" % (
            pipeline_settings.get('BROWSERIFY_BINARY', '/usr/bin/env browserify'),
            pipeline_settings.get('BROWSERIFY_ARGUMENTS', ''),
            infile,
            outfile
        )
        return self.execute_command(command)
    def execute_command(self, command, content=None, cwd=None):
        """This is like the one in SubProcessCompiler, except it checks the exit code."""
        import subprocess
        pipe = subprocess.Popen(command, shell=True, cwd=cwd,
                                stdout=subprocess.PIPE, stdin=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        if content:
            content = smart_bytes(content)
        stdout, stderr = pipe.communicate(content)
        if self.verbose:
            print(stderr)
        if pipe.returncode != 0:
            raise CompilerError(stderr)
        return stdout
|
# ... existing code ...
        return re.search(r'\.browserify(\.[a-fA-F0-9]+)?\.js$', path) is not None
    def compile_file(self, infile, outfile, outdated=False, force=False):
        pipeline_settings = getattr(settings, 'PIPELINE', {})
        command = "%s %s %s > %s" % (
            pipeline_settings.get('BROWSERIFY_BINARY', '/usr/bin/env browserify'),
            pipeline_settings.get('BROWSERIFY_ARGUMENTS', ''),
            infile,
            outfile
        )
# ... rest of the code ...
|
6daf30e81489f72c92c53aabed40ed0870ed6c9b
|
src/data_test/java/info/u_team/u_team_test/data/provider/TestItemModelsProvider.java
|
src/data_test/java/info/u_team/u_team_test/data/provider/TestItemModelsProvider.java
|
package info.u_team.u_team_test.data.provider;
import info.u_team.u_team_core.data.*;
import info.u_team.u_team_test.init.*;
public class TestItemModelsProvider extends CommonItemModelsProvider {
public TestItemModelsProvider(GenerationData data) {
super(data);
}
@Override
protected void registerModels() {
// Items
simpleGenerated(TestItems.BASIC.get());
simpleGenerated(TestItems.BASIC_FOOD.get());
simpleGenerated(TestItems.BETTER_ENDERPEARL.get());
iterateItems(TestItems.BASIC_TOOL, this::simpleHandheld);
iterateItems(TestItems.BASIC_ARMOR, this::simpleGenerated);
// Blocks
simpleBlock(TestBlocks.BASIC.get());
simpleBlock(TestBlocks.BASIC_TILEENTITY.get());
simpleBlock(TestBlocks.BASIC_ENERGY_CREATOR.get());
simpleBlock(TestBlocks.BASIC_FLUID_INVENTORY.get());
}
}
|
package info.u_team.u_team_test.data.provider;
import info.u_team.u_team_core.data.*;
import info.u_team.u_team_test.init.*;
public class TestItemModelsProvider extends CommonItemModelsProvider {
public TestItemModelsProvider(GenerationData data) {
super(data);
}
@Override
protected void registerModels() {
// Items
simpleGenerated(TestItems.BASIC.get());
simpleGenerated(TestItems.BASIC_FOOD.get());
simpleGenerated(TestItems.BETTER_ENDERPEARL.get());
iterateItems(TestItems.BASIC_TOOL, this::simpleHandheld);
iterateItems(TestItems.BASIC_ARMOR, this::simpleGenerated);
spawnEgg(TestItems.TEST_LIVING_SPAWN_EGG.get());
// Blocks
simpleBlock(TestBlocks.BASIC.get());
simpleBlock(TestBlocks.BASIC_TILEENTITY.get());
simpleBlock(TestBlocks.BASIC_ENERGY_CREATOR.get());
simpleBlock(TestBlocks.BASIC_FLUID_INVENTORY.get());
}
}
|
Add generation for spawn egg
|
Add generation for spawn egg
|
Java
|
apache-2.0
|
MC-U-Team/U-Team-Core,MC-U-Team/U-Team-Core
|
java
|
## Code Before:
package info.u_team.u_team_test.data.provider;
import info.u_team.u_team_core.data.*;
import info.u_team.u_team_test.init.*;
public class TestItemModelsProvider extends CommonItemModelsProvider {
public TestItemModelsProvider(GenerationData data) {
super(data);
}
@Override
protected void registerModels() {
// Items
simpleGenerated(TestItems.BASIC.get());
simpleGenerated(TestItems.BASIC_FOOD.get());
simpleGenerated(TestItems.BETTER_ENDERPEARL.get());
iterateItems(TestItems.BASIC_TOOL, this::simpleHandheld);
iterateItems(TestItems.BASIC_ARMOR, this::simpleGenerated);
// Blocks
simpleBlock(TestBlocks.BASIC.get());
simpleBlock(TestBlocks.BASIC_TILEENTITY.get());
simpleBlock(TestBlocks.BASIC_ENERGY_CREATOR.get());
simpleBlock(TestBlocks.BASIC_FLUID_INVENTORY.get());
}
}
## Instruction:
Add generation for spawn egg
## Code After:
package info.u_team.u_team_test.data.provider;
import info.u_team.u_team_core.data.*;
import info.u_team.u_team_test.init.*;
public class TestItemModelsProvider extends CommonItemModelsProvider {
public TestItemModelsProvider(GenerationData data) {
super(data);
}
@Override
protected void registerModels() {
// Items
simpleGenerated(TestItems.BASIC.get());
simpleGenerated(TestItems.BASIC_FOOD.get());
simpleGenerated(TestItems.BETTER_ENDERPEARL.get());
iterateItems(TestItems.BASIC_TOOL, this::simpleHandheld);
iterateItems(TestItems.BASIC_ARMOR, this::simpleGenerated);
spawnEgg(TestItems.TEST_LIVING_SPAWN_EGG.get());
// Blocks
simpleBlock(TestBlocks.BASIC.get());
simpleBlock(TestBlocks.BASIC_TILEENTITY.get());
simpleBlock(TestBlocks.BASIC_ENERGY_CREATOR.get());
simpleBlock(TestBlocks.BASIC_FLUID_INVENTORY.get());
}
}
|
# ... existing code ...
simpleGenerated(TestItems.BETTER_ENDERPEARL.get());
iterateItems(TestItems.BASIC_TOOL, this::simpleHandheld);
iterateItems(TestItems.BASIC_ARMOR, this::simpleGenerated);
spawnEgg(TestItems.TEST_LIVING_SPAWN_EGG.get());
// Blocks
simpleBlock(TestBlocks.BASIC.get());
simpleBlock(TestBlocks.BASIC_TILEENTITY.get());
# ... rest of the code ...
|
bea3f4327322f351b53ae3ce2e063e3bf6fdeec1
|
subprojects/playpens/java-playpen/ehcache/ehcache-playpen/src/test/java/com/fenixinfotech/ehcache/playpen/CustomEhCacheTest.java
|
subprojects/playpens/java-playpen/ehcache/ehcache-playpen/src/test/java/com/fenixinfotech/ehcache/playpen/CustomEhCacheTest.java
|
package com.fenixinfotech.ehcache.playpen;
import net.sf.ehcache.CacheManager;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
public class CustomEhCacheTest
{
private static final Logger logger = LoggerFactory.getLogger(CustomEhCacheTest.class);
@Test
public void testCache()
{
String testCacheName = "testCache";
int maxCacheEntries = 10;
CustomEhCache customEhCache = new CustomEhCache();
// Store some data
for (int i=1; i<=maxCacheEntries; i++)
{
customEhCache.putInCache(testCacheName, i, String.format("value %d", i));
}
// Check cache content
for (int i=1; i<=maxCacheEntries; i++)
{
Object cachedValue = customEhCache.getFromCache(testCacheName, i);
assertNotNull(cachedValue);
assertEquals(String.format("value %d", i), cachedValue);
}
}
}
|
package com.fenixinfotech.ehcache.playpen;
import net.sf.ehcache.CacheManager;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
public class CustomEhCacheTest
{
private static final Logger logger = LoggerFactory.getLogger(CustomEhCacheTest.class);
@Test
public void testCache()
{
assertTrue(false);
String testCacheName = "testCache";
int maxCacheEntries = 10;
CustomEhCache customEhCache = new CustomEhCache();
// Store some data
for (int i=1; i<=maxCacheEntries; i++)
{
customEhCache.putInCache(testCacheName, i, String.format("value %d", i));
}
// Check cache content
for (int i=1; i<=maxCacheEntries; i++)
{
Object cachedValue = customEhCache.getFromCache(testCacheName, i);
assertNotNull(cachedValue);
assertEquals(String.format("value %d", i), cachedValue);
}
}
}
|
Test checking to deliberately break the build
|
Test checking to deliberately break the build
|
Java
|
mit
|
stevocurtis/public-development,stevocurtis/public-development,stevocurtis/public-development,stevocurtis/public-development,stevocurtis/public-development,stevocurtis/public-development
|
java
|
## Code Before:
package com.fenixinfotech.ehcache.playpen;
import net.sf.ehcache.CacheManager;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
public class CustomEhCacheTest
{
private static final Logger logger = LoggerFactory.getLogger(CustomEhCacheTest.class);
@Test
public void testCache()
{
String testCacheName = "testCache";
int maxCacheEntries = 10;
CustomEhCache customEhCache = new CustomEhCache();
// Store some data
for (int i=1; i<=maxCacheEntries; i++)
{
customEhCache.putInCache(testCacheName, i, String.format("value %d", i));
}
// Check cache content
for (int i=1; i<=maxCacheEntries; i++)
{
Object cachedValue = customEhCache.getFromCache(testCacheName, i);
assertNotNull(cachedValue);
assertEquals(String.format("value %d", i), cachedValue);
}
}
}
## Instruction:
Test checking to deliberately break the build
## Code After:
package com.fenixinfotech.ehcache.playpen;
import net.sf.ehcache.CacheManager;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
public class CustomEhCacheTest
{
private static final Logger logger = LoggerFactory.getLogger(CustomEhCacheTest.class);
@Test
public void testCache()
{
assertTrue(false);
String testCacheName = "testCache";
int maxCacheEntries = 10;
CustomEhCache customEhCache = new CustomEhCache();
// Store some data
for (int i=1; i<=maxCacheEntries; i++)
{
customEhCache.putInCache(testCacheName, i, String.format("value %d", i));
}
// Check cache content
for (int i=1; i<=maxCacheEntries; i++)
{
Object cachedValue = customEhCache.getFromCache(testCacheName, i);
assertNotNull(cachedValue);
assertEquals(String.format("value %d", i), cachedValue);
}
}
}
|
# ... existing code ...
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
public class CustomEhCacheTest
{
# ... modified code ...
@Test
public void testCache()
{
assertTrue(false);
String testCacheName = "testCache";
int maxCacheEntries = 10;
# ... rest of the code ...
|
06e858fc86f8f34ccae521cb269c959569f53f97
|
script/sample/submitpython.py
|
script/sample/submitpython.py
|
from __future__ import print_function
import multyvac
multyvac.config.set_key(api_key='admin', api_secret_key='12345', api_url='http://docker:8000/v1')
def add(a, b):
    return a + b
jid = multyvac.submit(add, 3, 4)
result = multyvac.get(jid).get_result()
print("result = {}".format(result))
|
from __future__ import print_function
import multyvac
import os
# Grab from the CLOUDPIPE_URL environment variable, otherwise assume they have
# /etc/hosts configured to point to their docker
api_url = os.environ.get('CLOUDPIPE_URL', 'http://docker:8000/v1')
multyvac.config.set_key(api_key='admin', api_secret_key='12345', api_url=api_url)
def add(a, b):
    return a + b
jid = multyvac.submit(add, 3, 4)
result = multyvac.get(jid).get_result()
print("added {} and {} to get {}... in the cloud!".format(3,4,result))
|
Allow api_url in the script to be configurable
|
Allow api_url in the script to be configurable
|
Python
|
bsd-3-clause
|
cloudpipe/cloudpipe,cloudpipe/cloudpipe,cloudpipe/cloudpipe
|
python
|
## Code Before:
from __future__ import print_function
import multyvac
multyvac.config.set_key(api_key='admin', api_secret_key='12345', api_url='http://docker:8000/v1')
def add(a, b):
    return a + b
jid = multyvac.submit(add, 3, 4)
result = multyvac.get(jid).get_result()
print("result = {}".format(result))
## Instruction:
Allow api_url in the script to be configurable
## Code After:
from __future__ import print_function
import multyvac
import os
# Grab from the CLOUDPIPE_URL environment variable, otherwise assume they have
# /etc/hosts configured to point to their docker
api_url = os.environ.get('CLOUDPIPE_URL', 'http://docker:8000/v1')
multyvac.config.set_key(api_key='admin', api_secret_key='12345', api_url=api_url)
def add(a, b):
    return a + b
jid = multyvac.submit(add, 3, 4)
result = multyvac.get(jid).get_result()
print("added {} and {} to get {}... in the cloud!".format(3,4,result))
|
# ... existing code ...
import multyvac
import os
# Grab from the CLOUDPIPE_URL environment variable, otherwise assume they have
# /etc/hosts configured to point to their docker
api_url = os.environ.get('CLOUDPIPE_URL', 'http://docker:8000/v1')
multyvac.config.set_key(api_key='admin', api_secret_key='12345', api_url=api_url)
def add(a, b):
    return a + b
# ... modified code ...
jid = multyvac.submit(add, 3, 4)
result = multyvac.get(jid).get_result()
print("added {} and {} to get {}... in the cloud!".format(3,4,result))
# ... rest of the code ...
|
02e86ccfe9fc04afbd5275d4b2f27881d61e3ced
|
ServerMonitoringService/ServerMonitoringService/SystemMemory.h
|
ServerMonitoringService/ServerMonitoringService/SystemMemory.h
|
class SystemMemory
{
private:
MEMORYSTATUSEX memoryStat;
private:
int memoryCall();
public:
int getLoadPercent(int &val);
int getUsage(double &val);
int getTotalByte(DWORD &val);
int getFreeByte(DWORD &val);
};
#endif
|
typedef unsigned long DWORD;
class SystemMemory
{
private:
MEMORYSTATUSEX memoryStat;
private:
int memoryCall();
public:
int getLoadPercent(int &val);
int getUsage(double &val);
int getTotalByte(DWORD &val);
int getFreeByte(DWORD &val);
};
#endif
|
Define 'unsigned long' as 'DWORD' for cross-platform
|
Define 'unsigned long' as 'DWORD' for cross-platform
|
C
|
mit
|
bg0820/SMS,bg0820/SMS
|
c
|
## Code Before:
class SystemMemory
{
private:
MEMORYSTATUSEX memoryStat;
private:
int memoryCall();
public:
int getLoadPercent(int &val);
int getUsage(double &val);
int getTotalByte(DWORD &val);
int getFreeByte(DWORD &val);
};
#endif
## Instruction:
Define 'unsigned long' as 'DWORD' for cross-platform
## Code After:
typedef unsigned long DWORD;
class SystemMemory
{
private:
MEMORYSTATUSEX memoryStat;
private:
int memoryCall();
public:
int getLoadPercent(int &val);
int getUsage(double &val);
int getTotalByte(DWORD &val);
int getFreeByte(DWORD &val);
};
#endif
|
# ... existing code ...
typedef unsigned long DWORD;
class SystemMemory
{
# ... rest of the code ...
|
0cb4362412abfc45e019fc8e4bd3b1c551662bf7
|
iOS/PlayPlan/PublicViews/SideMenu.h
|
iOS/PlayPlan/PublicViews/SideMenu.h
|
//
// SideMenu.h
// PlayPlan
//
// Created by Zeacone on 15/11/19.
// Copyright © 2015年 Zeacone. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "PlayPlan.h"
@interface SideMenu : UIView
@end
|
//
// SideMenu.h
// PlayPlan
//
// Created by Zeacone on 15/11/19.
// Copyright © 2015年 Zeacone. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "PlayPlan.h"
@protocol SideMenuDelegate <NSObject>
- (void)sideMenu:(UIButton *)menu title:(NSString *)title;
@end
@interface SideMenu : UIView
@property (nonatomic, assign) id<SideMenuDelegate> delegate;
@end
|
Add delegate for side menu.
|
Add delegate for side menu.
|
C
|
mit
|
Zeacone/PlayPlan,Zeacone/PlayPlan
|
c
|
## Code Before:
//
// SideMenu.h
// PlayPlan
//
// Created by Zeacone on 15/11/19.
// Copyright © 2015年 Zeacone. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "PlayPlan.h"
@interface SideMenu : UIView
@end
## Instruction:
Add delegate for side menu.
## Code After:
//
// SideMenu.h
// PlayPlan
//
// Created by Zeacone on 15/11/19.
// Copyright © 2015年 Zeacone. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "PlayPlan.h"
@protocol SideMenuDelegate <NSObject>
- (void)sideMenu:(UIButton *)menu title:(NSString *)title;
@end
@interface SideMenu : UIView
@property (nonatomic, assign) id<SideMenuDelegate> delegate;
@end
|
...
#import <UIKit/UIKit.h>
#import "PlayPlan.h"
@protocol SideMenuDelegate <NSObject>
- (void)sideMenu:(UIButton *)menu title:(NSString *)title;
@end
@interface SideMenu : UIView
@property (nonatomic, assign) id<SideMenuDelegate> delegate;
@end
...
|
b45768b1321048d323fe762319a21e2da928142b
|
main.c
|
main.c
|
int main(void)
{
return 0;
}
|
short int CURRENT_STATE = SETUP_HANDLER;
void setupHandler() {
// Hardware definitions
// Setup next state
}
void loop() {
while(1) {
switch(CURRENT_STATE) {
case SETUP_HANDLER: setupHandler(); break;
}
}
}
int main(void) {
loop();
return 0;
}
|
Add base `state machine` from scratch
|
feature: Add base `state machine` from scratch
|
C
|
mit
|
marceloboeira/miller-urey
|
c
|
## Code Before:
int main(void)
{
return 0;
}
## Instruction:
feature: Add base `state machine` from scratch
## Code After:
short int CURRENT_STATE = SETUP_HANDLER;
void setupHandler() {
// Hardware definitions
// Setup next state
}
void loop() {
while(1) {
switch(CURRENT_STATE) {
case SETUP_HANDLER: setupHandler(); break;
}
}
}
int main(void) {
loop();
return 0;
}
|
# ... existing code ...
short int CURRENT_STATE = SETUP_HANDLER;
void setupHandler() {
// Hardware definitions
// Setup next state
}
void loop() {
while(1) {
switch(CURRENT_STATE) {
case SETUP_HANDLER: setupHandler(); break;
}
}
}
int main(void) {
loop();
return 0;
}
# ... rest of the code ...
|
7435d508ae95c69dcb596e74f62bfb030011201f
|
tests/general/test_required_folders.py
|
tests/general/test_required_folders.py
|
""" Test that the All Mail folder is enabled for Gmail. """
import pytest
from inbox.auth.gmail import GmailAuthHandler
from inbox.basicauth import GmailSettingError
from inbox.crispin import GmailCrispinClient
class AccountStub(object):
id = 0
email_address = '[email protected]'
access_token = None
imap_endpoint = None
def new_token(self):
return ('foo', 22)
def validate_token(self, new_token):
return True
class ConnectionStub(object):
def logout(self):
pass
def get_auth_handler(monkeypatch, folders):
g = GmailAuthHandler('gmail')
def mock_connect(a):
return ConnectionStub()
g.connect_account = mock_connect
monkeypatch.setattr(GmailCrispinClient, 'folder_names',
lambda x: folders)
return g
def test_all_mail_missing(monkeypatch):
"""
Test that validate_folders throws a GmailSettingError if All Mail
is not in the list of folders.
"""
g = get_auth_handler(monkeypatch, {'inbox': 'INBOX'})
with pytest.raises(GmailSettingError):
g.verify_account(AccountStub())
def test_all_mail_present(monkeypatch):
"""
Test that the validate_folders passes if All Mail is present.
"""
g = get_auth_handler(monkeypatch, {'all': 'ALL', 'inbox': 'INBOX',
'trash': 'TRASH'})
assert g.verify_account(AccountStub())
|
""" Test that the All Mail folder is enabled for Gmail. """
import pytest
from inbox.auth.gmail import GmailAuthHandler
from inbox.basicauth import GmailSettingError
from inbox.crispin import GmailCrispinClient
class AccountStub(object):
id = 0
email_address = '[email protected]'
access_token = None
imap_endpoint = None
sync_state = 'running'
def new_token(self):
return ('foo', 22)
def validate_token(self, new_token):
return True
class ConnectionStub(object):
def logout(self):
pass
def get_auth_handler(monkeypatch, folders):
g = GmailAuthHandler('gmail')
def mock_connect(a):
return ConnectionStub()
g.connect_account = mock_connect
monkeypatch.setattr(GmailCrispinClient, 'folder_names',
lambda x: folders)
return g
def test_all_mail_missing(monkeypatch):
"""
Test that validate_folders throws a GmailSettingError if All Mail
is not in the list of folders.
"""
g = get_auth_handler(monkeypatch, {'inbox': 'INBOX'})
with pytest.raises(GmailSettingError):
g.verify_account(AccountStub())
def test_all_mail_present(monkeypatch):
"""
Test that the validate_folders passes if All Mail is present.
"""
g = get_auth_handler(monkeypatch, {'all': 'ALL', 'inbox': 'INBOX',
'trash': 'TRASH'})
assert g.verify_account(AccountStub())
|
Update mock Account in tests.
|
Update mock Account in tests.
|
Python
|
agpl-3.0
|
jobscore/sync-engine,jobscore/sync-engine,nylas/sync-engine,closeio/nylas,jobscore/sync-engine,nylas/sync-engine,closeio/nylas,jobscore/sync-engine,closeio/nylas,nylas/sync-engine,nylas/sync-engine,closeio/nylas
|
python
|
## Code Before:
""" Test that the All Mail folder is enabled for Gmail. """
import pytest
from inbox.auth.gmail import GmailAuthHandler
from inbox.basicauth import GmailSettingError
from inbox.crispin import GmailCrispinClient
class AccountStub(object):
id = 0
email_address = '[email protected]'
access_token = None
imap_endpoint = None
def new_token(self):
return ('foo', 22)
def validate_token(self, new_token):
return True
class ConnectionStub(object):
def logout(self):
pass
def get_auth_handler(monkeypatch, folders):
g = GmailAuthHandler('gmail')
def mock_connect(a):
return ConnectionStub()
g.connect_account = mock_connect
monkeypatch.setattr(GmailCrispinClient, 'folder_names',
lambda x: folders)
return g
def test_all_mail_missing(monkeypatch):
"""
Test that validate_folders throws a GmailSettingError if All Mail
is not in the list of folders.
"""
g = get_auth_handler(monkeypatch, {'inbox': 'INBOX'})
with pytest.raises(GmailSettingError):
g.verify_account(AccountStub())
def test_all_mail_present(monkeypatch):
"""
Test that the validate_folders passes if All Mail is present.
"""
g = get_auth_handler(monkeypatch, {'all': 'ALL', 'inbox': 'INBOX',
'trash': 'TRASH'})
assert g.verify_account(AccountStub())
## Instruction:
Update mock Account in tests.
## Code After:
""" Test that the All Mail folder is enabled for Gmail. """
import pytest
from inbox.auth.gmail import GmailAuthHandler
from inbox.basicauth import GmailSettingError
from inbox.crispin import GmailCrispinClient
class AccountStub(object):
id = 0
email_address = '[email protected]'
access_token = None
imap_endpoint = None
sync_state = 'running'
def new_token(self):
return ('foo', 22)
def validate_token(self, new_token):
return True
class ConnectionStub(object):
def logout(self):
pass
def get_auth_handler(monkeypatch, folders):
g = GmailAuthHandler('gmail')
def mock_connect(a):
return ConnectionStub()
g.connect_account = mock_connect
monkeypatch.setattr(GmailCrispinClient, 'folder_names',
lambda x: folders)
return g
def test_all_mail_missing(monkeypatch):
"""
Test that validate_folders throws a GmailSettingError if All Mail
is not in the list of folders.
"""
g = get_auth_handler(monkeypatch, {'inbox': 'INBOX'})
with pytest.raises(GmailSettingError):
g.verify_account(AccountStub())
def test_all_mail_present(monkeypatch):
"""
Test that the validate_folders passes if All Mail is present.
"""
g = get_auth_handler(monkeypatch, {'all': 'ALL', 'inbox': 'INBOX',
'trash': 'TRASH'})
assert g.verify_account(AccountStub())
|
// ... existing code ...
email_address = '[email protected]'
access_token = None
imap_endpoint = None
sync_state = 'running'
def new_token(self):
return ('foo', 22)
// ... rest of the code ...
|
5e67e16d17d06a0f4d307a035ca6b62f094995c6
|
network/api/serializers.py
|
network/api/serializers.py
|
from rest_framework import serializers
from network.base.models import Data
class DataSerializer(serializers.ModelSerializer):
class Meta:
model = Data
fields = ('id', 'start', 'end', 'observation', 'ground_station', 'payload')
read_only_fields = ['id', 'start', 'end', 'observation', 'ground_station']
class JobSerializer(serializers.ModelSerializer):
frequency = serializers.SerializerMethodField()
tle0 = serializers.SerializerMethodField()
tle1 = serializers.SerializerMethodField()
tle2 = serializers.SerializerMethodField()
class Meta:
model = Data
fields = ('id', 'start', 'end', 'ground_station', 'tle0', 'tle1', 'tle2',
'frequency')
def get_frequency(self, obj):
return obj.observation.transmitter.downlink_low
def get_tle0(self, obj):
return obj.observation.satellite.tle0
def get_tle1(self, obj):
return obj.observation.satellite.tle1
def get_tle2(self, obj):
return obj.observation.satellite.tle2
|
from rest_framework import serializers
from network.base.models import Data
class DataSerializer(serializers.ModelSerializer):
class Meta:
model = Data
fields = ('id', 'start', 'end', 'observation', 'ground_station', 'payload')
read_only_fields = ['id', 'start', 'end', 'observation', 'ground_station']
class JobSerializer(serializers.ModelSerializer):
frequency = serializers.SerializerMethodField()
tle0 = serializers.SerializerMethodField()
tle1 = serializers.SerializerMethodField()
tle2 = serializers.SerializerMethodField()
class Meta:
model = Data
fields = ('id', 'start', 'end', 'ground_station', 'tle0', 'tle1', 'tle2',
'frequency')
def get_frequency(self, obj):
return obj.observation.transmitter.downlink_low
def get_tle0(self, obj):
return obj.observation.tle.tle0
def get_tle1(self, obj):
return obj.observation.tle.tle1
def get_tle2(self, obj):
return obj.observation.tle.tle2
|
Adjust API to TLE code changes
|
Adjust API to TLE code changes
|
Python
|
agpl-3.0
|
cshields/satnogs-network,cshields/satnogs-network,cshields/satnogs-network,cshields/satnogs-network
|
python
|
## Code Before:
from rest_framework import serializers
from network.base.models import Data
class DataSerializer(serializers.ModelSerializer):
class Meta:
model = Data
fields = ('id', 'start', 'end', 'observation', 'ground_station', 'payload')
read_only_fields = ['id', 'start', 'end', 'observation', 'ground_station']
class JobSerializer(serializers.ModelSerializer):
frequency = serializers.SerializerMethodField()
tle0 = serializers.SerializerMethodField()
tle1 = serializers.SerializerMethodField()
tle2 = serializers.SerializerMethodField()
class Meta:
model = Data
fields = ('id', 'start', 'end', 'ground_station', 'tle0', 'tle1', 'tle2',
'frequency')
def get_frequency(self, obj):
return obj.observation.transmitter.downlink_low
def get_tle0(self, obj):
return obj.observation.satellite.tle0
def get_tle1(self, obj):
return obj.observation.satellite.tle1
def get_tle2(self, obj):
return obj.observation.satellite.tle2
## Instruction:
Adjust API to TLE code changes
## Code After:
from rest_framework import serializers
from network.base.models import Data
class DataSerializer(serializers.ModelSerializer):
class Meta:
model = Data
fields = ('id', 'start', 'end', 'observation', 'ground_station', 'payload')
read_only_fields = ['id', 'start', 'end', 'observation', 'ground_station']
class JobSerializer(serializers.ModelSerializer):
frequency = serializers.SerializerMethodField()
tle0 = serializers.SerializerMethodField()
tle1 = serializers.SerializerMethodField()
tle2 = serializers.SerializerMethodField()
class Meta:
model = Data
fields = ('id', 'start', 'end', 'ground_station', 'tle0', 'tle1', 'tle2',
'frequency')
def get_frequency(self, obj):
return obj.observation.transmitter.downlink_low
def get_tle0(self, obj):
return obj.observation.tle.tle0
def get_tle1(self, obj):
return obj.observation.tle.tle1
def get_tle2(self, obj):
return obj.observation.tle.tle2
|
# ... existing code ...
return obj.observation.transmitter.downlink_low
def get_tle0(self, obj):
return obj.observation.tle.tle0
def get_tle1(self, obj):
return obj.observation.tle.tle1
def get_tle2(self, obj):
return obj.observation.tle.tle2
# ... rest of the code ...
|
f06692ee3d6a12f13d425e7b0aaa88ea8cd6ac94
|
rhinoInSpring/src/java/org/szegedi/spring/web/jsflow/ScriptSelectionStrategy.java
|
rhinoInSpring/src/java/org/szegedi/spring/web/jsflow/ScriptSelectionStrategy.java
|
package org.szegedi.spring.web.jsflow;
import javax.servlet.http.HttpServletRequest;
/**
* Interface for objects that select a script for a particular initial HTTP
* request.
* @author Attila Szegedi
* @version $Id: $
*/
public interface ScriptSelectionStrategy
{
/**
* Returns the pathname of the script that should run for a particular
* initial HTTP request.
* @param request the HTTP request
* @return the path of the script. null can be returned to indicate that
* this strategy is unable to select a script (i.e. because some data is
* missing in the request). The controller will respond to this by sending
* back a HTTP 400 "Bad Request" status.
*/
public String getScriptPath(HttpServletRequest request);
}
|
package org.szegedi.spring.web.jsflow;
import javax.servlet.http.HttpServletRequest;
import org.springframework.web.servlet.ModelAndViewDefiningException;
/**
* Interface for objects that select a script for a particular initial HTTP
* request.
* @author Attila Szegedi
* @version $Id: $
*/
public interface ScriptSelectionStrategy
{
/**
* Returns the pathname of the script that should run for a particular
* initial HTTP request.
* @param request the HTTP request
* @return the path of the script. null can be returned to indicate that
* this strategy is unable to select a script (i.e. because some data is
* missing in the request). The controller will respond to this by sending
* back a HTTP 400 "Bad Request" status. Alternatively, the strategy can
* throw an instance of {@link ModelAndViewDefiningException}.
*/
public String getScriptPath(HttpServletRequest request)
throws ModelAndViewDefiningException;
}
|
Allow it to throw a ModelAndViewDefiningException
|
Allow it to throw a ModelAndViewDefiningException
|
Java
|
apache-2.0
|
szegedi/spring-web-jsflow,szegedi/spring-web-jsflow,szegedi/spring-web-jsflow
|
java
|
## Code Before:
package org.szegedi.spring.web.jsflow;
import javax.servlet.http.HttpServletRequest;
/**
* Interface for objects that select a script for a particular initial HTTP
* request.
* @author Attila Szegedi
* @version $Id: $
*/
public interface ScriptSelectionStrategy
{
/**
* Returns the pathname of the script that should run for a particular
* initial HTTP request.
* @param request the HTTP request
* @return the path of the script. null can be returned to indicate that
* this strategy is unable to select a script (i.e. because some data is
* missing in the request). The controller will respond to this by sending
* back a HTTP 400 "Bad Request" status.
*/
public String getScriptPath(HttpServletRequest request);
}
## Instruction:
Allow it to throw a ModelAndViewDefiningException
## Code After:
package org.szegedi.spring.web.jsflow;
import javax.servlet.http.HttpServletRequest;
import org.springframework.web.servlet.ModelAndViewDefiningException;
/**
* Interface for objects that select a script for a particular initial HTTP
* request.
* @author Attila Szegedi
* @version $Id: $
*/
public interface ScriptSelectionStrategy
{
/**
* Returns the pathname of the script that should run for a particular
* initial HTTP request.
* @param request the HTTP request
* @return the path of the script. null can be returned to indicate that
* this strategy is unable to select a script (i.e. because some data is
* missing in the request). The controller will respond to this by sending
* back a HTTP 400 "Bad Request" status. Alternatively, the strategy can
* throw an instance of {@link ModelAndViewDefiningException}.
*/
public String getScriptPath(HttpServletRequest request)
throws ModelAndViewDefiningException;
}
|
...
package org.szegedi.spring.web.jsflow;
import javax.servlet.http.HttpServletRequest;
import org.springframework.web.servlet.ModelAndViewDefiningException;
/**
* Interface for objects that select a script for a particular initial HTTP
...
* @return the path of the script. null can be returned to indicate that
* this strategy is unable to select a script (i.e. because some data is
* missing in the request). The controller will respond to this by sending
* back a HTTP 400 "Bad Request" status. Alternatively, the strategy can
* throw an instance of {@link ModelAndViewDefiningException}.
*/
public String getScriptPath(HttpServletRequest request)
throws ModelAndViewDefiningException;
}
...
|
b07d74f99338165f8bb83ac0599452b021b96a8f
|
django_boolean_sum.py
|
django_boolean_sum.py
|
from django.conf import settings
from django.db.models.aggregates import Sum
from django.db.models.sql.aggregates import Sum as BaseSQLSum
class SQLSum(BaseSQLSum):
@property
def sql_template(self):
if settings.DATABASES['default']['ENGINE'] == \
'django.db.backends.postgresql_psycopg2':
return '%(function)s(%(field)s::int)'
return '%(function)s(%(field)s)'
class BooleanSum(Sum):
function = None
def add_to_query(self, query, alias, col, source, is_summary):
aggregate = SQLSum(col, source=source, is_summary=is_summary,
**self.extra)
query.aggregates[alias] = aggregate
|
from django.conf import settings
from django.db.models.aggregates import Sum
class SQLSum(Sum):
@property
def sql_template(self):
if settings.DATABASES['default']['ENGINE'] == \
'django.db.backends.postgresql_psycopg2':
return '%(function)s(%(field)s::int)'
return '%(function)s(%(field)s)'
class BooleanSum(Sum):
def add_to_query(self, query, alias, col, source, is_summary):
aggregate = SQLSum(col, source=source, is_summary=is_summary,
**self.extra)
query.aggregates[alias] = aggregate
|
Add support for Django 1.10+
|
Add support for Django 1.10+
|
Python
|
bsd-2-clause
|
Mibou/django-boolean-sum
|
python
|
## Code Before:
from django.conf import settings
from django.db.models.aggregates import Sum
from django.db.models.sql.aggregates import Sum as BaseSQLSum
class SQLSum(BaseSQLSum):
@property
def sql_template(self):
if settings.DATABASES['default']['ENGINE'] == \
'django.db.backends.postgresql_psycopg2':
return '%(function)s(%(field)s::int)'
return '%(function)s(%(field)s)'
class BooleanSum(Sum):
function = None
def add_to_query(self, query, alias, col, source, is_summary):
aggregate = SQLSum(col, source=source, is_summary=is_summary,
**self.extra)
query.aggregates[alias] = aggregate
## Instruction:
Add support for Django 1.10+
## Code After:
from django.conf import settings
from django.db.models.aggregates import Sum
class SQLSum(Sum):
@property
def sql_template(self):
if settings.DATABASES['default']['ENGINE'] == \
'django.db.backends.postgresql_psycopg2':
return '%(function)s(%(field)s::int)'
return '%(function)s(%(field)s)'
class BooleanSum(Sum):
def add_to_query(self, query, alias, col, source, is_summary):
aggregate = SQLSum(col, source=source, is_summary=is_summary,
**self.extra)
query.aggregates[alias] = aggregate
|
// ... existing code ...
from django.conf import settings
from django.db.models.aggregates import Sum
class SQLSum(Sum):
@property
def sql_template(self):
if settings.DATABASES['default']['ENGINE'] == \
// ... modified code ...
class BooleanSum(Sum):
def add_to_query(self, query, alias, col, source, is_summary):
aggregate = SQLSum(col, source=source, is_summary=is_summary,
**self.extra)
// ... rest of the code ...
|
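A hedged usage sketch for the BooleanSum aggregate above; the model and field names are invented for illustration, and it assumes a configured Django project with this app installed:
from django.db import models
from django_boolean_sum import BooleanSum
class Task(models.Model):
    done = models.BooleanField(default=False)
# Summing the boolean column counts rows where done is True; on PostgreSQL the
# column is cast to int first, per the sql_template above.
totals = Task.objects.aggregate(done_count=BooleanSum('done'))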
df4345e0a8fa410ebc2577dfbbb50c6033637c70
|
app/src/main/java/com/studio4plus/homerplayer/ui/Speaker.java
|
app/src/main/java/com/studio4plus/homerplayer/ui/Speaker.java
|
package com.studio4plus.homerplayer.ui;
import android.content.Context;
import android.speech.tts.TextToSpeech;
import java.util.Locale;
class Speaker implements TextToSpeech.OnInitListener {
private final Locale locale;
private final TextToSpeech tts;
private boolean ttsReady;
private String pendingSpeech;
Speaker(Context context) {
this.locale = context.getResources().getConfiguration().locale;
this.tts = new TextToSpeech(context, this);
}
@Override
public void onInit(int status) {
if (status == TextToSpeech.SUCCESS) {
ttsReady = true;
tts.setLanguage(locale);
tts.speak(pendingSpeech, TextToSpeech.QUEUE_FLUSH, null);
pendingSpeech = null;
}
}
public void speak(String text) {
if (ttsReady) {
tts.speak(text, TextToSpeech.QUEUE_FLUSH, null);
} else {
pendingSpeech = text;
}
}
public void stop() {
tts.stop();
pendingSpeech = null;
}
public void shutdown() {
ttsReady = false;
tts.shutdown();
}
}
|
package com.studio4plus.homerplayer.ui;
import android.content.Context;
import android.speech.tts.TextToSpeech;
import java.util.HashMap;
import java.util.Locale;
class Speaker implements TextToSpeech.OnInitListener {
// TTS is usually much louder than a regular audio book recording.
private static final String TTS_VOLUME_ADJUSTMENT = "0.5";
private final Locale locale;
private final TextToSpeech tts;
private final HashMap<String, String> speechParams = new HashMap<>();
private boolean ttsReady;
private String pendingSpeech;
Speaker(Context context) {
this.locale = context.getResources().getConfiguration().locale;
this.tts = new TextToSpeech(context, this);
speechParams.put(TextToSpeech.Engine.KEY_PARAM_VOLUME, TTS_VOLUME_ADJUSTMENT);
}
@Override
public void onInit(int status) {
if (status == TextToSpeech.SUCCESS) {
ttsReady = true;
tts.setLanguage(locale);
tts.speak(pendingSpeech, TextToSpeech.QUEUE_FLUSH, speechParams);
pendingSpeech = null;
}
}
public void speak(String text) {
if (ttsReady) {
tts.speak(text, TextToSpeech.QUEUE_FLUSH, speechParams);
} else {
pendingSpeech = text;
}
}
public void stop() {
tts.stop();
pendingSpeech = null;
}
public void shutdown() {
ttsReady = false;
tts.shutdown();
}
}
|
Read audio book titles with a softer voice.
|
Read audio book titles with a softer voice.
|
Java
|
mit
|
msimonides/homerplayer,treejames/homerplayer
|
java
|
## Code Before:
package com.studio4plus.homerplayer.ui;
import android.content.Context;
import android.speech.tts.TextToSpeech;
import java.util.Locale;
class Speaker implements TextToSpeech.OnInitListener {
private final Locale locale;
private final TextToSpeech tts;
private boolean ttsReady;
private String pendingSpeech;
Speaker(Context context) {
this.locale = context.getResources().getConfiguration().locale;
this.tts = new TextToSpeech(context, this);
}
@Override
public void onInit(int status) {
if (status == TextToSpeech.SUCCESS) {
ttsReady = true;
tts.setLanguage(locale);
tts.speak(pendingSpeech, TextToSpeech.QUEUE_FLUSH, null);
pendingSpeech = null;
}
}
public void speak(String text) {
if (ttsReady) {
tts.speak(text, TextToSpeech.QUEUE_FLUSH, null);
} else {
pendingSpeech = text;
}
}
public void stop() {
tts.stop();
pendingSpeech = null;
}
public void shutdown() {
ttsReady = false;
tts.shutdown();
}
}
## Instruction:
Read audio book titles with a softer voice.
## Code After:
package com.studio4plus.homerplayer.ui;
import android.content.Context;
import android.speech.tts.TextToSpeech;
import java.util.HashMap;
import java.util.Locale;
class Speaker implements TextToSpeech.OnInitListener {
// TTS is usually much louder than a regular audio book recording.
private static final String TTS_VOLUME_ADJUSTMENT = "0.5";
private final Locale locale;
private final TextToSpeech tts;
private final HashMap<String, String> speechParams = new HashMap<>();
private boolean ttsReady;
private String pendingSpeech;
Speaker(Context context) {
this.locale = context.getResources().getConfiguration().locale;
this.tts = new TextToSpeech(context, this);
speechParams.put(TextToSpeech.Engine.KEY_PARAM_VOLUME, TTS_VOLUME_ADJUSTMENT);
}
@Override
public void onInit(int status) {
if (status == TextToSpeech.SUCCESS) {
ttsReady = true;
tts.setLanguage(locale);
tts.speak(pendingSpeech, TextToSpeech.QUEUE_FLUSH, speechParams);
pendingSpeech = null;
}
}
public void speak(String text) {
if (ttsReady) {
tts.speak(text, TextToSpeech.QUEUE_FLUSH, speechParams);
} else {
pendingSpeech = text;
}
}
public void stop() {
tts.stop();
pendingSpeech = null;
}
public void shutdown() {
ttsReady = false;
tts.shutdown();
}
}
|
# ... existing code ...
import android.content.Context;
import android.speech.tts.TextToSpeech;
import java.util.HashMap;
import java.util.Locale;
class Speaker implements TextToSpeech.OnInitListener {
// TTS is usually much louder than a regular audio book recording.
private static final String TTS_VOLUME_ADJUSTMENT = "0.5";
private final Locale locale;
private final TextToSpeech tts;
private final HashMap<String, String> speechParams = new HashMap<>();
private boolean ttsReady;
private String pendingSpeech;
# ... modified code ...
Speaker(Context context) {
this.locale = context.getResources().getConfiguration().locale;
this.tts = new TextToSpeech(context, this);
speechParams.put(TextToSpeech.Engine.KEY_PARAM_VOLUME, TTS_VOLUME_ADJUSTMENT);
}
@Override
...
if (status == TextToSpeech.SUCCESS) {
ttsReady = true;
tts.setLanguage(locale);
tts.speak(pendingSpeech, TextToSpeech.QUEUE_FLUSH, speechParams);
pendingSpeech = null;
}
}
...
public void speak(String text) {
if (ttsReady) {
tts.speak(text, TextToSpeech.QUEUE_FLUSH, speechParams);
} else {
pendingSpeech = text;
}
# ... rest of the code ...
|
aa9d2c12fc4bbe25331f3afadc0638f107828ba6
|
src/main/java/com/samovich/cop2800/chapter15/assignmnet/SimpleTextEditorGUI.java
|
src/main/java/com/samovich/cop2800/chapter15/assignmnet/SimpleTextEditorGUI.java
|
package com.samovich.cop2800.chapter15.assignmnet;
import javax.swing.*;
import java.awt.*;
/**
* Contain the code for the interface and event handling
* Filename SimpleTextEditorGUI
* Created by Valery Samovich
* Written on 7/27/16
*/
public class SimpleTextEditorGUI extends JFrame {
private JMenuBar mnuBar = new JMenuBar();
private JMenu mnuFile = new JMenu("Edit");
private JMenu mnuHelp = new JMenu("Help");
public SimpleTextEditorGUI() {
setLayout(new BorderLayout());
setSize(360, 210);
setJMenuBar(mnuBar);
mnuBar.add(mnuFile);
mnuBar.add(mnuHelp);
}
}
|
package com.samovich.cop2800.chapter15.assignmnet;
import javax.swing.*;
import java.awt.*;
import java.awt.event.KeyEvent;
/**
* Contain the code for the interface and event handling
* Filename SimpleTextEditorGUI
* Created by Valery Samovich
* Written on 7/27/16
*/
public class SimpleTextEditorGUI extends JFrame {
private JMenuBar mnuBar = new JMenuBar();
private JMenu mnuFile = new JMenu("Edit");
private JMenu mnuHelp = new JMenu("Help");
JPanel plnContent = new JPanel();
JTextArea txtArea = new JTextArea(200, 100);
JLabel lblFontSize = new JLabel("Font Size");
JTextField txtSetSize = new JTextField();
/**
* Constructor
*/
public SimpleTextEditorGUI() {
setSize(360, 210);
setLayout(new BorderLayout());
setJMenuBar(mnuBar);
txtArea.setLineWrap(true);
txtArea.setWrapStyleWord(true);
// add constants from keyboard
mnuFile.setMnemonic(KeyEvent.VK_F);
mnuHelp.setMnemonic(KeyEvent.VK_H);
// add menu
mnuBar.add(mnuFile);
mnuBar.add(mnuHelp);
// text area
plnContent.setLayout(new BorderLayout());
plnContent.add(txtArea, BorderLayout.EAST);
plnContent.add(lblFontSize);
plnContent.add(txtSetSize);
setContentPane(plnContent);
}
}
|
Add text field: NOT COMPLETED
|
Add text field: NOT COMPLETED
|
Java
|
unknown
|
valerysamovich/java-technologies,vsamov/java-technologies,vsamov/java-technologies
|
java
|
## Code Before:
package com.samovich.cop2800.chapter15.assignmnet;
import javax.swing.*;
import java.awt.*;
/**
* Contain the code for the interface and event handling
* Filename SimpleTextEditorGUI
* Created by Valery Samovich
* Written on 7/27/16
*/
public class SimpleTextEditorGUI extends JFrame {
private JMenuBar mnuBar = new JMenuBar();
private JMenu mnuFile = new JMenu("Edit");
private JMenu mnuHelp = new JMenu("Help");
public SimpleTextEditorGUI() {
setLayout(new BorderLayout());
setSize(360, 210);
setJMenuBar(mnuBar);
mnuBar.add(mnuFile);
mnuBar.add(mnuHelp);
}
}
## Instruction:
Add text field: NOT COMPLETED
## Code After:
package com.samovich.cop2800.chapter15.assignmnet;
import javax.swing.*;
import java.awt.*;
import java.awt.event.KeyEvent;
/**
* Contain the code for the interface and event handling
* Filename SimpleTextEditorGUI
* Created by Valery Samovich
* Written on 7/27/16
*/
public class SimpleTextEditorGUI extends JFrame {
private JMenuBar mnuBar = new JMenuBar();
private JMenu mnuFile = new JMenu("Edit");
private JMenu mnuHelp = new JMenu("Help");
JPanel plnContent = new JPanel();
JTextArea txtArea = new JTextArea(200, 100);
JLabel lblFontSize = new JLabel("Font Size");
JTextField txtSetSize = new JTextField();
/**
* Constructor
*/
public SimpleTextEditorGUI() {
setSize(360, 210);
setLayout(new BorderLayout());
setJMenuBar(mnuBar);
txtArea.setLineWrap(true);
txtArea.setWrapStyleWord(true);
// add constants from keyboard
mnuFile.setMnemonic(KeyEvent.VK_F);
mnuHelp.setMnemonic(KeyEvent.VK_H);
// add menu
mnuBar.add(mnuFile);
mnuBar.add(mnuHelp);
// text area
plnContent.setLayout(new BorderLayout());
plnContent.add(txtArea, BorderLayout.EAST);
plnContent.add(lblFontSize);
plnContent.add(txtSetSize);
setContentPane(plnContent);
}
}
|
# ... existing code ...
import javax.swing.*;
import java.awt.*;
import java.awt.event.KeyEvent;
/**
* Contain the code for the interface and event handling
# ... modified code ...
private JMenuBar mnuBar = new JMenuBar();
private JMenu mnuFile = new JMenu("Edit");
private JMenu mnuHelp = new JMenu("Help");
JPanel plnContent = new JPanel();
JTextArea txtArea = new JTextArea(200, 100);
JLabel lblFontSize = new JLabel("Font Size");
JTextField txtSetSize = new JTextField();
/**
* Constructor
*/
public SimpleTextEditorGUI() {
setSize(360, 210);
setLayout(new BorderLayout());
setJMenuBar(mnuBar);
txtArea.setLineWrap(true);
txtArea.setWrapStyleWord(true);
// add constants from keyboard
mnuFile.setMnemonic(KeyEvent.VK_F);
mnuHelp.setMnemonic(KeyEvent.VK_H);
// add menu
mnuBar.add(mnuFile);
mnuBar.add(mnuHelp);
// text area
plnContent.setLayout(new BorderLayout());
plnContent.add(txtArea, BorderLayout.EAST);
plnContent.add(lblFontSize);
plnContent.add(txtSetSize);
setContentPane(plnContent);
}
}
# ... rest of the code ...
|
fb7913f58315b1e9c1a3534a7882b78149e274dd
|
fml/src/main/java/cpw/mods/fml/common/network/handshake/HandshakeMessageHandler.java
|
fml/src/main/java/cpw/mods/fml/common/network/handshake/HandshakeMessageHandler.java
|
package cpw.mods.fml.common.network.handshake;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.util.AttributeKey;
public class HandshakeMessageHandler<S extends Enum<S> & IHandshakeState<S>> extends SimpleChannelInboundHandler<FMLHandshakeMessage> {
private static final AttributeKey<IHandshakeState<?>> STATE = new AttributeKey<IHandshakeState<?>>("fml:handshake-state");
private final AttributeKey<S> fmlHandshakeState;
private S initialState;
@SuppressWarnings("unchecked")
public HandshakeMessageHandler(Class<S> stateType)
{
fmlHandshakeState = (AttributeKey<S>) STATE;
initialState = Enum.valueOf(stateType, "START");
}
@Override
protected void channelRead0(ChannelHandlerContext ctx, FMLHandshakeMessage msg) throws Exception
{
S state = ctx.attr(fmlHandshakeState).get();
S newState = state.accept(ctx, msg);
ctx.attr(fmlHandshakeState).set(newState);
}
@Override
public void channelActive(ChannelHandlerContext ctx) throws Exception
{
ctx.attr(fmlHandshakeState).set(initialState);
}
@Override
public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception
{
S state = ctx.attr(fmlHandshakeState).get();
S newState = state.accept(ctx, null);
ctx.attr(fmlHandshakeState).set(newState);
}
}
|
package cpw.mods.fml.common.network.handshake;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.util.AttributeKey;
public class HandshakeMessageHandler<S extends Enum<S> & IHandshakeState<S>> extends SimpleChannelInboundHandler<FMLHandshakeMessage> {
private static final AttributeKey<IHandshakeState<?>> STATE = new AttributeKey<IHandshakeState<?>>("fml:handshake-state");
private final AttributeKey<S> fmlHandshakeState;
private S initialState;
@SuppressWarnings("unchecked")
public HandshakeMessageHandler(Class<S> stateType)
{
fmlHandshakeState = (AttributeKey<S>) ((Object)STATE);
initialState = Enum.valueOf(stateType, "START");
}
@Override
protected void channelRead0(ChannelHandlerContext ctx, FMLHandshakeMessage msg) throws Exception
{
S state = ctx.attr(fmlHandshakeState).get();
S newState = state.accept(ctx, msg);
ctx.attr(fmlHandshakeState).set(newState);
}
@Override
public void channelActive(ChannelHandlerContext ctx) throws Exception
{
ctx.attr(fmlHandshakeState).set(initialState);
}
@Override
public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception
{
S state = ctx.attr(fmlHandshakeState).get();
S newState = state.accept(ctx, null);
ctx.attr(fmlHandshakeState).set(newState);
}
}
|
Fix stupid possible compiler error.
|
Fix stupid possible compiler error.
|
Java
|
lgpl-2.1
|
fcjailybo/MinecraftForge,blay09/MinecraftForge,shadekiller666/MinecraftForge,jdpadrnos/MinecraftForge,luacs1998/MinecraftForge,Ghostlyr/MinecraftForge,dmf444/MinecraftForge,RainWarrior/MinecraftForge,bonii-xx/MinecraftForge,Theerapak/MinecraftForge,ThiagoGarciaAlves/MinecraftForge,karlthepagan/MinecraftForge,simon816/MinecraftForge,mickkay/MinecraftForge,CrafterKina/MinecraftForge,Zaggy1024/MinecraftForge,Mathe172/MinecraftForge,brubo1/MinecraftForge,Vorquel/MinecraftForge
|
java
|
## Code Before:
package cpw.mods.fml.common.network.handshake;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.util.AttributeKey;
public class HandshakeMessageHandler<S extends Enum<S> & IHandshakeState<S>> extends SimpleChannelInboundHandler<FMLHandshakeMessage> {
private static final AttributeKey<IHandshakeState<?>> STATE = new AttributeKey<IHandshakeState<?>>("fml:handshake-state");
private final AttributeKey<S> fmlHandshakeState;
private S initialState;
@SuppressWarnings("unchecked")
public HandshakeMessageHandler(Class<S> stateType)
{
fmlHandshakeState = (AttributeKey<S>) STATE;
initialState = Enum.valueOf(stateType, "START");
}
@Override
protected void channelRead0(ChannelHandlerContext ctx, FMLHandshakeMessage msg) throws Exception
{
S state = ctx.attr(fmlHandshakeState).get();
S newState = state.accept(ctx, msg);
ctx.attr(fmlHandshakeState).set(newState);
}
@Override
public void channelActive(ChannelHandlerContext ctx) throws Exception
{
ctx.attr(fmlHandshakeState).set(initialState);
}
@Override
public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception
{
S state = ctx.attr(fmlHandshakeState).get();
S newState = state.accept(ctx, null);
ctx.attr(fmlHandshakeState).set(newState);
}
}
## Instruction:
Fix stupid possible compiler error.
## Code After:
package cpw.mods.fml.common.network.handshake;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.util.AttributeKey;
public class HandshakeMessageHandler<S extends Enum<S> & IHandshakeState<S>> extends SimpleChannelInboundHandler<FMLHandshakeMessage> {
private static final AttributeKey<IHandshakeState<?>> STATE = new AttributeKey<IHandshakeState<?>>("fml:handshake-state");
private final AttributeKey<S> fmlHandshakeState;
private S initialState;
@SuppressWarnings("unchecked")
public HandshakeMessageHandler(Class<S> stateType)
{
fmlHandshakeState = (AttributeKey<S>) ((Object)STATE);
initialState = Enum.valueOf(stateType, "START");
}
@Override
protected void channelRead0(ChannelHandlerContext ctx, FMLHandshakeMessage msg) throws Exception
{
S state = ctx.attr(fmlHandshakeState).get();
S newState = state.accept(ctx, msg);
ctx.attr(fmlHandshakeState).set(newState);
}
@Override
public void channelActive(ChannelHandlerContext ctx) throws Exception
{
ctx.attr(fmlHandshakeState).set(initialState);
}
@Override
public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception
{
S state = ctx.attr(fmlHandshakeState).get();
S newState = state.accept(ctx, null);
ctx.attr(fmlHandshakeState).set(newState);
}
}
|
# ... existing code ...
@SuppressWarnings("unchecked")
public HandshakeMessageHandler(Class<S> stateType)
{
fmlHandshakeState = (AttributeKey<S>) ((Object)STATE);
initialState = Enum.valueOf(stateType, "START");
}
@Override
# ... rest of the code ...
|
589a9d66803e323c66ef78ebf499cc49a6b65fe7
|
Source/World/Block/BlockDatabase.h
|
Source/World/Block/BlockDatabase.h
|
namespace Block
{
class Database
{
public:
static Database& get();
Database();
const BlockType& getBlock(uint8_t id) const;
const BlockType& getBlock(ID blockID) const;
const Texture::Atlas& getTextureAtlas() const;
private:
std::array<std::unique_ptr<BlockType>, (int)ID::NUM_BlockTypeS> m_blocks;
Texture::Atlas m_textures;
};
const BlockType& get(uint8_t id);
const BlockType& get(ID blockID);
}
#endif // BlockDatabase_H_INCLUDED
|
namespace Block
{
class Database
{
public:
static Database& get();
const BlockType& getBlock(uint8_t id) const;
const BlockType& getBlock(ID blockID) const;
const Texture::Atlas& getTextureAtlas() const;
private:
Database();
std::array<std::unique_ptr<BlockType>, (int)ID::NUM_BlockTypeS> m_blocks;
Texture::Atlas m_textures;
};
const BlockType& get(uint8_t id);
const BlockType& get(ID blockID);
}
#endif // BlockDatabase_H_INCLUDED
|
Fix the block database singleton
|
Fix the block database singleton
|
C
|
mit
|
Hopson97/HopsonCraft,Hopson97/HopsonCraft
|
c
|
## Code Before:
namespace Block
{
class Database
{
public:
static Database& get();
Database();
const BlockType& getBlock(uint8_t id) const;
const BlockType& getBlock(ID blockID) const;
const Texture::Atlas& getTextureAtlas() const;
private:
std::array<std::unique_ptr<BlockType>, (int)ID::NUM_BlockTypeS> m_blocks;
Texture::Atlas m_textures;
};
const BlockType& get(uint8_t id);
const BlockType& get(ID blockID);
}
#endif // BlockDatabase_H_INCLUDED
## Instruction:
Fix the block database singleton
## Code After:
namespace Block
{
class Database
{
public:
static Database& get();
const BlockType& getBlock(uint8_t id) const;
const BlockType& getBlock(ID blockID) const;
const Texture::Atlas& getTextureAtlas() const;
private:
Database();
std::array<std::unique_ptr<BlockType>, (int)ID::NUM_BlockTypeS> m_blocks;
Texture::Atlas m_textures;
};
const BlockType& get(uint8_t id);
const BlockType& get(ID blockID);
}
#endif // BlockDatabase_H_INCLUDED
|
...
public:
static Database& get();
const BlockType& getBlock(uint8_t id) const;
const BlockType& getBlock(ID blockID) const;
...
const Texture::Atlas& getTextureAtlas() const;
private:
Database();
std::array<std::unique_ptr<BlockType>, (int)ID::NUM_BlockTypeS> m_blocks;
Texture::Atlas m_textures;
...
|
47bf4aa44342acc030d5cc2047d571b93b4f8de3
|
ts3npl.py
|
ts3npl.py
|
from irc3.plugins.command import command
from irc3.plugins.cron import cron
import irc3
from teamspeak_web_utils import nplstatus
@irc3.plugin
class TS3NPL(object):
def __init__(self, bot):
self.bot = bot
self.npl_status = None
self.target_channel = '#teamspeak'
@cron('* * * * *')
def fetch_status(self):
new_status = nplstatus()
if self.npl_status is not None and new_status != self.npl_status:
if new_status:
self.bot.privmsg(self.target_channel,
'NPL-Registrations are now open!')
else:
self.bot.privmsg(self.target_channel,
'NPL-Registrations are now closed!')
self.npl_status = new_status
@command(permission='view')
def nplstatus(self, mask, target, args):
'''check Teamspeak3 NPL-Registration-status
%%nplstatus
'''
if self.npl_status is None:
self.npl_status = nplstatus()
if self.npl_status:
return 'NPL-Registrations are currently open!'
else:
return 'NPL-Registrations are currently closed!'
|
from irc3.plugins.command import command
from irc3.plugins.cron import cron
import irc3
from teamspeak_web_utils import nplstatus
@irc3.plugin
class TS3NPL(object):
def __init__(self, bot):
self.bot = bot
self.npl_status = None
config = bot.config.get('ts3npl', {})
self.target_channel = config.get('channel')
@cron('* * * * *')
def fetch_status(self):
print('checking status')
new_status = nplstatus()
if self.npl_status is not None and new_status != self.npl_status \
and self.target_channel:
if new_status:
self.bot.privmsg(self.target_channel,
'NPL-Registrations are now open!')
else:
self.bot.privmsg(self.target_channel,
'NPL-Registrations are now closed!')
self.npl_status = new_status
@command(permission='view')
def nplstatus(self, mask, target, args):
'''check Teamspeak3 NPL-Registration-status
%%nplstatus
'''
if self.npl_status is None:
self.npl_status = nplstatus()
if self.npl_status:
return 'NPL-Registrations are currently open!'
else:
return 'NPL-Registrations are currently closed!'
|
Use config instead of hardcoding target channel
|
Use config instead of hardcoding target channel
|
Python
|
mit
|
Thor77/TeamspeakIRC
|
python
|
## Code Before:
from irc3.plugins.command import command
from irc3.plugins.cron import cron
import irc3
from teamspeak_web_utils import nplstatus
@irc3.plugin
class TS3NPL(object):
def __init__(self, bot):
self.bot = bot
self.npl_status = None
self.target_channel = '#teamspeak'
@cron('* * * * *')
def fetch_status(self):
new_status = nplstatus()
if self.npl_status is not None and new_status != self.npl_status:
if new_status:
self.bot.privmsg(self.target_channel,
'NPL-Registrations are now open!')
else:
self.bot.privmsg(self.target_channel,
'NPL-Registrations are now closed!')
self.npl_status = new_status
@command(permission='view')
def nplstatus(self, mask, target, args):
'''check Teamspeak3 NPL-Registration-status
%%nplstatus
'''
if self.npl_status is None:
self.npl_status = nplstatus()
if self.npl_status:
return 'NPL-Registrations are currently open!'
else:
return 'NPL-Registrations are currently closed!'
## Instruction:
Use config instead of hardcoding target channel
## Code After:
from irc3.plugins.command import command
from irc3.plugins.cron import cron
import irc3
from teamspeak_web_utils import nplstatus
@irc3.plugin
class TS3NPL(object):
def __init__(self, bot):
self.bot = bot
self.npl_status = None
config = bot.config.get('ts3npl', {})
self.target_channel = config.get('channel')
@cron('* * * * *')
def fetch_status(self):
print('checking status')
new_status = nplstatus()
if self.npl_status is not None and new_status != self.npl_status \
and self.target_channel:
if new_status:
self.bot.privmsg(self.target_channel,
'NPL-Registrations are now open!')
else:
self.bot.privmsg(self.target_channel,
'NPL-Registrations are now closed!')
self.npl_status = new_status
@command(permission='view')
def nplstatus(self, mask, target, args):
'''check Teamspeak3 NPL-Registration-status
%%nplstatus
'''
if self.npl_status is None:
self.npl_status = nplstatus()
if self.npl_status:
return 'NPL-Registrations are currently open!'
else:
return 'NPL-Registrations are currently closed!'
|
# ... existing code ...
def __init__(self, bot):
self.bot = bot
self.npl_status = None
config = bot.config.get('ts3npl', {})
self.target_channel = config.get('channel')
@cron('* * * * *')
def fetch_status(self):
print('checking status')
new_status = nplstatus()
if self.npl_status is not None and new_status != self.npl_status \
and self.target_channel:
if new_status:
self.bot.privmsg(self.target_channel,
'NPL-Registrations are now open!')
# ... rest of the code ...
|
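One point worth spelling out about the lookup above: bot.config.get('ts3npl', {}) returns an empty dict when no such section exists, so target_channel becomes None and the `and self.target_channel` guard keeps the cron job from messaging anywhere. A stand-alone sketch of that chain (the dict contents are illustrative, not taken from the repository):
# With a configured section:
bot_config = {'ts3npl': {'channel': '#teamspeak'}}
print(bot_config.get('ts3npl', {}).get('channel'))  # -> '#teamspeak'
# Without one, the chained lookups degrade to None instead of raising:
print({}.get('ts3npl', {}).get('channel'))  # -> None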
34c0c6c73a65da3120aa52600254afc909e9a3bc
|
pytach/wsgi.py
|
pytach/wsgi.py
|
import bottle
from bottle import route, run
from web import web
import config
app = application = bottle.Bottle()
app.merge(web.app)
config.arguments['--verbose'] = True
if __name__ == '__main__':
app.run(host='0.0.0.0', port=8082, debug=True)
|
import bottle
import config
from web import web
app = application = bottle.Bottle()
app.merge(web.app)
config.arguments['--verbose'] = True
|
Remove unused main and unused imports
|
Remove unused main and unused imports
|
Python
|
mit
|
gotling/PyTach,gotling/PyTach,gotling/PyTach
|
python
|
## Code Before:
import bottle
from bottle import route, run
from web import web
import config
app = application = bottle.Bottle()
app.merge(web.app)
config.arguments['--verbose'] = True
if __name__ == '__main__':
app.run(host='0.0.0.0', port=8082, debug=True)
## Instruction:
Remove unused main and unused imports
## Code After:
import bottle
import config
from web import web
app = application = bottle.Bottle()
app.merge(web.app)
config.arguments['--verbose'] = True
|
// ... existing code ...
import bottle
import config
from web import web
app = application = bottle.Bottle()
app.merge(web.app)
config.arguments['--verbose'] = True
// ... rest of the code ...
|
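With the __main__ block removed, the module only exposes the `application` object for a WSGI server to import. A minimal way to serve it with the standard library, assuming the pytach package is importable; host and port here are illustrative:
from wsgiref.simple_server import make_server
from pytach.wsgi import application  # the module shown above
# Bottle app instances are WSGI callables, so wsgiref can serve one directly.
make_server('0.0.0.0', 8082, application).serve_forever()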
e5083fd56caa271afbdbad1c59009f7e1ea465b3
|
content/app.py
|
content/app.py
|
from flask import Flask
from .extensions import envcfg, apierrors, applogging
from .blueprints.status import blueprint as status_bp
from .blueprints.content import blueprint as content_bp
from .blueprints.swagger import blueprint as swagger_bp
def create_app():
app = Flask('content')
app.config.from_object('content.default_settings')
envcfg.init_app(app)
applogging.init_app(app)
apierrors.init_app(app)
app.register_blueprint(status_bp)
app.register_blueprint(content_bp, url_prefix='/v1')
app.register_blueprint(swagger_bp)
return app
|
from flask import Flask, jsonify
from botocore.exceptions import ClientError
from .extensions import envcfg, apierrors, applogging
from .blueprints.status import blueprint as status_bp
from .blueprints.content import blueprint as content_bp
from .blueprints.swagger import blueprint as swagger_bp
def create_app():
app = Flask('content')
app.config.from_object('content.default_settings')
envcfg.init_app(app)
applogging.init_app(app)
apierrors.init_app(app)
app.register_blueprint(status_bp)
app.register_blueprint(content_bp, url_prefix='/v1')
app.register_blueprint(swagger_bp)
app.register_error_handler(ClientError, _no_such_key)
return app
def _no_such_key(error):
# Boto3 exceptions are idiotic.
if error.response['Error']['Code'] != "NoSuchEntity":
return jsonify({'error': 'No such content'}), 404
else:
raise error
|
Return 404 when no content found.
|
Return 404 when no content found.
|
Python
|
bsd-3-clause
|
Zipmatch/zipmatch-content,Zipmatch/zipmatch-content
|
python
|
## Code Before:
from flask import Flask
from .extensions import envcfg, apierrors, applogging
from .blueprints.status import blueprint as status_bp
from .blueprints.content import blueprint as content_bp
from .blueprints.swagger import blueprint as swagger_bp
def create_app():
app = Flask('content')
app.config.from_object('content.default_settings')
envcfg.init_app(app)
applogging.init_app(app)
apierrors.init_app(app)
app.register_blueprint(status_bp)
app.register_blueprint(content_bp, url_prefix='/v1')
app.register_blueprint(swagger_bp)
return app
## Instruction:
Return 404 when no content found.
## Code After:
from flask import Flask, jsonify
from botocore.exceptions import ClientError
from .extensions import envcfg, apierrors, applogging
from .blueprints.status import blueprint as status_bp
from .blueprints.content import blueprint as content_bp
from .blueprints.swagger import blueprint as swagger_bp
def create_app():
app = Flask('content')
app.config.from_object('content.default_settings')
envcfg.init_app(app)
applogging.init_app(app)
apierrors.init_app(app)
app.register_blueprint(status_bp)
app.register_blueprint(content_bp, url_prefix='/v1')
app.register_blueprint(swagger_bp)
app.register_error_handler(ClientError, _no_such_key)
return app
def _no_such_key(error):
# Boto3 exceptions are idiotic.
if error.response['Error']['Code'] != "NoSuchEntity":
return jsonify({'error': 'No such content'}), 404
else:
raise error
|
...
from flask import Flask, jsonify
from botocore.exceptions import ClientError
from .extensions import envcfg, apierrors, applogging
from .blueprints.status import blueprint as status_bp
...
app.register_blueprint(status_bp)
app.register_blueprint(content_bp, url_prefix='/v1')
app.register_blueprint(swagger_bp)
app.register_error_handler(ClientError, _no_such_key)
return app
def _no_such_key(error):
# Boto3 exceptions are idiotic.
if error.response['Error']['Code'] != "NoSuchEntity":
return jsonify({'error': 'No such content'}), 404
else:
raise error
...
|
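register_error_handler maps an exception class to a handler for the whole app, which is what routes the boto3 ClientError above to a JSON 404. A self-contained sketch of the same mechanism with a made-up exception class (none of these names come from the service itself):
from flask import Flask, jsonify
class MissingContent(Exception):
    """Stand-in for the ClientError handled above."""
def handle_missing(error):
    return jsonify({'error': 'No such content'}), 404
app = Flask(__name__)
app.register_error_handler(MissingContent, handle_missing)
@app.route('/v1/<name>')
def fetch(name):
    raise MissingContent(name)
# app.test_client().get('/v1/anything') now returns status 404 with the JSON body.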
35f286ac175d5480d3dbb7261205f12dd97144bb
|
checkmail.py
|
checkmail.py
|
import pyclamav
import os
import re
import email
import argparse
import sys
import tempfile
mail_split_re = re.compile(r'\s(?=From -)')
def print_message(message, signature=None):
parsed = email.message_from_string(message)
print "From: {0}, Subject: {1}, Signature: {2}".format(parsed["From"],
parsed["Subject"],
signature)
def scan_mail(message):
temp_message = tempfile.NamedTemporaryFile(delete=False)
with temp_message as f:
f.write(message)
try:
result = pyclamav.scanfile(temp_message.name)
if not result[0]:
return
print_message(message, result[1])
finally:
os.remove(temp_message.name)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('mailfile', nargs='?', type=argparse.FileType('r'),
default=sys.stdin,
help="Thunderbird mail file to parse, if not provided input is taken from STDIN")
args = parser.parse_args()
for msg in mail_split_re.split(args.mailfile.read()):
scan_mail(msg)
|
import pyclamav
import os
import email
import argparse
import sys
import tempfile
import mailbox
def print_message(parsed, signature=None):
print "From: {0}, Subject: {1}, Signature: {2}".format(parsed["From"],
parsed["Subject"],
signature)
def scan_mail(message):
temp_message = tempfile.NamedTemporaryFile(delete=False)
with temp_message as f:
f.write(message.as_string())
try:
result = pyclamav.scanfile(temp_message.name)
if not result[0]:
return
print_message(message, result[1])
finally:
os.remove(temp_message.name)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('mailfile', nargs='?', type=argparse.FileType('r'),
default=sys.stdin,
help="mbox mail file to parse, if not provided input is taken from STDIN")
args = parser.parse_args()
mbox = mailbox.mbox(args.mailfile.name)
for msg in mbox:
scan_mail(msg)
|
Use the python MBox class rather than parsing the mailbox manually
|
Use the python MBox class rather than parsing the mailbox manually
|
Python
|
mit
|
DanSearle/CheckMail
|
python
|
## Code Before:
import pyclamav
import os
import re
import email
import argparse
import sys
import tempfile
mail_split_re = re.compile(r'\s(?=From -)')
def print_message(message, signature=None):
parsed = email.message_from_string(message)
print "From: {0}, Subject: {1}, Signature: {2}".format(parsed["From"],
parsed["Subject"],
signature)
def scan_mail(message):
temp_message = tempfile.NamedTemporaryFile(delete=False)
with temp_message as f:
f.write(message)
try:
result = pyclamav.scanfile(temp_message.name)
if not result[0]:
return
print_message(message, result[1])
finally:
os.remove(temp_message.name)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('mailfile', nargs='?', type=argparse.FileType('r'),
default=sys.stdin,
help="Thunderbird mail file to parse, if not provided input is taken from STDIN")
args = parser.parse_args()
for msg in mail_split_re.split(args.mailfile.read()):
scan_mail(msg)
## Instruction:
Use the python MBox class rather than parsing the mailbox manually
## Code After:
import pyclamav
import os
import email
import argparse
import sys
import tempfile
import mailbox
def print_message(parsed, signature=None):
print "From: {0}, Subject: {1}, Signature: {2}".format(parsed["From"],
parsed["Subject"],
signature)
def scan_mail(message):
temp_message = tempfile.NamedTemporaryFile(delete=False)
with temp_message as f:
f.write(message.as_string())
try:
result = pyclamav.scanfile(temp_message.name)
if not result[0]:
return
print_message(message, result[1])
finally:
os.remove(temp_message.name)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('mailfile', nargs='?', type=argparse.FileType('r'),
default=sys.stdin,
help="mbox mail file to parse, if not provided input is taken from STDIN")
args = parser.parse_args()
mbox = mailbox.mbox(args.mailfile.name)
for msg in mbox:
scan_mail(msg)
|
...
import pyclamav
import os
import email
import argparse
import sys
import tempfile
import mailbox
def print_message(parsed, signature=None):
print "From: {0}, Subject: {1}, Signature: {2}".format(parsed["From"],
parsed["Subject"],
signature)
...
def scan_mail(message):
temp_message = tempfile.NamedTemporaryFile(delete=False)
with temp_message as f:
f.write(message.as_string())
try:
result = pyclamav.scanfile(temp_message.name)
if not result[0]:
...
parser = argparse.ArgumentParser()
parser.add_argument('mailfile', nargs='?', type=argparse.FileType('r'),
default=sys.stdin,
help="mbox mail file to parse, if not provided input is taken from STDIN")
args = parser.parse_args()
mbox = mailbox.mbox(args.mailfile.name)
for msg in mbox:
scan_mail(msg)
...
|
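Since the change, print_message indexes headers directly on the object it receives; mailbox.mbox yields mboxMessage objects that support exactly that mapping-style access. A tiny sketch, with an illustrative filename:
import mailbox
for msg in mailbox.mbox('archive.mbox'):
    # mboxMessage behaves like email.message.Message: headers are available
    # by name, and missing headers come back as None.
    print(msg['From'], msg['Subject'])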
0d5072aea49ed5c34bc3c140a5019e59506135a4
|
menus/database_setup.py
|
menus/database_setup.py
|
import sys
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine
Base = declarative_base()
class Restaurant(Base):
__tablename__ = 'restaurant'
name = Column(String(80), nullable = False)
description = Column(String(250))
id = Column(Integer, primary_key = True)
@property
def serialize(self):
return {
'name': self.name,
'description': self.description,
'id': self.id,
}
class MenuItem(Base):
__tablename__ = 'menu_item'
name = Column(String(80), nullable = False)
id = Column(Integer,primary_key = True)
course = Column(String(250))
description = Column(String(250))
price = Column(String(8))
restaurant_id = Column(Integer, ForeignKey('restaurant.id'))
restaurant = relationship(Restaurant)
@property
def serialize(self):
return {
'name': self.name,
'description': self.description,
'id': self.id,
'price': self.price,
'course': self.course,
}
engine = create_engine('sqlite:///restaurantmenu.db')
Base.metadata.create_all(engine)
|
import sys
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine
Base = declarative_base()
class Restaurant(Base):
__tablename__ = 'restaurant'
name = Column(String(80), nullable = False)
id = Column(Integer, primary_key = True)
@property
def serialize(self):
return {
'name': self.name,
'id': self.id,
}
class MenuItem(Base):
__tablename__ = 'menu_item'
name = Column(String(80), nullable = False)
id = Column(Integer,primary_key = True)
course = Column(String(250))
description = Column(String(250))
price = Column(String(8))
restaurant_id = Column(Integer, ForeignKey('restaurant.id'))
restaurant = relationship(Restaurant)
@property
def serialize(self):
return {
'name': self.name,
'description': self.description,
'id': self.id,
'price': self.price,
'course': self.course,
}
engine = create_engine('sqlite:///restaurantmenu.db')
Base.metadata.create_all(engine)
|
Remove description from Restaurant class
|
bug: Remove description from Restaurant class
|
Python
|
mit
|
gsbullmer/restaurant-menu-directory,gsbullmer/restaurant-menu-directory
|
python
|
## Code Before:
import sys
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine
Base = declarative_base()
class Restaurant(Base):
__tablename__ = 'restaurant'
name = Column(String(80), nullable = False)
description = Column(String(250))
id = Column(Integer, primary_key = True)
@property
def serialize(self):
return {
'name': self.name,
'description': self.description,
'id': self.id,
}
class MenuItem(Base):
__tablename__ = 'menu_item'
name = Column(String(80), nullable = False)
id = Column(Integer,primary_key = True)
course = Column(String(250))
description = Column(String(250))
price = Column(String(8))
restaurant_id = Column(Integer, ForeignKey('restaurant.id'))
restaurant = relationship(Restaurant)
@property
def serialize(self):
return {
'name': self.name,
'description': self.description,
'id': self.id,
'price': self.price,
'course': self.course,
}
engine = create_engine('sqlite:///restaurantmenu.db')
Base.metadata.create_all(engine)
## Instruction:
bug: Remove description from Restaurant class
## Code After:
import sys
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine
Base = declarative_base()
class Restaurant(Base):
__tablename__ = 'restaurant'
name = Column(String(80), nullable = False)
id = Column(Integer, primary_key = True)
@property
def serialize(self):
return {
'name': self.name,
'id': self.id,
}
class MenuItem(Base):
__tablename__ = 'menu_item'
name = Column(String(80), nullable = False)
id = Column(Integer,primary_key = True)
course = Column(String(250))
description = Column(String(250))
price = Column(String(8))
restaurant_id = Column(Integer, ForeignKey('restaurant.id'))
restaurant = relationship(Restaurant)
@property
def serialize(self):
return {
'name': self.name,
'description': self.description,
'id': self.id,
'price': self.price,
'course': self.course,
}
engine = create_engine('sqlite:///restaurantmenu.db')
Base.metadata.create_all(engine)
|
# ... existing code ...
__tablename__ = 'restaurant'
name = Column(String(80), nullable = False)
id = Column(Integer, primary_key = True)
@property
# ... modified code ...
def serialize(self):
return {
'name': self.name,
'id': self.id,
}
# ... rest of the code ...
|
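A brief aside on the record above: the serialize properties exist so that view code can turn ORM rows into plain dicts. The usage sketch below is mine, not part of the commit; the module name database_setup and the session wiring are assumptions.

# Hypothetical consumer of the serialize property defined above; assumes the
# models live in a module named database_setup and the SQLite file exists.
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from database_setup import Restaurant

engine = create_engine('sqlite:///restaurantmenu.db')
session = sessionmaker(bind=engine)()

restaurants = session.query(Restaurant).all()
payload = [r.serialize for r in restaurants]   # plain dicts, ready for json.dumps
print(payload)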
d55a66a20e9ed480fb7be939e2988f7267ed3a7a
|
trianglify/src/main/java/com/sdsmdg/kd/trianglify/models/Triangle.java
|
trianglify/src/main/java/com/sdsmdg/kd/trianglify/models/Triangle.java
|
package com.sdsmdg.kd.trianglify.models;
import android.graphics.Point;
public class Triangle {
public Point a;
public Point b;
public Point c;
public Triangle (Point a, Point b, Point c) {
this.a = a;
this.b = b;
this.c = c;
}
}
|
package com.sdsmdg.kd.trianglify.models;
import android.graphics.Point;
public class Triangle {
public Point a;
public Point b;
public Point c;
public Triangle (Point a, Point b, Point c) {
this.a = a;
this.b = b;
this.c = c;
}
private float sign (Point p1, Point p2, Point p3) {
return (p1.x - p3.x) * (p2.y - p3.y) - (p1.y - p3.y) * (p2.x - p3.x);
}
public boolean contains (Point p) {
boolean pab, pbc, pca;
pab = sign(p, a, b) < 0f;
pbc = sign(p, b, c) < 0f;
if (pab == pbc)
return false;
pca = sign(p, c, a) < 0f;
return (pab == pca);
}
}
|
Add 'contains()' to check if a point is in interior of triangle.
|
Add 'contains()' to check if a point is in interior of triangle.
|
Java
|
mit
|
sdsmdg/trianglify
|
java
|
## Code Before:
package com.sdsmdg.kd.trianglify.models;
import android.graphics.Point;
public class Triangle {
public Point a;
public Point b;
public Point c;
public Triangle (Point a, Point b, Point c) {
this.a = a;
this.b = b;
this.c = c;
}
}
## Instruction:
Add 'contains()' to check if a point is in interior of triangle.
## Code After:
package com.sdsmdg.kd.trianglify.models;
import android.graphics.Point;
public class Triangle {
public Point a;
public Point b;
public Point c;
public Triangle (Point a, Point b, Point c) {
this.a = a;
this.b = b;
this.c = c;
}
private float sign (Point p1, Point p2, Point p3) {
return (p1.x - p3.x) * (p2.y - p3.y) - (p1.y - p3.y) * (p2.x - p3.x);
}
public boolean contains (Point p) {
boolean pab, pbc, pca;
pab = sign(p, a, b) < 0f;
pbc = sign(p, b, c) < 0f;
if (pab == pbc)
return false;
pca = sign(p, c, a) < 0f;
return (pab == pca);
}
}
|
# ... existing code ...
this.b = b;
this.c = c;
}
private float sign (Point p1, Point p2, Point p3) {
return (p1.x - p3.x) * (p2.y - p3.y) - (p1.y - p3.y) * (p2.x - p3.x);
}
public boolean contains (Point p) {
boolean pab, pbc, pca;
pab = sign(p, a, b) < 0f;
pbc = sign(p, b, c) < 0f;
if (pab == pbc)
return false;
pca = sign(p, c, a) < 0f;
return (pab == pca);
}
}
# ... rest of the code ...
|
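For readers skimming the record above, the contains() logic is the standard half-plane (cross-product sign) test. A small Python sketch of the same idea, with names of my own choosing rather than the project's:

# Sign-based point-in-triangle test: a point is inside only when it falls on
# the same side of all three directed edges; sign() is the 2D cross product.
def sign(p1, p2, p3):
    return (p1[0] - p3[0]) * (p2[1] - p3[1]) - (p1[1] - p3[1]) * (p2[0] - p3[0])

def contains(a, b, c, p):
    d1, d2, d3 = sign(p, a, b), sign(p, b, c), sign(p, c, a)
    has_neg = d1 < 0 or d2 < 0 or d3 < 0
    has_pos = d1 > 0 or d2 > 0 or d3 > 0
    return not (has_neg and has_pos)

print(contains((0, 0), (4, 0), (0, 4), (1, 1)))  # True: inside
print(contains((0, 0), (4, 0), (0, 4), (5, 5)))  # False: outside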
1e76a9c7ee030875929a65d9f30194166dcd62ef
|
docs/reencode.py
|
docs/reencode.py
|
"""Helper binary to reencode a text file from UTF-8 to ISO-8859-1."""
import argparse
import pathlib
def _main() -> None:
parser = argparse.ArgumentParser(allow_abbrev=False)
parser.add_argument('input', type=pathlib.Path)
parser.add_argument('output', type=pathlib.Path)
opts = parser.parse_args()
text = opts.input.read_text(encoding='utf-8')
with opts.output.open(mode='xt', encoding='latin-1', newline='\n') as file:
file.write(text)
if __name__ == '__main__':
_main()
|
"""Helper binary to reencode a text file from UTF-8 to ISO-8859-1."""
import argparse
import pathlib
def _main() -> None:
parser = argparse.ArgumentParser(allow_abbrev=False)
parser.add_argument('input', type=pathlib.Path)
parser.add_argument('output', type=pathlib.Path)
opts = parser.parse_args()
text = opts.input.read_text(encoding='utf-8')
# Force Unix-style line endings for consistent results. See
# https://github.com/bazelbuild/stardoc/issues/110.
with opts.output.open(mode='xt', encoding='latin-1', newline='\n') as file:
file.write(text)
if __name__ == '__main__':
_main()
|
Add a comment about line endings in Stardoc files.
|
Add a comment about line endings in Stardoc files.
|
Python
|
apache-2.0
|
phst/rules_elisp,phst/rules_elisp,phst/rules_elisp,phst/rules_elisp,phst/rules_elisp
|
python
|
## Code Before:
"""Helper binary to reencode a text file from UTF-8 to ISO-8859-1."""
import argparse
import pathlib
def _main() -> None:
parser = argparse.ArgumentParser(allow_abbrev=False)
parser.add_argument('input', type=pathlib.Path)
parser.add_argument('output', type=pathlib.Path)
opts = parser.parse_args()
text = opts.input.read_text(encoding='utf-8')
with opts.output.open(mode='xt', encoding='latin-1', newline='\n') as file:
file.write(text)
if __name__ == '__main__':
_main()
## Instruction:
Add a comment about line endings in Stardoc files.
## Code After:
"""Helper binary to reencode a text file from UTF-8 to ISO-8859-1."""
import argparse
import pathlib
def _main() -> None:
parser = argparse.ArgumentParser(allow_abbrev=False)
parser.add_argument('input', type=pathlib.Path)
parser.add_argument('output', type=pathlib.Path)
opts = parser.parse_args()
text = opts.input.read_text(encoding='utf-8')
# Force Unix-style line endings for consistent results. See
# https://github.com/bazelbuild/stardoc/issues/110.
with opts.output.open(mode='xt', encoding='latin-1', newline='\n') as file:
file.write(text)
if __name__ == '__main__':
_main()
|
# ... existing code ...
parser.add_argument('output', type=pathlib.Path)
opts = parser.parse_args()
text = opts.input.read_text(encoding='utf-8')
# Force Unix-style line endings for consistent results. See
# https://github.com/bazelbuild/stardoc/issues/110.
with opts.output.open(mode='xt', encoding='latin-1', newline='\n') as file:
file.write(text)
# ... rest of the code ...
|
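The comment added in the record above concerns newline translation in text mode. The short illustration below is my own, not taken from the Stardoc issue; it shows why forcing newline='\n' keeps the reencoded output byte-identical across platforms.

# In text mode Python translates '\n' to os.linesep on write unless
# newline='\n' is passed, so the default would yield '\r\n' on Windows.
import os
import tempfile

text = 'first line\nsecond line\n'
with tempfile.TemporaryDirectory() as d:
    default_path = os.path.join(d, 'default.txt')
    unix_path = os.path.join(d, 'unix.txt')
    with open(default_path, 'w', encoding='latin-1') as f:             # platform endings
        f.write(text)
    with open(unix_path, 'w', encoding='latin-1', newline='\n') as f:  # forced '\n'
        f.write(text)
    print(open(default_path, 'rb').read())  # b'...\r\n...' on Windows
    print(open(unix_path, 'rb').read())     # always b'...\n...'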
738767fd50fb9c824b80c6e25337e851e116bac6
|
trunk/ClientOfMutabilityDetector/trunk/ClientOfMutabilityDetector/src/main/java/org/mutabilitydetector/unittesting/assertionbenchmarks/CheckSomeClass.java
|
trunk/ClientOfMutabilityDetector/trunk/ClientOfMutabilityDetector/src/main/java/org/mutabilitydetector/unittesting/assertionbenchmarks/CheckSomeClass.java
|
/*
* Mutability Detector
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* Further licensing information for this project can be found in
* license/LICENSE.txt
*/
package org.mutabilitydetector.unittesting.assertionbenchmarks;
import java.math.BigDecimal;
import org.mutabilitydetector.cli.CommandLineOptions;
import org.mutabilitydetector.cli.RunMutabilityDetector;
import org.mutabilitydetector.repackaged.com.google.classpath.ClassPath;
import org.mutabilitydetector.repackaged.com.google.classpath.ClassPathFactory;
public class CheckSomeClass {
public static void main(String[] args) {
//checkClass(TestIllegalFieldValueException.class);
// checkClass(DurationField.class);
checkClass(BigDecimal.class);
}
private static void checkClass(Class<?> toAnalyse) {
ClassPath cp = new ClassPathFactory().createFromJVM();
String match = toAnalyse.getName().replace("$", "\\$");
CommandLineOptions options = new CommandLineOptions(System.out, "-v", "-match", match);
new RunMutabilityDetector(cp, options).run();
}
}
|
/*
* Mutability Detector
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* Further licensing information for this project can be found in
* license/LICENSE.txt
*/
package org.mutabilitydetector.unittesting.assertionbenchmarks;
import java.math.BigDecimal;
import org.mutabilitydetector.cli.CommandLineOptions;
import org.mutabilitydetector.cli.NamesFromClassResources;
import org.mutabilitydetector.cli.RunMutabilityDetector;
import org.mutabilitydetector.repackaged.com.google.classpath.ClassPath;
import org.mutabilitydetector.repackaged.com.google.classpath.ClassPathFactory;
public class CheckSomeClass {
public static void main(String[] args) {
//checkClass(TestIllegalFieldValueException.class);
// checkClass(DurationField.class);
checkClass(BigDecimal.class);
}
private static void checkClass(Class<?> toAnalyse) {
ClassPath cp = new ClassPathFactory().createFromJVM();
String match = toAnalyse.getName().replace("$", "\\$");
CommandLineOptions options = new CommandLineOptions(System.out, "-v", "-match", match);
new RunMutabilityDetector(cp, options, new NamesFromClassResources(options.match())).run();
}
}
|
Update client to account for change in signature.
|
Update client to account for change in signature.
git-svn-id: ed609ce04ec9e3c0bc25e071e87814dd6d976548@583 c7a0535c-eda6-11de-83d8-6d5adf01d787
|
Java
|
apache-2.0
|
MutabilityDetector/MutabilityDetector,MutabilityDetector/MutabilityDetector
|
java
|
## Code Before:
/*
* Mutability Detector
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* Further licensing information for this project can be found in
* license/LICENSE.txt
*/
package org.mutabilitydetector.unittesting.assertionbenchmarks;
import java.math.BigDecimal;
import org.mutabilitydetector.cli.CommandLineOptions;
import org.mutabilitydetector.cli.RunMutabilityDetector;
import org.mutabilitydetector.repackaged.com.google.classpath.ClassPath;
import org.mutabilitydetector.repackaged.com.google.classpath.ClassPathFactory;
public class CheckSomeClass {
public static void main(String[] args) {
//checkClass(TestIllegalFieldValueException.class);
// checkClass(DurationField.class);
checkClass(BigDecimal.class);
}
private static void checkClass(Class<?> toAnalyse) {
ClassPath cp = new ClassPathFactory().createFromJVM();
String match = toAnalyse.getName().replace("$", "\\$");
CommandLineOptions options = new CommandLineOptions(System.out, "-v", "-match", match);
new RunMutabilityDetector(cp, options).run();
}
}
## Instruction:
Update client to account for change in signature.
git-svn-id: ed609ce04ec9e3c0bc25e071e87814dd6d976548@583 c7a0535c-eda6-11de-83d8-6d5adf01d787
## Code After:
/*
* Mutability Detector
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* Further licensing information for this project can be found in
* license/LICENSE.txt
*/
package org.mutabilitydetector.unittesting.assertionbenchmarks;
import java.math.BigDecimal;
import org.mutabilitydetector.cli.CommandLineOptions;
import org.mutabilitydetector.cli.NamesFromClassResources;
import org.mutabilitydetector.cli.RunMutabilityDetector;
import org.mutabilitydetector.repackaged.com.google.classpath.ClassPath;
import org.mutabilitydetector.repackaged.com.google.classpath.ClassPathFactory;
public class CheckSomeClass {
public static void main(String[] args) {
//checkClass(TestIllegalFieldValueException.class);
// checkClass(DurationField.class);
checkClass(BigDecimal.class);
}
private static void checkClass(Class<?> toAnalyse) {
ClassPath cp = new ClassPathFactory().createFromJVM();
String match = toAnalyse.getName().replace("$", "\\$");
CommandLineOptions options = new CommandLineOptions(System.out, "-v", "-match", match);
new RunMutabilityDetector(cp, options, new NamesFromClassResources(options.match())).run();
}
}
|
// ... existing code ...
import java.math.BigDecimal;
import org.mutabilitydetector.cli.CommandLineOptions;
import org.mutabilitydetector.cli.NamesFromClassResources;
import org.mutabilitydetector.cli.RunMutabilityDetector;
import org.mutabilitydetector.repackaged.com.google.classpath.ClassPath;
import org.mutabilitydetector.repackaged.com.google.classpath.ClassPathFactory;
// ... modified code ...
ClassPath cp = new ClassPathFactory().createFromJVM();
String match = toAnalyse.getName().replace("$", "\\$");
CommandLineOptions options = new CommandLineOptions(System.out, "-v", "-match", match);
new RunMutabilityDetector(cp, options, new NamesFromClassResources(options.match())).run();
}
}
// ... rest of the code ...
|
8a1002dd7afd008ff85699f95ae11884b96f60ac
|
nanoservice/crypto.py
|
nanoservice/crypto.py
|
import hmac
import hashlib
from .error import AuthenticatorInvalidSignature
class Authenticator(object):
""" This object is used to authenticate messages """
def __init__(self, secret, digestmod=None):
assert secret
self.secret = secret.encode('utf-8')
self.digestmod = digestmod or hashlib.sha256
self.sig_size = self.digestmod().digest_size * 2
def sign(self, encoded):
""" Return authentication signature of encoded bytes """
h = hmac.new(self.secret, encoded, digestmod=self.digestmod)
return h.hexdigest().encode('utf-8')
def signed(self, encoded):
""" Sign encoded bytes and append signature """
signature = self.sign(encoded)
return signature + encoded
def unsigned(self, encoded):
""" Remove signature and return just the message """
_, message = self.split(encoded)
return message
def split(self, encoded):
""" Split into signature and message """
signature = encoded[:self.sig_size]
message = encoded[self.sig_size:]
return signature, message
def auth(self, encoded):
""" Validate integrity of encoded bytes """
signature, message = self.split(encoded)
computed = self.sign(message)
if not hmac.compare_digest(signature, computed):
raise AuthenticatorInvalidSignature
|
import hmac
import hashlib
from .error import AuthenticatorInvalidSignature
class Authenticator(object):
""" This object is used to authenticate messages """
def __init__(self, secret, digestmod=None):
assert secret
self.secret = secret.encode('utf-8')
self.digestmod = digestmod or hashlib.sha256
self.sig_size = self.digestmod().digest_size * 2
def sign(self, encoded):
""" Return authentication signature of encoded bytes """
h = hmac.new(self.secret, encoded, digestmod=self.digestmod)
return h.hexdigest().encode('utf-8')
def signed(self, encoded):
""" Sign encoded bytes and append signature """
signature = self.sign(encoded)
return encoded + signature
def unsigned(self, encoded):
""" Remove signature and return just the message """
message, _ = self.split(encoded)
return message
def split(self, encoded):
""" Split into signature and message """
maxlen = len(encoded) - self.sig_size
message = encoded[:maxlen]
signature = encoded[-self.sig_size:]
return message, signature
def auth(self, encoded):
""" Validate integrity of encoded bytes """
message, signature = self.split(encoded)
computed = self.sign(message)
if not hmac.compare_digest(signature, computed):
raise AuthenticatorInvalidSignature
|
Put signature at the end to permit pub-sub to happen
|
Put signature at the end to permit pub-sub to happen
|
Python
|
mit
|
walkr/nanoservice
|
python
|
## Code Before:
import hmac
import hashlib
from .error import AuthenticatorInvalidSignature
class Authenticator(object):
""" This object is used to authenticate messages """
def __init__(self, secret, digestmod=None):
assert secret
self.secret = secret.encode('utf-8')
self.digestmod = digestmod or hashlib.sha256
self.sig_size = self.digestmod().digest_size * 2
def sign(self, encoded):
""" Return authentication signature of encoded bytes """
h = hmac.new(self.secret, encoded, digestmod=self.digestmod)
return h.hexdigest().encode('utf-8')
def signed(self, encoded):
""" Sign encoded bytes and append signature """
signature = self.sign(encoded)
return signature + encoded
def unsigned(self, encoded):
""" Remove signature and return just the message """
_, message = self.split(encoded)
return message
def split(self, encoded):
""" Split into signature and message """
signature = encoded[:self.sig_size]
message = encoded[self.sig_size:]
return signature, message
def auth(self, encoded):
""" Validate integrity of encoded bytes """
signature, message = self.split(encoded)
computed = self.sign(message)
if not hmac.compare_digest(signature, computed):
raise AuthenticatorInvalidSignature
## Instruction:
Put signature at the end to permit pub-sub to happen
## Code After:
import hmac
import hashlib
from .error import AuthenticatorInvalidSignature
class Authenticator(object):
""" This object is used to authenticate messages """
def __init__(self, secret, digestmod=None):
assert secret
self.secret = secret.encode('utf-8')
self.digestmod = digestmod or hashlib.sha256
self.sig_size = self.digestmod().digest_size * 2
def sign(self, encoded):
""" Return authentication signature of encoded bytes """
h = hmac.new(self.secret, encoded, digestmod=self.digestmod)
return h.hexdigest().encode('utf-8')
def signed(self, encoded):
""" Sign encoded bytes and append signature """
signature = self.sign(encoded)
return encoded + signature
def unsigned(self, encoded):
""" Remove signature and return just the message """
message, _ = self.split(encoded)
return message
def split(self, encoded):
""" Split into signature and message """
maxlen = len(encoded) - self.sig_size
message = encoded[:maxlen]
signature = encoded[-self.sig_size:]
return message, signature
def auth(self, encoded):
""" Validate integrity of encoded bytes """
message, signature = self.split(encoded)
computed = self.sign(message)
if not hmac.compare_digest(signature, computed):
raise AuthenticatorInvalidSignature
|
// ... existing code ...
def signed(self, encoded):
""" Sign encoded bytes and append signature """
signature = self.sign(encoded)
return encoded + signature
def unsigned(self, encoded):
""" Remove signature and return just the message """
message, _ = self.split(encoded)
return message
def split(self, encoded):
""" Split into signature and message """
maxlen = len(encoded) - self.sig_size
message = encoded[:maxlen]
signature = encoded[-self.sig_size:]
return message, signature
def auth(self, encoded):
""" Validate integrity of encoded bytes """
message, signature = self.split(encoded)
computed = self.sign(message)
if not hmac.compare_digest(signature, computed):
raise AuthenticatorInvalidSignature
// ... rest of the code ...
|
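The motivation in the record above, keeping the start of the wire message intact so pub-sub subscription matching still works, can be illustrated with a short standard-library sketch. The topic framing here is my assumption for illustration, not nanoservice's actual wire format.

# Pub/sub sockets typically match subscribers by comparing the *leading*
# bytes of a message against the subscribed topic, so the topic must stay
# at the front; a prepended signature would break the prefix match.
import hashlib
import hmac

secret = b'secret'
topic, body = b'weather:', b'22C'
payload = topic + body

sig = hmac.new(secret, payload, hashlib.sha256).hexdigest().encode()

signed_prepended = sig + payload   # old scheme: topic no longer at offset 0
signed_appended = payload + sig    # new scheme: prefix match still works

print(signed_prepended.startswith(topic))  # False -> subscription filter misses it
print(signed_appended.startswith(topic))   # True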
c03ed8c4aa935e2490178b52fbaa73675137f626
|
tests/amd64/wait4_WNOHANG/wait4_WNOHANG.c
|
tests/amd64/wait4_WNOHANG/wait4_WNOHANG.c
|
static int
parent_main(pid_t child_pid)
{
pid_t pid;
int status;
pid = wait4(child_pid, &status, WNOHANG, NULL);
if (pid != 0)
return (1);
return (0);
}
static void
signal_handler(int sig)
{
}
int
main(int argc, const char *argv[])
{
struct sigaction act;
struct timespec t;
pid_t child_pid;
int retval;
act.sa_handler = signal_handler;
act.sa_flags = 0;
if (sigfillset(&act.sa_mask) == -1)
return (16);
if (sigaction(SIGTERM, &act, NULL) == -1)
return (17);
child_pid = fork();
switch (child_pid) {
case -1:
return (18);
case 0:
t.tv_sec = 8;
t.tv_nsec = 0;
nanosleep(&t, NULL);
return (0);
default:
break;
}
retval = parent_main(child_pid);
kill(child_pid, SIGTERM);
return (retval);
}
|
static int
parent_main(pid_t child_pid)
{
pid_t pid;
int status;
pid = wait4(child_pid, &status, WNOHANG, NULL);
if (pid != 0)
return (1);
return (0);
}
int
main(int argc, const char *argv[])
{
pid_t child_pid;
int retval;
child_pid = fork();
switch (child_pid) {
case -1:
return (18);
case 0:
for (;;)
;
return (0);
default:
break;
}
retval = parent_main(child_pid);
kill(child_pid, SIGKILL);
return (retval);
}
|
Simplify the test for wait4(2) with WNOHANG.
|
Simplify the test for wait4(2) with WNOHANG.
|
C
|
mit
|
SumiTomohiko/fsyscall2,SumiTomohiko/fsyscall2,SumiTomohiko/fsyscall2,SumiTomohiko/fsyscall2,SumiTomohiko/fsyscall2
|
c
|
## Code Before:
static int
parent_main(pid_t child_pid)
{
pid_t pid;
int status;
pid = wait4(child_pid, &status, WNOHANG, NULL);
if (pid != 0)
return (1);
return (0);
}
static void
signal_handler(int sig)
{
}
int
main(int argc, const char *argv[])
{
struct sigaction act;
struct timespec t;
pid_t child_pid;
int retval;
act.sa_handler = signal_handler;
act.sa_flags = 0;
if (sigfillset(&act.sa_mask) == -1)
return (16);
if (sigaction(SIGTERM, &act, NULL) == -1)
return (17);
child_pid = fork();
switch (child_pid) {
case -1:
return (18);
case 0:
t.tv_sec = 8;
t.tv_nsec = 0;
nanosleep(&t, NULL);
return (0);
default:
break;
}
retval = parent_main(child_pid);
kill(child_pid, SIGTERM);
return (retval);
}
## Instruction:
Simplify the test for wait4(2) with WNOHANG.
## Code After:
static int
parent_main(pid_t child_pid)
{
pid_t pid;
int status;
pid = wait4(child_pid, &status, WNOHANG, NULL);
if (pid != 0)
return (1);
return (0);
}
int
main(int argc, const char *argv[])
{
pid_t child_pid;
int retval;
child_pid = fork();
switch (child_pid) {
case -1:
return (18);
case 0:
for (;;)
;
return (0);
default:
break;
}
retval = parent_main(child_pid);
kill(child_pid, SIGKILL);
return (retval);
}
|
# ... existing code ...
return (0);
}
int
main(int argc, const char *argv[])
{
pid_t child_pid;
int retval;
child_pid = fork();
switch (child_pid) {
# ... modified code ...
case -1:
return (18);
case 0:
for (;;)
;
return (0);
default:
break;
...
retval = parent_main(child_pid);
kill(child_pid, SIGKILL);
return (retval);
}
# ... rest of the code ...
|
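As a rough cross-check of what the C test above exercises, the same behaviour is visible from Python on a POSIX system. This sketch is mine, not part of the test suite.

# waitpid() with WNOHANG returns pid 0 while the child is still running.
import os
import signal

child = os.fork()
if child == 0:
    while True:          # child spins, mirroring the for (;;) loop in the C test
        pass
else:
    pid, status = os.waitpid(child, os.WNOHANG)
    print(pid)           # 0: the child has not exited yet
    os.kill(child, signal.SIGKILL)
    os.waitpid(child, 0)  # reap the child so it does not linger as a zombie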
025b356ad4bbaa81ef98467d3c3abd3c8fba98b8
|
skbio/format/sequences/tests/test_fastq.py
|
skbio/format/sequences/tests/test_fastq.py
|
import numpy as np
from unittest import TestCase, main
from skbio.format.sequences.fastq import (format_fastq_record,
_phred_to_ascii33,
_phred_to_ascii64)
class FASTQFormatTests(TestCase):
def test_format_fastq_record(self):
"""Construt a FASTQ record"""
exp = b"@abc\ndef\n+\nfgh\n"
obs = format_fastq_record(b'abc', b'def',
np.array([38, 39, 40], dtype=np.int8), 64)
self.assertEqual(obs, exp)
def test_phred_to_ascii33(self):
"""Write out terrible FASTQ quality scores"""
exp = b'GHI'
obs = _phred_to_ascii33(np.array([38, 39, 40], dtype=np.int8))
self.assertEqual(obs, exp)
def test_phred_to_ascii64(self):
"""Write out terrible FASTQ quality scores"""
exp = b'fgh'
obs = _phred_to_ascii64(np.array([38, 39, 40], dtype=np.int8))
self.assertEqual(obs, exp)
if __name__ == '__main__':
main()
|
import numpy as np
from unittest import TestCase, main
from skbio.format.sequences.fastq import (format_fastq_record,
_phred_to_ascii33,
_phred_to_ascii64)
class FASTQFormatTests(TestCase):
def setUp(self):
self.qual_scores = np.array([38, 39, 40], dtype=np.int8)
self.args = (b'abc', b'def', self.qual_scores)
def test_format_fastq_record_phred_offset_33(self):
exp = b"@abc\ndef\n+\nGHI\n"
obs = format_fastq_record(*self.args, phred_offset=33)
self.assertEqual(obs, exp)
def test_format_fastq_record_phred_offset_64(self):
exp = b"@abc\ndef\n+\nfgh\n"
obs = format_fastq_record(*self.args, phred_offset=64)
self.assertEqual(obs, exp)
def test_format_fastq_record_invalid_phred_offset(self):
with self.assertRaises(ValueError):
format_fastq_record(*self.args, phred_offset=42)
def test_phred_to_ascii33(self):
obs = _phred_to_ascii33(self.qual_scores)
self.assertEqual(obs, b'GHI')
def test_phred_to_ascii64(self):
obs = _phred_to_ascii64(self.qual_scores)
self.assertEqual(obs, b'fgh')
if __name__ == '__main__':
main()
|
Add tests for different types of phred offsets
|
Add tests for different types of phred offsets
|
Python
|
bsd-3-clause
|
corburn/scikit-bio,anderspitman/scikit-bio,johnchase/scikit-bio,SamStudio8/scikit-bio,anderspitman/scikit-bio,averagehat/scikit-bio,wdwvt1/scikit-bio,kdmurray91/scikit-bio,demis001/scikit-bio,SamStudio8/scikit-bio,jdrudolph/scikit-bio,gregcaporaso/scikit-bio,corburn/scikit-bio,johnchase/scikit-bio,jairideout/scikit-bio,Kleptobismol/scikit-bio,demis001/scikit-bio,Kleptobismol/scikit-bio,xguse/scikit-bio,Kleptobismol/scikit-bio,colinbrislawn/scikit-bio,gregcaporaso/scikit-bio,jensreeder/scikit-bio,Achuth17/scikit-bio,colinbrislawn/scikit-bio,xguse/scikit-bio,kdmurray91/scikit-bio,Achuth17/scikit-bio,jairideout/scikit-bio,jdrudolph/scikit-bio,averagehat/scikit-bio,jensreeder/scikit-bio,wdwvt1/scikit-bio
|
python
|
## Code Before:
import numpy as np
from unittest import TestCase, main
from skbio.format.sequences.fastq import (format_fastq_record,
_phred_to_ascii33,
_phred_to_ascii64)
class FASTQFormatTests(TestCase):
def test_format_fastq_record(self):
"""Construt a FASTQ record"""
exp = b"@abc\ndef\n+\nfgh\n"
obs = format_fastq_record(b'abc', b'def',
np.array([38, 39, 40], dtype=np.int8), 64)
self.assertEqual(obs, exp)
def test_phred_to_ascii33(self):
"""Write out terrible FASTQ quality scores"""
exp = b'GHI'
obs = _phred_to_ascii33(np.array([38, 39, 40], dtype=np.int8))
self.assertEqual(obs, exp)
def test_phred_to_ascii64(self):
"""Write out terrible FASTQ quality scores"""
exp = b'fgh'
obs = _phred_to_ascii64(np.array([38, 39, 40], dtype=np.int8))
self.assertEqual(obs, exp)
if __name__ == '__main__':
main()
## Instruction:
Add tests for different types of phred offsets
## Code After:
import numpy as np
from unittest import TestCase, main
from skbio.format.sequences.fastq import (format_fastq_record,
_phred_to_ascii33,
_phred_to_ascii64)
class FASTQFormatTests(TestCase):
def setUp(self):
self.qual_scores = np.array([38, 39, 40], dtype=np.int8)
self.args = (b'abc', b'def', self.qual_scores)
def test_format_fastq_record_phred_offset_33(self):
exp = b"@abc\ndef\n+\nGHI\n"
obs = format_fastq_record(*self.args, phred_offset=33)
self.assertEqual(obs, exp)
def test_format_fastq_record_phred_offset_64(self):
exp = b"@abc\ndef\n+\nfgh\n"
obs = format_fastq_record(*self.args, phred_offset=64)
self.assertEqual(obs, exp)
def test_format_fastq_record_invalid_phred_offset(self):
with self.assertRaises(ValueError):
format_fastq_record(*self.args, phred_offset=42)
def test_phred_to_ascii33(self):
obs = _phred_to_ascii33(self.qual_scores)
self.assertEqual(obs, b'GHI')
def test_phred_to_ascii64(self):
obs = _phred_to_ascii64(self.qual_scores)
self.assertEqual(obs, b'fgh')
if __name__ == '__main__':
main()
|
// ... existing code ...
class FASTQFormatTests(TestCase):
def setUp(self):
self.qual_scores = np.array([38, 39, 40], dtype=np.int8)
self.args = (b'abc', b'def', self.qual_scores)
def test_format_fastq_record_phred_offset_33(self):
exp = b"@abc\ndef\n+\nGHI\n"
obs = format_fastq_record(*self.args, phred_offset=33)
self.assertEqual(obs, exp)
def test_format_fastq_record_phred_offset_64(self):
exp = b"@abc\ndef\n+\nfgh\n"
obs = format_fastq_record(*self.args, phred_offset=64)
self.assertEqual(obs, exp)
def test_format_fastq_record_invalid_phred_offset(self):
with self.assertRaises(ValueError):
format_fastq_record(*self.args, phred_offset=42)
def test_phred_to_ascii33(self):
obs = _phred_to_ascii33(self.qual_scores)
self.assertEqual(obs, b'GHI')
def test_phred_to_ascii64(self):
obs = _phred_to_ascii64(self.qual_scores)
self.assertEqual(obs, b'fgh')
if __name__ == '__main__':
main()
// ... rest of the code ...
|
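The expected strings in the tests above ('GHI' versus 'fgh') follow directly from the two phred offsets; a quick illustration:

# The same quality scores 38, 39, 40 encode differently per offset.
scores = [38, 39, 40]
ascii33 = ''.join(chr(q + 33) for q in scores)
ascii64 = ''.join(chr(q + 64) for q in scores)
print(ascii33)  # GHI  (Sanger / Illumina 1.8+ convention)
print(ascii64)  # fgh  (older Illumina 1.3-1.7 convention)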
28d62ce8a611434bce6e4afce6a45d72dbc12a45
|
codemodel-rifle/src/main/java/hu/bme/mit/codemodel/rifle/database/DbServices.java
|
codemodel-rifle/src/main/java/hu/bme/mit/codemodel/rifle/database/DbServices.java
|
package hu.bme.mit.codemodel.rifle.database;
import java.util.Map;
import hu.bme.mit.codemodel.rifle.database.querybuilder.Query;
import org.neo4j.driver.v1.Driver;
import org.neo4j.driver.v1.Session;
import org.neo4j.driver.v1.StatementResult;
import org.neo4j.driver.v1.Transaction;
import org.neo4j.graphdb.GraphDatabaseService;
import neo4j.driver.testkit.EmbeddedTestkitDriver;
/**
* Provides database services like transaction handling and query executing.
*/
public class DbServices {
protected final Driver driver;
protected Session session;
protected Transaction transaction;
public DbServices(Driver driver) {
this.driver = driver;
this.session = this.driver.session();
}
public Transaction beginTx() {
this.transaction = session.beginTransaction();
return this.transaction;
}
public StatementResult execute(String statement) {
return this.transaction.run(statement);
}
public StatementResult execute(String statementTemplate, Map<String, Object> statementParameters) {
return this.transaction.run(statementTemplate, statementParameters);
}
public StatementResult execute(Query q) {
return this.execute(q.getStatementTemplate(), q.getStatementParameters());
}
public GraphDatabaseService getUnderlyingDatabaseService() {
if (driver instanceof EmbeddedTestkitDriver) {
return ((EmbeddedTestkitDriver) driver).getUnderlyingDatabaseService();
} else {
throw new IllegalStateException("Cannot get underyling database service.");
}
}
}
|
package hu.bme.mit.codemodel.rifle.database;
import java.util.Map;
import hu.bme.mit.codemodel.rifle.database.querybuilder.Query;
import org.neo4j.driver.v1.Driver;
import org.neo4j.driver.v1.Session;
import org.neo4j.driver.v1.StatementResult;
import org.neo4j.driver.v1.Transaction;
import org.neo4j.graphdb.GraphDatabaseService;
import neo4j.driver.testkit.EmbeddedTestkitDriver;
/**
* Provides database services like transaction handling and query executing.
*/
public class DbServices {
protected final Driver driver;
protected Transaction transaction;
public DbServices(Driver driver) {
this.driver = driver;
}
public Transaction beginTx() {
Session session = driver.session();
this.transaction = session.beginTransaction();
return this.transaction;
}
public StatementResult execute(String statement) {
return this.transaction.run(statement);
}
public StatementResult execute(String statementTemplate, Map<String, Object> statementParameters) {
return this.transaction.run(statementTemplate, statementParameters);
}
public StatementResult execute(Query q) {
return this.execute(q.getStatementTemplate(), q.getStatementParameters());
}
public GraphDatabaseService getUnderlyingDatabaseService() {
if (driver instanceof EmbeddedTestkitDriver) {
return ((EmbeddedTestkitDriver) driver).getUnderlyingDatabaseService();
} else {
throw new IllegalStateException("Cannot get underyling database service.");
}
}
}
|
Revert "There is no need for a separate session for each transaction"
|
Revert "There is no need for a separate session for each transaction"
This reverts commit 5863051c57182ee6fc61a6b63061cf63b95308e2.
|
Java
|
epl-1.0
|
FTSRG/codemodel-rifle,FTSRG/codemodel-rifle,FTSRG/codemodel-rifle
|
java
|
## Code Before:
package hu.bme.mit.codemodel.rifle.database;
import java.util.Map;
import hu.bme.mit.codemodel.rifle.database.querybuilder.Query;
import org.neo4j.driver.v1.Driver;
import org.neo4j.driver.v1.Session;
import org.neo4j.driver.v1.StatementResult;
import org.neo4j.driver.v1.Transaction;
import org.neo4j.graphdb.GraphDatabaseService;
import neo4j.driver.testkit.EmbeddedTestkitDriver;
/**
* Provides database services like transaction handling and query executing.
*/
public class DbServices {
protected final Driver driver;
protected Session session;
protected Transaction transaction;
public DbServices(Driver driver) {
this.driver = driver;
this.session = this.driver.session();
}
public Transaction beginTx() {
this.transaction = session.beginTransaction();
return this.transaction;
}
public StatementResult execute(String statement) {
return this.transaction.run(statement);
}
public StatementResult execute(String statementTemplate, Map<String, Object> statementParameters) {
return this.transaction.run(statementTemplate, statementParameters);
}
public StatementResult execute(Query q) {
return this.execute(q.getStatementTemplate(), q.getStatementParameters());
}
public GraphDatabaseService getUnderlyingDatabaseService() {
if (driver instanceof EmbeddedTestkitDriver) {
return ((EmbeddedTestkitDriver) driver).getUnderlyingDatabaseService();
} else {
throw new IllegalStateException("Cannot get underyling database service.");
}
}
}
## Instruction:
Revert "There is no need for a separate session for each transaction"
This reverts commit 5863051c57182ee6fc61a6b63061cf63b95308e2.
## Code After:
package hu.bme.mit.codemodel.rifle.database;
import java.util.Map;
import hu.bme.mit.codemodel.rifle.database.querybuilder.Query;
import org.neo4j.driver.v1.Driver;
import org.neo4j.driver.v1.Session;
import org.neo4j.driver.v1.StatementResult;
import org.neo4j.driver.v1.Transaction;
import org.neo4j.graphdb.GraphDatabaseService;
import neo4j.driver.testkit.EmbeddedTestkitDriver;
/**
* Provides database services like transaction handling and query executing.
*/
public class DbServices {
protected final Driver driver;
protected Transaction transaction;
public DbServices(Driver driver) {
this.driver = driver;
}
public Transaction beginTx() {
Session session = driver.session();
this.transaction = session.beginTransaction();
return this.transaction;
}
public StatementResult execute(String statement) {
return this.transaction.run(statement);
}
public StatementResult execute(String statementTemplate, Map<String, Object> statementParameters) {
return this.transaction.run(statementTemplate, statementParameters);
}
public StatementResult execute(Query q) {
return this.execute(q.getStatementTemplate(), q.getStatementParameters());
}
public GraphDatabaseService getUnderlyingDatabaseService() {
if (driver instanceof EmbeddedTestkitDriver) {
return ((EmbeddedTestkitDriver) driver).getUnderlyingDatabaseService();
} else {
throw new IllegalStateException("Cannot get underyling database service.");
}
}
}
|
# ... existing code ...
*/
public class DbServices {
protected final Driver driver;
protected Transaction transaction;
public DbServices(Driver driver) {
this.driver = driver;
}
public Transaction beginTx() {
Session session = driver.session();
this.transaction = session.beginTransaction();
return this.transaction;
}
# ... rest of the code ...
|
40bb2b8aaff899f847211273f6631547b6bac978
|
pyhessian/data_types.py
|
pyhessian/data_types.py
|
__all__ = ['long']
if hasattr(__builtins__, 'long'):
long = long
else:
class long(int):
pass
|
__all__ = ['long']
if 'long' in __builtins__:
long = __builtins__['long']
else:
class long(int):
pass
|
Fix bug encoding long type in python 2.x
|
Fix bug encoding long type in python 2.x
|
Python
|
bsd-3-clause
|
cyrusmg/python-hessian,cyrusmg/python-hessian,cyrusmg/python-hessian
|
python
|
## Code Before:
__all__ = ['long']
if hasattr(__builtins__, 'long'):
long = long
else:
class long(int):
pass
## Instruction:
Fix bug encoding long type in python 2.x
## Code After:
__all__ = ['long']
if 'long' in __builtins__:
long = __builtins__['long']
else:
class long(int):
pass
|
// ... existing code ...
__all__ = ['long']
if 'long' in __builtins__:
long = __builtins__['long']
else:
class long(int):
pass
// ... rest of the code ...
|
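Background on the record above: __builtins__ is the builtins module inside __main__ but a plain dict inside imported modules, which is why hasattr(__builtins__, 'long') gave inconsistent answers. The membership test in the fix targets the dict case; the NameError probe below, a sketch of my own, sidesteps the ambiguity entirely.

# Works under both Python 2 and 3 without inspecting __builtins__ at all.
try:
    long             # Python 2: the builtin exists
except NameError:    # Python 3: fall back to a subclass of int
    class long(int):
        pass

print(long(2) ** 40)  # 1099511627776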
c138fbd0341488a344dcada1543fc453493679af
|
rxbinding/src/main/java/com/jakewharton/rxbinding/internal/MainThreadSubscription.java
|
rxbinding/src/main/java/com/jakewharton/rxbinding/internal/MainThreadSubscription.java
|
package com.jakewharton.rxbinding.internal;
import android.os.Handler;
import android.os.Looper;
import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
import rx.Subscription;
public abstract class MainThreadSubscription implements Subscription, Runnable {
private static final Handler mainThread = new Handler(Looper.getMainLooper());
@SuppressWarnings("unused") // Updated by 'unsubscribedUpdater' object.
private volatile int unsubscribed;
private static final AtomicIntegerFieldUpdater<MainThreadSubscription> unsubscribedUpdater =
AtomicIntegerFieldUpdater.newUpdater(MainThreadSubscription.class, "unsubscribed");
@Override public final boolean isUnsubscribed() {
return unsubscribed != 0;
}
@Override public final void unsubscribe() {
if (unsubscribedUpdater.compareAndSet(this, 0, 1)) {
if (Looper.getMainLooper() == Looper.myLooper()) {
onUnsubscribe();
} else {
mainThread.post(this);
}
}
}
@Override public final void run() {
onUnsubscribe();
}
protected abstract void onUnsubscribe();
}
|
package com.jakewharton.rxbinding.internal;
import android.os.Handler;
import android.os.Looper;
import android.support.annotation.Keep;
import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
import rx.Subscription;
public abstract class MainThreadSubscription implements Subscription, Runnable {
private static final Handler mainThread = new Handler(Looper.getMainLooper());
@Keep
@SuppressWarnings("unused") // Updated by 'unsubscribedUpdater' object.
private volatile int unsubscribed;
private static final AtomicIntegerFieldUpdater<MainThreadSubscription> unsubscribedUpdater =
AtomicIntegerFieldUpdater.newUpdater(MainThreadSubscription.class, "unsubscribed");
@Override public final boolean isUnsubscribed() {
return unsubscribed != 0;
}
@Override public final void unsubscribe() {
if (unsubscribedUpdater.compareAndSet(this, 0, 1)) {
if (Looper.getMainLooper() == Looper.myLooper()) {
onUnsubscribe();
} else {
mainThread.post(this);
}
}
}
@Override public final void run() {
onUnsubscribe();
}
protected abstract void onUnsubscribe();
}
|
Add @Keep annotation on unsubscribed field
|
Add @Keep annotation on unsubscribed field
Since it's accessed via reflection, this needs to be kept un-obfuscated.
|
Java
|
apache-2.0
|
Edward608/RxBinding,skoric/RxBinding,dlew/RxBinding,hzsweers/RxBinding,chemouna/RxBinding,lockerfish/RxBinding,pitatensai/RxBinding,rharter/RxBinding,JakeWharton/RxBinding,vipulshah2010/RxBinding,Edward608/RxBinding,vipulshah2010/RxBinding,vanniktech/RxBinding,dsvoronin/RxBinding,Edward608/RxBinding,dsvoronin/RxBinding,MaTriXy/RxBinding,lockerfish/RxBinding,dsvoronin/RxBinding,skoric/RxBinding,viacheslavokolitiy/RxBinding-1,xfumihiro/RxBinding,rharter/RxBinding,kunny/RxBinding,vanniktech/RxBinding,kunny/RxBinding,viacheslavokolitiy/RxBinding-1,hzsweers/RxBinding,marcelohd/RxBinding,marcelohd/RxBinding,rharter/RxBinding,MaTriXy/RxBinding,chemouna/RxBinding,hzsweers/RxBinding,xfumihiro/RxBinding,JakeWharton/RxBinding,viacheslavokolitiy/RxBinding-1,dlew/RxBinding,pitatensai/RxBinding,dlew/RxBinding,marcelohd/RxBinding,JakeWharton/RxBinding,skoric/RxBinding,MaTriXy/RxBinding,chemouna/RxBinding,kunny/RxBinding,vipulshah2010/RxBinding,lockerfish/RxBinding,vanniktech/RxBinding,pitatensai/RxBinding,xfumihiro/RxBinding
|
java
|
## Code Before:
package com.jakewharton.rxbinding.internal;
import android.os.Handler;
import android.os.Looper;
import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
import rx.Subscription;
public abstract class MainThreadSubscription implements Subscription, Runnable {
private static final Handler mainThread = new Handler(Looper.getMainLooper());
@SuppressWarnings("unused") // Updated by 'unsubscribedUpdater' object.
private volatile int unsubscribed;
private static final AtomicIntegerFieldUpdater<MainThreadSubscription> unsubscribedUpdater =
AtomicIntegerFieldUpdater.newUpdater(MainThreadSubscription.class, "unsubscribed");
@Override public final boolean isUnsubscribed() {
return unsubscribed != 0;
}
@Override public final void unsubscribe() {
if (unsubscribedUpdater.compareAndSet(this, 0, 1)) {
if (Looper.getMainLooper() == Looper.myLooper()) {
onUnsubscribe();
} else {
mainThread.post(this);
}
}
}
@Override public final void run() {
onUnsubscribe();
}
protected abstract void onUnsubscribe();
}
## Instruction:
Add @Keep annotation on unsubscribed field
Since it's accessed via reflection, this needs to be kept un-obfuscated.
## Code After:
package com.jakewharton.rxbinding.internal;
import android.os.Handler;
import android.os.Looper;
import android.support.annotation.Keep;
import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
import rx.Subscription;
public abstract class MainThreadSubscription implements Subscription, Runnable {
private static final Handler mainThread = new Handler(Looper.getMainLooper());
@Keep
@SuppressWarnings("unused") // Updated by 'unsubscribedUpdater' object.
private volatile int unsubscribed;
private static final AtomicIntegerFieldUpdater<MainThreadSubscription> unsubscribedUpdater =
AtomicIntegerFieldUpdater.newUpdater(MainThreadSubscription.class, "unsubscribed");
@Override public final boolean isUnsubscribed() {
return unsubscribed != 0;
}
@Override public final void unsubscribe() {
if (unsubscribedUpdater.compareAndSet(this, 0, 1)) {
if (Looper.getMainLooper() == Looper.myLooper()) {
onUnsubscribe();
} else {
mainThread.post(this);
}
}
}
@Override public final void run() {
onUnsubscribe();
}
protected abstract void onUnsubscribe();
}
|
// ... existing code ...
import android.os.Handler;
import android.os.Looper;
import android.support.annotation.Keep;
import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
import rx.Subscription;
// ... modified code ...
public abstract class MainThreadSubscription implements Subscription, Runnable {
private static final Handler mainThread = new Handler(Looper.getMainLooper());
@Keep
@SuppressWarnings("unused") // Updated by 'unsubscribedUpdater' object.
private volatile int unsubscribed;
private static final AtomicIntegerFieldUpdater<MainThreadSubscription> unsubscribedUpdater =
// ... rest of the code ...
|
ab47c678b37527a7b8a970b365503b65ffccda87
|
populous/cli.py
|
populous/cli.py
|
import click
from .loader import load_yaml
from .blueprint import Blueprint
from .exceptions import ValidationError, YAMLError
def get_blueprint(*files):
try:
return Blueprint.from_description(load_yaml(*files))
except (YAMLError, ValidationError) as e:
raise click.ClickException(e.message)
except Exception as e:
pass
@click.group()
@click.version_option()
def cli():
pass
@cli.command()
@click.argument('files', nargs=-1)
def predict(files):
"""
Predict how many objects will be created if the given files are used.
"""
blueprint = get_blueprint(*files)
for item in blueprint:
click.echo("{name}: {count} {by}".format(
name=item.name, count=item.total,
by="({} by {})".format(item.count.number, item.count.by)
if item.count.by else ""
))
|
import click
from .loader import load_yaml
from .blueprint import Blueprint
from .exceptions import ValidationError, YAMLError
def get_blueprint(*files):
try:
return Blueprint.from_description(load_yaml(*files))
except (YAMLError, ValidationError) as e:
raise click.ClickException(e.message)
except Exception as e:
raise click.ClickException("Unexpected error during the blueprint "
"loading: {}".format(e.message))
@click.group()
@click.version_option()
def cli():
pass
@cli.command()
@click.argument('files', nargs=-1)
def predict(files):
"""
Predict how many objects will be created if the given files are used.
"""
blueprint = get_blueprint(*files)
for item in blueprint:
click.echo("{name}: {count} {by}".format(
name=item.name, count=item.total,
by="({} by {})".format(item.count.number, item.count.by)
if item.count.by else ""
))
|
Handle unexpected errors properly in load_blueprint
|
Handle unexpected errors properly in load_blueprint
|
Python
|
mit
|
novafloss/populous
|
python
|
## Code Before:
import click
from .loader import load_yaml
from .blueprint import Blueprint
from .exceptions import ValidationError, YAMLError
def get_blueprint(*files):
try:
return Blueprint.from_description(load_yaml(*files))
except (YAMLError, ValidationError) as e:
raise click.ClickException(e.message)
except Exception as e:
pass
@click.group()
@click.version_option()
def cli():
pass
@cli.command()
@click.argument('files', nargs=-1)
def predict(files):
"""
Predict how many objects will be created if the given files are used.
"""
blueprint = get_blueprint(*files)
for item in blueprint:
click.echo("{name}: {count} {by}".format(
name=item.name, count=item.total,
by="({} by {})".format(item.count.number, item.count.by)
if item.count.by else ""
))
## Instruction:
Handle unexpected errors properly in load_blueprint
## Code After:
import click
from .loader import load_yaml
from .blueprint import Blueprint
from .exceptions import ValidationError, YAMLError
def get_blueprint(*files):
try:
return Blueprint.from_description(load_yaml(*files))
except (YAMLError, ValidationError) as e:
raise click.ClickException(e.message)
except Exception as e:
raise click.ClickException("Unexpected error during the blueprint "
"loading: {}".format(e.message))
@click.group()
@click.version_option()
def cli():
pass
@cli.command()
@click.argument('files', nargs=-1)
def predict(files):
"""
Predict how many objects will be created if the given files are used.
"""
blueprint = get_blueprint(*files)
for item in blueprint:
click.echo("{name}: {count} {by}".format(
name=item.name, count=item.total,
by="({} by {})".format(item.count.number, item.count.by)
if item.count.by else ""
))
|
...
except (YAMLError, ValidationError) as e:
raise click.ClickException(e.message)
except Exception as e:
raise click.ClickException("Unexpected error during the blueprint "
"loading: {}".format(e.message))
@click.group()
...
|
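The practical difference the fix above makes (a clean click error instead of a silently swallowed exception and a later crash) can be seen in a few lines. The snippet below is my own illustration, not project code.

# With the bare pass, the caller receives None and fails later with an
# unrelated TypeError; click.ClickException renders as "Error: ..." and a
# non-zero exit code instead.
import click

def get_blueprint_swallow():
    try:
        raise RuntimeError('broken blueprint')
    except Exception:
        pass             # returns None implicitly

def get_blueprint_wrapped():
    try:
        raise RuntimeError('broken blueprint')
    except Exception as e:
        raise click.ClickException('Unexpected error during the blueprint '
                                   'loading: {}'.format(e))

try:
    for item in get_blueprint_swallow():   # TypeError: NoneType is not iterable
        pass
except TypeError as err:
    print('confusing failure:', err)

try:
    get_blueprint_wrapped()
except click.ClickException as err:
    err.show()                             # prints "Error: Unexpected error ..."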
d3937a6ab2546c4c0adf025371f2f95b07f56b6c
|
registry-spring-boot-ws/src/main/java/org/gbif/registry/RegistryWsApplication.java
|
registry-spring-boot-ws/src/main/java/org/gbif/registry/RegistryWsApplication.java
|
package org.gbif.registry;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.actuate.autoconfigure.solr.SolrHealthContributorAutoConfiguration;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.solr.SolrAutoConfiguration;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
@SpringBootApplication(
scanBasePackages = {
"org.gbif.ws.server.interceptor",
"org.gbif.ws.server.aspect",
"org.gbif.ws.server.filter",
"org.gbif.ws.security",
"org.gbif.registry"},
exclude = {
SolrAutoConfiguration.class,
SolrHealthContributorAutoConfiguration.class
})
@MapperScan("org.gbif.registry.persistence.mapper")
@EnableConfigurationProperties
public class RegistryWsApplication {
public static void main(String[] args) {
SpringApplication.run(RegistryWsApplication.class, args);
}
}
|
package org.gbif.registry;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.actuate.autoconfigure.solr.SolrHealthContributorAutoConfiguration;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.solr.SolrAutoConfiguration;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
@SpringBootApplication(
scanBasePackages = {
"org.gbif.ws.server.interceptor",
"org.gbif.ws.server.aspect",
"org.gbif.ws.server.filter",
"org.gbif.ws.server.advice",
"org.gbif.ws.security",
"org.gbif.registry"},
exclude = {
SolrAutoConfiguration.class,
SolrHealthContributorAutoConfiguration.class
})
@MapperScan("org.gbif.registry.persistence.mapper")
@EnableConfigurationProperties
public class RegistryWsApplication {
public static void main(String[] args) {
SpringApplication.run(RegistryWsApplication.class, args);
}
}
|
Add package org.gbif.ws.server.advice to scan
|
Add package org.gbif.ws.server.advice to scan
|
Java
|
apache-2.0
|
gbif/registry,gbif/registry
|
java
|
## Code Before:
package org.gbif.registry;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.actuate.autoconfigure.solr.SolrHealthContributorAutoConfiguration;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.solr.SolrAutoConfiguration;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
@SpringBootApplication(
scanBasePackages = {
"org.gbif.ws.server.interceptor",
"org.gbif.ws.server.aspect",
"org.gbif.ws.server.filter",
"org.gbif.ws.security",
"org.gbif.registry"},
exclude = {
SolrAutoConfiguration.class,
SolrHealthContributorAutoConfiguration.class
})
@MapperScan("org.gbif.registry.persistence.mapper")
@EnableConfigurationProperties
public class RegistryWsApplication {
public static void main(String[] args) {
SpringApplication.run(RegistryWsApplication.class, args);
}
}
## Instruction:
Add package org.gbif.ws.server.advice to scan
## Code After:
package org.gbif.registry;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.actuate.autoconfigure.solr.SolrHealthContributorAutoConfiguration;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.solr.SolrAutoConfiguration;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
@SpringBootApplication(
scanBasePackages = {
"org.gbif.ws.server.interceptor",
"org.gbif.ws.server.aspect",
"org.gbif.ws.server.filter",
"org.gbif.ws.server.advice",
"org.gbif.ws.security",
"org.gbif.registry"},
exclude = {
SolrAutoConfiguration.class,
SolrHealthContributorAutoConfiguration.class
})
@MapperScan("org.gbif.registry.persistence.mapper")
@EnableConfigurationProperties
public class RegistryWsApplication {
public static void main(String[] args) {
SpringApplication.run(RegistryWsApplication.class, args);
}
}
|
// ... existing code ...
"org.gbif.ws.server.interceptor",
"org.gbif.ws.server.aspect",
"org.gbif.ws.server.filter",
"org.gbif.ws.server.advice",
"org.gbif.ws.security",
"org.gbif.registry"},
exclude = {
// ... rest of the code ...
|
115e9e40e7c912beaf6186d69df97f5996e43310
|
web/src/main/java/org/cbioportal/web/parameter/MolecularDataMultipleStudyFilter.java
|
web/src/main/java/org/cbioportal/web/parameter/MolecularDataMultipleStudyFilter.java
|
package org.cbioportal.web.parameter;
import javax.validation.constraints.AssertTrue;
import javax.validation.constraints.Size;
import java.util.List;
import java.io.Serializable;
public class MolecularDataMultipleStudyFilter implements Serializable {
@Size(min = 1, max = PagingConstants.MAX_PAGE_SIZE)
private List<SampleMolecularIdentifier> sampleMolecularIdentifiers;
private List<String> molecularProfileIds;
@Size(min = 1, max = PagingConstants.MAX_PAGE_SIZE)
private List<Integer> entrezGeneIds;
@AssertTrue
private boolean isEitherMolecularProfileIdsOrSampleMolecularIdentifiersPresent() {
return molecularProfileIds != null ^ sampleMolecularIdentifiers != null;
}
public List<SampleMolecularIdentifier> getSampleMolecularIdentifiers() {
return sampleMolecularIdentifiers;
}
public void setSampleMolecularIdentifiers(List<SampleMolecularIdentifier> sampleMolecularIdentifiers) {
this.sampleMolecularIdentifiers = sampleMolecularIdentifiers;
}
public List<String> getMolecularProfileIds() {
return molecularProfileIds;
}
public void setMolecularProfileIds(List<String> molecularProfileIds) {
this.molecularProfileIds = molecularProfileIds;
}
public List<Integer> getEntrezGeneIds() {
return entrezGeneIds;
}
public void setEntrezGeneIds(List<Integer> entrezGeneIds) {
this.entrezGeneIds = entrezGeneIds;
}
}
|
package org.cbioportal.web.parameter;
import javax.validation.constraints.AssertTrue;
import javax.validation.constraints.Size;
import java.util.List;
import java.io.Serializable;
public class MolecularDataMultipleStudyFilter implements Serializable {
@Size(min = 1, max = PagingConstants.MAX_PAGE_SIZE)
private List<SampleMolecularIdentifier> sampleMolecularIdentifiers;
@Size(min = 1, max = PagingConstants.MAX_PAGE_SIZE)
private List<String> molecularProfileIds;
@Size(min = 1, max = PagingConstants.MAX_PAGE_SIZE)
private List<Integer> entrezGeneIds;
@AssertTrue
private boolean isEitherMolecularProfileIdsOrSampleMolecularIdentifiersPresent() {
return molecularProfileIds != null ^ sampleMolecularIdentifiers != null;
}
public List<SampleMolecularIdentifier> getSampleMolecularIdentifiers() {
return sampleMolecularIdentifiers;
}
public void setSampleMolecularIdentifiers(List<SampleMolecularIdentifier> sampleMolecularIdentifiers) {
this.sampleMolecularIdentifiers = sampleMolecularIdentifiers;
}
public List<String> getMolecularProfileIds() {
return molecularProfileIds;
}
public void setMolecularProfileIds(List<String> molecularProfileIds) {
this.molecularProfileIds = molecularProfileIds;
}
public List<Integer> getEntrezGeneIds() {
return entrezGeneIds;
}
public void setEntrezGeneIds(List<Integer> entrezGeneIds) {
this.entrezGeneIds = entrezGeneIds;
}
}
|
Add size constraint to molecularProfileIds in molecular data endpoint
|
Add size constraint to molecularProfileIds in molecular data endpoint
|
Java
|
agpl-3.0
|
zhx828/cbioportal,pughlab/cbioportal,angelicaochoa/cbioportal,d3b-center/pedcbioportal,pughlab/cbioportal,n1zea144/cbioportal,cBioPortal/cbioportal,mandawilson/cbioportal,pughlab/cbioportal,zhx828/cbioportal,n1zea144/cbioportal,d3b-center/pedcbioportal,zhx828/cbioportal,mandawilson/cbioportal,angelicaochoa/cbioportal,pughlab/cbioportal,n1zea144/cbioportal,pughlab/cbioportal,sheridancbio/cbioportal,d3b-center/pedcbioportal,sheridancbio/cbioportal,onursumer/cbioportal,angelicaochoa/cbioportal,d3b-center/pedcbioportal,mandawilson/cbioportal,n1zea144/cbioportal,cBioPortal/cbioportal,mandawilson/cbioportal,zhx828/cbioportal,onursumer/cbioportal,sheridancbio/cbioportal,d3b-center/pedcbioportal,zhx828/cbioportal,zhx828/cbioportal,mandawilson/cbioportal,onursumer/cbioportal,cBioPortal/cbioportal,sheridancbio/cbioportal,onursumer/cbioportal,mandawilson/cbioportal,n1zea144/cbioportal,d3b-center/pedcbioportal,cBioPortal/cbioportal,onursumer/cbioportal,angelicaochoa/cbioportal,sheridancbio/cbioportal,zhx828/cbioportal,n1zea144/cbioportal,angelicaochoa/cbioportal,n1zea144/cbioportal,sheridancbio/cbioportal,pughlab/cbioportal,cBioPortal/cbioportal,pughlab/cbioportal,mandawilson/cbioportal,d3b-center/pedcbioportal,onursumer/cbioportal,angelicaochoa/cbioportal,angelicaochoa/cbioportal,cBioPortal/cbioportal
|
java
|
## Code Before:
package org.cbioportal.web.parameter;
import javax.validation.constraints.AssertTrue;
import javax.validation.constraints.Size;
import java.util.List;
import java.io.Serializable;
public class MolecularDataMultipleStudyFilter implements Serializable {
@Size(min = 1, max = PagingConstants.MAX_PAGE_SIZE)
private List<SampleMolecularIdentifier> sampleMolecularIdentifiers;
private List<String> molecularProfileIds;
@Size(min = 1, max = PagingConstants.MAX_PAGE_SIZE)
private List<Integer> entrezGeneIds;
@AssertTrue
private boolean isEitherMolecularProfileIdsOrSampleMolecularIdentifiersPresent() {
return molecularProfileIds != null ^ sampleMolecularIdentifiers != null;
}
public List<SampleMolecularIdentifier> getSampleMolecularIdentifiers() {
return sampleMolecularIdentifiers;
}
public void setSampleMolecularIdentifiers(List<SampleMolecularIdentifier> sampleMolecularIdentifiers) {
this.sampleMolecularIdentifiers = sampleMolecularIdentifiers;
}
public List<String> getMolecularProfileIds() {
return molecularProfileIds;
}
public void setMolecularProfileIds(List<String> molecularProfileIds) {
this.molecularProfileIds = molecularProfileIds;
}
public List<Integer> getEntrezGeneIds() {
return entrezGeneIds;
}
public void setEntrezGeneIds(List<Integer> entrezGeneIds) {
this.entrezGeneIds = entrezGeneIds;
}
}
## Instruction:
Add size constraint to molecularProfileIds in molecular data endpoint
## Code After:
package org.cbioportal.web.parameter;
import javax.validation.constraints.AssertTrue;
import javax.validation.constraints.Size;
import java.util.List;
import java.io.Serializable;
public class MolecularDataMultipleStudyFilter implements Serializable {
@Size(min = 1, max = PagingConstants.MAX_PAGE_SIZE)
private List<SampleMolecularIdentifier> sampleMolecularIdentifiers;
@Size(min = 1, max = PagingConstants.MAX_PAGE_SIZE)
private List<String> molecularProfileIds;
@Size(min = 1, max = PagingConstants.MAX_PAGE_SIZE)
private List<Integer> entrezGeneIds;
@AssertTrue
private boolean isEitherMolecularProfileIdsOrSampleMolecularIdentifiersPresent() {
return molecularProfileIds != null ^ sampleMolecularIdentifiers != null;
}
public List<SampleMolecularIdentifier> getSampleMolecularIdentifiers() {
return sampleMolecularIdentifiers;
}
public void setSampleMolecularIdentifiers(List<SampleMolecularIdentifier> sampleMolecularIdentifiers) {
this.sampleMolecularIdentifiers = sampleMolecularIdentifiers;
}
public List<String> getMolecularProfileIds() {
return molecularProfileIds;
}
public void setMolecularProfileIds(List<String> molecularProfileIds) {
this.molecularProfileIds = molecularProfileIds;
}
public List<Integer> getEntrezGeneIds() {
return entrezGeneIds;
}
public void setEntrezGeneIds(List<Integer> entrezGeneIds) {
this.entrezGeneIds = entrezGeneIds;
}
}
|
// ... existing code ...
@Size(min = 1, max = PagingConstants.MAX_PAGE_SIZE)
private List<SampleMolecularIdentifier> sampleMolecularIdentifiers;
@Size(min = 1, max = PagingConstants.MAX_PAGE_SIZE)
private List<String> molecularProfileIds;
@Size(min = 1, max = PagingConstants.MAX_PAGE_SIZE)
private List<Integer> entrezGeneIds;
// ... rest of the code ...
|
8e06061e59d7443de00648d6a7e14f32d5b7ad2f
|
setup.py
|
setup.py
|
from setuptools import setup
setup(name = "medea",
license = "Apache",
version = "0.0",
install_requires = ["protobuf"],
description = "Mesos containerization hooks for Docker",
author = "Jason Dusek",
author_email = "[email protected]",
maintainer = "Mesosphere",
maintainer_email = "[email protected]",
url = "https://github.com/mesosphere/medea",
packages = ["medea"],
entry_points = { "console_scripts": ["medea = medea:cli"] })
|
from setuptools import setup
setup(name = "medea",
license = "Apache",
version = "0.0.0",
install_requires = ["protobuf"],
description = "Mesos containerization hooks for Docker",
author = "Jason Dusek",
author_email = "[email protected]",
maintainer = "Mesosphere",
maintainer_email = "[email protected]",
url = "https://github.com/mesosphere/medea",
packages = ["medea"],
entry_points = { "console_scripts": ["medea = medea:cli"] },
classifiers = [ "Environment :: Console",
"Intended Audience :: Developers",
"Operating System :: Unix",
"Operating System :: POSIX",
"Programming Language :: Python",
"Topic :: System",
"Topic :: System :: Systems Administration",
"Topic :: Software Development",
"License :: OSI Approved :: Apache Software License",
"Development Status :: 4 - Beta" ])
|
Add classifiers and a fuller version number.
|
Add classifiers and a fuller version number.
|
Python
|
apache-2.0
|
solidsnack/deimos,davidbliu/deimos-etcd-mod,solidsnack/deimos,mesosphere/deimos,midonet/mcp,midonet/mcp,mesosphere/deimos,davidbliu/deimos-etcd-mod
|
python
|
## Code Before:
from setuptools import setup
setup(name = "medea",
license = "Apache",
version = "0.0",
install_requires = ["protobuf"],
description = "Mesos containerization hooks for Docker",
author = "Jason Dusek",
author_email = "[email protected]",
maintainer = "Mesosphere",
maintainer_email = "[email protected]",
url = "https://github.com/mesosphere/medea",
packages = ["medea"],
entry_points = { "console_scripts": ["medea = medea:cli"] })
## Instruction:
Add classifiers and a fuller version number.
## Code After:
from setuptools import setup
setup(name = "medea",
license = "Apache",
version = "0.0.0",
install_requires = ["protobuf"],
description = "Mesos containerization hooks for Docker",
author = "Jason Dusek",
author_email = "[email protected]",
maintainer = "Mesosphere",
maintainer_email = "[email protected]",
url = "https://github.com/mesosphere/medea",
packages = ["medea"],
entry_points = { "console_scripts": ["medea = medea:cli"] },
classifiers = [ "Environment :: Console",
"Intended Audience :: Developers",
"Operating System :: Unix",
"Operating System :: POSIX",
"Programming Language :: Python",
"Topic :: System",
"Topic :: System :: Systems Administration",
"Topic :: Software Development",
"License :: OSI Approved :: Apache Software License",
"Development Status :: 4 - Beta" ])
|
# ... existing code ...
setup(name = "medea",
license = "Apache",
version = "0.0.0",
install_requires = ["protobuf"],
description = "Mesos containerization hooks for Docker",
author = "Jason Dusek",
# ... modified code ...
maintainer_email = "[email protected]",
url = "https://github.com/mesosphere/medea",
packages = ["medea"],
entry_points = { "console_scripts": ["medea = medea:cli"] },
classifiers = [ "Environment :: Console",
"Intended Audience :: Developers",
"Operating System :: Unix",
"Operating System :: POSIX",
"Programming Language :: Python",
"Topic :: System",
"Topic :: System :: Systems Administration",
"Topic :: Software Development",
"License :: OSI Approved :: Apache Software License",
"Development Status :: 4 - Beta" ])
# ... rest of the code ...
|
011ad6090e183ce359c0a74bbd2f2530e1d5178c
|
tests/test_repr.py
|
tests/test_repr.py
|
""" Test __str__ methods. """
import pexpect
from . import PexpectTestCase
class TestCaseMisc(PexpectTestCase.PexpectTestCase):
def test_str_spawnu(self):
""" Exercise spawnu.__str__() """
# given,
p = pexpect.spawnu('cat')
# exercise,
value = str(p)
# verify
assert isinstance(value, str)
def test_str_spawn(self):
""" Exercise spawn.__str__() """
# given,
p = pexpect.spawn('cat')
# exercise,
value = str(p)
# verify
assert isinstance(value, str)
def test_str_before_spawn(self):
""" Exercise derived spawn.__str__() """
# given,
child = pexpect.spawn(None, None)
child.read_nonblocking = lambda size, timeout: b''
try:
child.expect('alpha', timeout=0.1)
except pexpect.TIMEOUT:
pass
else:
assert False, 'TIMEOUT exception expected. No exception aised.'
|
""" Test __str__ methods. """
import pexpect
from . import PexpectTestCase
class TestCaseMisc(PexpectTestCase.PexpectTestCase):
def test_str_spawnu(self):
""" Exercise spawnu.__str__() """
# given,
p = pexpect.spawnu('cat')
# exercise,
value = str(p)
# verify
assert isinstance(value, str)
def test_str_spawn(self):
""" Exercise spawn.__str__() """
# given,
p = pexpect.spawn('cat')
# exercise,
value = str(p)
# verify
assert isinstance(value, str)
def test_str_before_spawn(self):
""" Exercise derived spawn.__str__() """
# given,
child = pexpect.spawn(None, None)
child.read_nonblocking = lambda size, timeout: b''
try:
child.expect('alpha', timeout=0.1)
except pexpect.TIMEOUT as e:
str(e) # Smoketest
else:
assert False, 'TIMEOUT exception expected. No exception raised.'
|
Check error repr can be str-ed
|
Check error repr can be str-ed
|
Python
|
isc
|
nodish/pexpect,nodish/pexpect,nodish/pexpect
|
python
|
## Code Before:
""" Test __str__ methods. """
import pexpect
from . import PexpectTestCase
class TestCaseMisc(PexpectTestCase.PexpectTestCase):
def test_str_spawnu(self):
""" Exercise spawnu.__str__() """
# given,
p = pexpect.spawnu('cat')
# exercise,
value = str(p)
# verify
assert isinstance(value, str)
def test_str_spawn(self):
""" Exercise spawn.__str__() """
# given,
p = pexpect.spawn('cat')
# exercise,
value = str(p)
# verify
assert isinstance(value, str)
def test_str_before_spawn(self):
""" Exercise derived spawn.__str__() """
# given,
child = pexpect.spawn(None, None)
child.read_nonblocking = lambda size, timeout: b''
try:
child.expect('alpha', timeout=0.1)
except pexpect.TIMEOUT:
pass
else:
assert False, 'TIMEOUT exception expected. No exception aised.'
## Instruction:
Check error repr can be str-ed
## Code After:
""" Test __str__ methods. """
import pexpect
from . import PexpectTestCase
class TestCaseMisc(PexpectTestCase.PexpectTestCase):
def test_str_spawnu(self):
""" Exercise spawnu.__str__() """
# given,
p = pexpect.spawnu('cat')
# exercise,
value = str(p)
# verify
assert isinstance(value, str)
def test_str_spawn(self):
""" Exercise spawn.__str__() """
# given,
p = pexpect.spawn('cat')
# exercise,
value = str(p)
# verify
assert isinstance(value, str)
def test_str_before_spawn(self):
""" Exercise derived spawn.__str__() """
# given,
child = pexpect.spawn(None, None)
child.read_nonblocking = lambda size, timeout: b''
try:
child.expect('alpha', timeout=0.1)
except pexpect.TIMEOUT as e:
str(e) # Smoketest
else:
assert False, 'TIMEOUT exception expected. No exception raised.'
|
...
child.read_nonblocking = lambda size, timeout: b''
try:
child.expect('alpha', timeout=0.1)
except pexpect.TIMEOUT as e:
str(e) # Smoketest
else:
assert False, 'TIMEOUT exception expected. No exception raised.'
...
|
c833f55999f6fd9029626d1b794c86b2b5b11256
|
post_office/test_settings.py
|
post_office/test_settings.py
|
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
},
}
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'TIMEOUT': 36000,
'KEY_PREFIX': 'post-office',
},
'post_office': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'TIMEOUT': 36000,
'KEY_PREFIX': 'post-office',
}
}
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'post_office',
)
SECRET_KEY = 'a'
ROOT_URLCONF = 'post_office.test_urls'
DEFAULT_FROM_EMAIL = '[email protected]'
|
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
},
}
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'TIMEOUT': 36000,
'KEY_PREFIX': 'post-office',
},
'post_office': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'TIMEOUT': 36000,
'KEY_PREFIX': 'post-office',
}
}
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'post_office',
)
SECRET_KEY = 'a'
ROOT_URLCONF = 'post_office.test_urls'
DEFAULT_FROM_EMAIL = '[email protected]'
TEST_RUNNER = 'django.test.simple.DjangoTestSuiteRunner'
|
Use "DjangoTestSuiteRunner" to in Django 1.6.
|
Use "DjangoTestSuiteRunner" to in Django 1.6.
|
Python
|
mit
|
CasherWest/django-post_office,carrerasrodrigo/django-post_office,fapelhanz/django-post_office,RafRaf/django-post_office,ui/django-post_office,jrief/django-post_office,yprez/django-post_office,JostCrow/django-post_office,ui/django-post_office,LeGast00n/django-post_office,CasherWest/django-post_office,ekohl/django-post_office
|
python
|
## Code Before:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
},
}
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'TIMEOUT': 36000,
'KEY_PREFIX': 'post-office',
},
'post_office': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'TIMEOUT': 36000,
'KEY_PREFIX': 'post-office',
}
}
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'post_office',
)
SECRET_KEY = 'a'
ROOT_URLCONF = 'post_office.test_urls'
DEFAULT_FROM_EMAIL = '[email protected]'
## Instruction:
Use "DjangoTestSuiteRunner" to in Django 1.6.
## Code After:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
},
}
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'TIMEOUT': 36000,
'KEY_PREFIX': 'post-office',
},
'post_office': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'TIMEOUT': 36000,
'KEY_PREFIX': 'post-office',
}
}
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'post_office',
)
SECRET_KEY = 'a'
ROOT_URLCONF = 'post_office.test_urls'
DEFAULT_FROM_EMAIL = '[email protected]'
TEST_RUNNER = 'django.test.simple.DjangoTestSuiteRunner'
|
# ... existing code ...
ROOT_URLCONF = 'post_office.test_urls'
DEFAULT_FROM_EMAIL = '[email protected]'
TEST_RUNNER = 'django.test.simple.DjangoTestSuiteRunner'
# ... rest of the code ...
|
a8c7a6d6cd87f057fbd03c41cec41dba35e6bdf6
|
unittesting/test_color_scheme.py
|
unittesting/test_color_scheme.py
|
import sublime
from sublime_plugin import ApplicationCommand
from .mixin import UnitTestingMixin
from .const import DONE_MESSAGE
try:
from ColorSchemeUnit.lib.runner import ColorSchemeUnit
except Exception:
print('ColorSchemeUnit runner could not be imported')
class UnitTestingColorSchemeCommand(ApplicationCommand, UnitTestingMixin):
def run(self, package=None, **kargs):
if not package:
return
window = sublime.active_window()
settings = self.load_unittesting_settings(package, **kargs)
stream = self.load_stream(package, settings["output"])
# Make sure at least one file from the
# package opened for ColorSchemeUnit.
tests = sublime.find_resources("color_scheme_test*")
tests = [t for t in tests if t.startswith("Packages/%s/" % package)]
if not tests:
stream.write("ERROR: No syntax_test files are found in %s!" % package)
stream.write("\n")
stream.write(DONE_MESSAGE)
stream.close()
return
# trigger "Start reading output"
stream.write("Running ColorSchemeUnit\n")
stream.flush()
view = window.open_file(sublime.packages_path().rstrip('Packages') + tests[0])
view.set_scratch(True)
ColorSchemeUnit(window).run(output=stream)
|
import sublime
from sublime_plugin import ApplicationCommand
from .mixin import UnitTestingMixin
from .const import DONE_MESSAGE
try:
from ColorSchemeUnit.lib.runner import ColorSchemeUnit
except Exception:
print('ColorSchemeUnit runner could not be imported')
class UnitTestingColorSchemeCommand(ApplicationCommand, UnitTestingMixin):
def run(self, package=None, **kargs):
if not package:
return
window = sublime.active_window()
settings = self.load_unittesting_settings(package, **kargs)
stream = self.load_stream(package, settings["output"])
tests = sublime.find_resources("color_scheme_test*")
tests = [t for t in tests if t.startswith("Packages/%s/" % package)]
if not tests:
stream.write("ERROR: No syntax_test files are found in %s!" % package)
stream.write("\n")
stream.write(DONE_MESSAGE)
stream.close()
return
# trigger "Start reading output"
stream.write("Running ColorSchemeUnit\n")
stream.flush()
result = ColorSchemeUnit(window).run(output=stream, package=package, async=False)
if result:
stream.write('\n')
stream.write("OK.\n")
else:
stream.write('\n')
stream.write("FAILED.\n")
stream.write("\n")
stream.write(DONE_MESSAGE)
stream.close()
|
Use ColorSchemeUnit's new run-with-package API
|
Use ColorSchemeUnit's new run-with-package API
Re: https://github.com/gerardroche/sublime-color-scheme-unit/issues/18
The new API allows running with a package name, so UnitTesting doesn't need
to open any files before running the tests.
I also added an async parameter which allows running the tests in non-async
mode and getting the result of the tests, True for pass and False for fail.
This allows UnitTesting to handle printing the OK, FAILED and DONE messages.
|
Python
|
mit
|
randy3k/UnitTesting,randy3k/UnitTesting,randy3k/UnitTesting,randy3k/UnitTesting
|
python
|
## Code Before:
import sublime
from sublime_plugin import ApplicationCommand
from .mixin import UnitTestingMixin
from .const import DONE_MESSAGE
try:
from ColorSchemeUnit.lib.runner import ColorSchemeUnit
except Exception:
print('ColorSchemeUnit runner could not be imported')
class UnitTestingColorSchemeCommand(ApplicationCommand, UnitTestingMixin):
def run(self, package=None, **kargs):
if not package:
return
window = sublime.active_window()
settings = self.load_unittesting_settings(package, **kargs)
stream = self.load_stream(package, settings["output"])
# Make sure at least one file from the
# package opened for ColorSchemeUnit.
tests = sublime.find_resources("color_scheme_test*")
tests = [t for t in tests if t.startswith("Packages/%s/" % package)]
if not tests:
stream.write("ERROR: No syntax_test files are found in %s!" % package)
stream.write("\n")
stream.write(DONE_MESSAGE)
stream.close()
return
# trigger "Start reading output"
stream.write("Running ColorSchemeUnit\n")
stream.flush()
view = window.open_file(sublime.packages_path().rstrip('Packages') + tests[0])
view.set_scratch(True)
ColorSchemeUnit(window).run(output=stream)
## Instruction:
Use ColorSchemeUnit's new run-with-package API
Re: https://github.com/gerardroche/sublime-color-scheme-unit/issues/18
The new API allows running with a package name, so UnitTesting doesn't need
to open any files before running the tests.
I also added an async parameter which allows running the tests in non-async
mode and getting the result of the tests, True for pass and False for fail.
This allows UnitTesting to handle printing the OK, FAILED and DONE messages.
## Code After:
import sublime
from sublime_plugin import ApplicationCommand
from .mixin import UnitTestingMixin
from .const import DONE_MESSAGE
try:
from ColorSchemeUnit.lib.runner import ColorSchemeUnit
except Exception:
print('ColorSchemeUnit runner could not be imported')
class UnitTestingColorSchemeCommand(ApplicationCommand, UnitTestingMixin):
def run(self, package=None, **kargs):
if not package:
return
window = sublime.active_window()
settings = self.load_unittesting_settings(package, **kargs)
stream = self.load_stream(package, settings["output"])
tests = sublime.find_resources("color_scheme_test*")
tests = [t for t in tests if t.startswith("Packages/%s/" % package)]
if not tests:
stream.write("ERROR: No syntax_test files are found in %s!" % package)
stream.write("\n")
stream.write(DONE_MESSAGE)
stream.close()
return
# trigger "Start reading output"
stream.write("Running ColorSchemeUnit\n")
stream.flush()
result = ColorSchemeUnit(window).run(output=stream, package=package, async=False)
if result:
stream.write('\n')
stream.write("OK.\n")
else:
stream.write('\n')
stream.write("FAILED.\n")
stream.write("\n")
stream.write(DONE_MESSAGE)
stream.close()
|
# ... existing code ...
settings = self.load_unittesting_settings(package, **kargs)
stream = self.load_stream(package, settings["output"])
tests = sublime.find_resources("color_scheme_test*")
tests = [t for t in tests if t.startswith("Packages/%s/" % package)]
# ... modified code ...
stream.write("Running ColorSchemeUnit\n")
stream.flush()
result = ColorSchemeUnit(window).run(output=stream, package=package, async=False)
if result:
stream.write('\n')
stream.write("OK.\n")
else:
stream.write('\n')
stream.write("FAILED.\n")
stream.write("\n")
stream.write(DONE_MESSAGE)
stream.close()
# ... rest of the code ...
|
602321530a9457f29be69ee0df1af27600d63c44
|
coffee-chats/src/main/java/com/google/step/coffee/data/EventStore.java
|
coffee-chats/src/main/java/com/google/step/coffee/data/EventStore.java
|
package com.google.step.coffee.data;
import com.google.appengine.api.datastore.*;
import com.google.step.coffee.entity.Event;
public class EventStore {
private DatastoreService datastore = DatastoreServiceFactory.getDatastoreService();
/**
* Saves the event to the database. Returns an <code>Event</code>
* that has its id set, which the group passed to the function might not.
*/
public Event put(Event event) {
Key key = event.key();
Entity entity = key != null ? new Entity(key) : new Entity("event");
entity.setProperty("description", new Text(event.description()));
entity.setProperty("start", event.start().getEpochSecond());
entity.setProperty("duration", event.duration().toMinutes());
entity.setProperty("group", KeyFactory.stringToKey(event.groupId()));
entity.setProperty("calendarId", event.calendarId());
datastore.put(entity);
return Event.fromEntity(entity);
}
}
|
package com.google.step.coffee.data;
import com.google.appengine.api.datastore.*;
import com.google.step.coffee.entity.Event;
public class EventStore {
private DatastoreService datastore = DatastoreServiceFactory.getDatastoreService();
/**
* Saves the event to the database. Returns an <code>Event</code>
* that has its id set, which the group passed to the function might not.
*/
public Event put(Event event) {
Key key = event.key();
Entity entity = key != null ? new Entity(key) : new Entity("event");
entity.setProperty("description", new Text(event.description()));
entity.setProperty("start", event.start().getEpochSecond());
entity.setProperty("duration", event.duration().toMinutes());
entity.setProperty("group", KeyFactory.stringToKey(event.groupId()));
entity.setProperty("calendarId", event.calendarId());
datastore.put(entity);
return event.modify()
.setId(KeyFactory.keyToString(entity.getKey()))
.build();
}
}
|
Fix calendarId not being returned in response
|
Fix calendarId not being returned in response
|
Java
|
apache-2.0
|
googleinterns/step250-2020,googleinterns/step250-2020,googleinterns/step250-2020,googleinterns/step250-2020
|
java
|
## Code Before:
package com.google.step.coffee.data;
import com.google.appengine.api.datastore.*;
import com.google.step.coffee.entity.Event;
public class EventStore {
private DatastoreService datastore = DatastoreServiceFactory.getDatastoreService();
/**
* Saves the event to the database. Returns an <code>Event</code>
* that has its id set, which the group passed to the function might not.
*/
public Event put(Event event) {
Key key = event.key();
Entity entity = key != null ? new Entity(key) : new Entity("event");
entity.setProperty("description", new Text(event.description()));
entity.setProperty("start", event.start().getEpochSecond());
entity.setProperty("duration", event.duration().toMinutes());
entity.setProperty("group", KeyFactory.stringToKey(event.groupId()));
entity.setProperty("calendarId", event.calendarId());
datastore.put(entity);
return Event.fromEntity(entity);
}
}
## Instruction:
Fix calendarId not being returned in response
## Code After:
package com.google.step.coffee.data;
import com.google.appengine.api.datastore.*;
import com.google.step.coffee.entity.Event;
public class EventStore {
private DatastoreService datastore = DatastoreServiceFactory.getDatastoreService();
/**
* Saves the event to the database. Returns an <code>Event</code>
* that has its id set, which the group passed to the function might not.
*/
public Event put(Event event) {
Key key = event.key();
Entity entity = key != null ? new Entity(key) : new Entity("event");
entity.setProperty("description", new Text(event.description()));
entity.setProperty("start", event.start().getEpochSecond());
entity.setProperty("duration", event.duration().toMinutes());
entity.setProperty("group", KeyFactory.stringToKey(event.groupId()));
entity.setProperty("calendarId", event.calendarId());
datastore.put(entity);
return event.modify()
.setId(KeyFactory.keyToString(entity.getKey()))
.build();
}
}
|
...
datastore.put(entity);
return event.modify()
.setId(KeyFactory.keyToString(entity.getKey()))
.build();
}
}
...
|
09cfd33df218725aa88d2f64d87868056c2778ba
|
indra/tests/test_biogrid.py
|
indra/tests/test_biogrid.py
|
from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
from indra.databases import biogrid_client
from indra.util import unicode_strs
from nose.plugins.attrib import attr
@attr('webservice', 'nonpublic')
def test_biogrid_request():
results = biogrid_client._send_request(['MAP2K1', 'MAPK1'])
assert results is not None
assert unicode_strs(results)
|
from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
from indra.databases import biogrid_client
from indra.util import unicode_strs
from nose.plugins.attrib import attr
from indra.sources.biogrid import process_file
from indra.statements import Complex
@attr('webservice', 'nonpublic')
def test_biogrid_request():
results = biogrid_client._send_request(['MAP2K1', 'MAPK1'])
assert results is not None
assert unicode_strs(results)
def test_biogrid_tsv():
# Download biogrid file form the web and process it
bp = process_file(None)
# We should have a lot of statementse
statements = bp.statements
assert(len(statements) > 500000)
# Any given statement should be a complex, with appropriate evidence
s0 = statements[0]
assert(isinstance(s0, Complex))
ev = s0.evidence[0]
assert(ev.source_api == 'biogrid')
assert(ev.text is None)
assert(ev.pmid is not None)
assert('tsv_row' in ev.annotations)
# The first statement in the file involves MAP2K4 and FLNC
assert(str(s0.members[0]) == 'MAP2K4()')
assert(str(s0.members[1]) == 'FLNC()')
|
Add test for downloading and parsing biogrid tsv file
|
Add test for downloading and parsing biogrid tsv file
|
Python
|
bsd-2-clause
|
johnbachman/belpy,pvtodorov/indra,sorgerlab/indra,sorgerlab/belpy,bgyori/indra,johnbachman/indra,sorgerlab/indra,pvtodorov/indra,sorgerlab/belpy,pvtodorov/indra,johnbachman/belpy,sorgerlab/belpy,johnbachman/indra,pvtodorov/indra,johnbachman/belpy,bgyori/indra,johnbachman/indra,sorgerlab/indra,bgyori/indra
|
python
|
## Code Before:
from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
from indra.databases import biogrid_client
from indra.util import unicode_strs
from nose.plugins.attrib import attr
@attr('webservice', 'nonpublic')
def test_biogrid_request():
results = biogrid_client._send_request(['MAP2K1', 'MAPK1'])
assert results is not None
assert unicode_strs(results)
## Instruction:
Add test for downloading and parsing biogrid tsv file
## Code After:
from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
from indra.databases import biogrid_client
from indra.util import unicode_strs
from nose.plugins.attrib import attr
from indra.sources.biogrid import process_file
from indra.statements import Complex
@attr('webservice', 'nonpublic')
def test_biogrid_request():
results = biogrid_client._send_request(['MAP2K1', 'MAPK1'])
assert results is not None
assert unicode_strs(results)
def test_biogrid_tsv():
# Download biogrid file form the web and process it
bp = process_file(None)
# We should have a lot of statementse
statements = bp.statements
assert(len(statements) > 500000)
# Any given statement should be a complex, with appropriate evidence
s0 = statements[0]
assert(isinstance(s0, Complex))
ev = s0.evidence[0]
assert(ev.source_api == 'biogrid')
assert(ev.text is None)
assert(ev.pmid is not None)
assert('tsv_row' in ev.annotations)
# The first statement in the file involves MAP2K4 and FLNC
assert(str(s0.members[0]) == 'MAP2K4()')
assert(str(s0.members[1]) == 'FLNC()')
|
# ... existing code ...
from indra.databases import biogrid_client
from indra.util import unicode_strs
from nose.plugins.attrib import attr
from indra.sources.biogrid import process_file
from indra.statements import Complex
@attr('webservice', 'nonpublic')
def test_biogrid_request():
# ... modified code ...
results = biogrid_client._send_request(['MAP2K1', 'MAPK1'])
assert results is not None
assert unicode_strs(results)
def test_biogrid_tsv():
# Download biogrid file form the web and process it
bp = process_file(None)
# We should have a lot of statementse
statements = bp.statements
assert(len(statements) > 500000)
# Any given statement should be a complex, with appropriate evidence
s0 = statements[0]
assert(isinstance(s0, Complex))
ev = s0.evidence[0]
assert(ev.source_api == 'biogrid')
assert(ev.text is None)
assert(ev.pmid is not None)
assert('tsv_row' in ev.annotations)
# The first statement in the file involves MAP2K4 and FLNC
assert(str(s0.members[0]) == 'MAP2K4()')
assert(str(s0.members[1]) == 'FLNC()')
# ... rest of the code ...
|
60317dda9795391dd6468b573f5e1038ae1fe384
|
src/apps/utils/db.py
|
src/apps/utils/db.py
|
from __future__ import absolute_import
def retrieve_in_order_from_db(model, ids):
"""
Retrieve entities of the given model from the RDBMS in order given their ids.
:param model: model of the entities
:param ids: ids of the entities
:return: a list of entities
"""
# Retrieve from RDBMS
entities = model.objects.in_bulk(ids)
#TODO: prefetch_related
# Order by search order
ordered_entities = [entities.get(id, None) for id in ids]
# Filter not found entities
filtered_entities = filter(None, ordered_entities)
return filtered_entities
|
from __future__ import absolute_import
def retrieve_in_order_from_db(model, ids, prefetch=True):
"""
Retrieve entities of the given model from the RDBMS in order given their ids.
:param model: model of the entities
:param ids: ids of the entities
:param prefetch: prefetch many-to-many relationships
:return: a list of entities
"""
# Prefetch related
if prefetch:
relationships = [m2m.attname for m2m in model._meta._many_to_many()]
entities = model.objects.all().prefetch_related(*relationships).in_bulk(ids)
else:
entities = model.objects.in_bulk(ids)
# Order by search order
ordered_entities = [entities.get(id, None) for id in ids]
# Filter not found entities
filtered_entities = filter(None, ordered_entities)
return filtered_entities
|
Optimize DB access: use of prefetch_related()
|
Optimize DB access: use of prefetch_related()
|
Python
|
apache-2.0
|
dvalcarce/filmyou-web,dvalcarce/filmyou-web,dvalcarce/filmyou-web
|
python
|
## Code Before:
from __future__ import absolute_import
def retrieve_in_order_from_db(model, ids):
"""
Retrieve entities of the given model from the RDBMS in order given their ids.
:param model: model of the entities
:param ids: ids of the entities
:return: a list of entities
"""
# Retrieve from RDBMS
entities = model.objects.in_bulk(ids)
#TODO: prefetch_related
# Order by search order
ordered_entities = [entities.get(id, None) for id in ids]
# Filter not found entities
filtered_entities = filter(None, ordered_entities)
return filtered_entities
## Instruction:
Optimize DB access: use of prefetch_related()
## Code After:
from __future__ import absolute_import
def retrieve_in_order_from_db(model, ids, prefetch=True):
"""
Retrieve entities of the given model from the RDBMS in order given their ids.
:param model: model of the entities
:param ids: ids of the entities
:param prefetch: prefetch many-to-many relationships
:return: a list of entities
"""
# Prefetch related
if prefetch:
relationships = [m2m.attname for m2m in model._meta._many_to_many()]
entities = model.objects.all().prefetch_related(*relationships).in_bulk(ids)
else:
entities = model.objects.in_bulk(ids)
# Order by search order
ordered_entities = [entities.get(id, None) for id in ids]
# Filter not found entities
filtered_entities = filter(None, ordered_entities)
return filtered_entities
|
# ... existing code ...
from __future__ import absolute_import
def retrieve_in_order_from_db(model, ids, prefetch=True):
"""
Retrieve entities of the given model from the RDBMS in order given their ids.
:param model: model of the entities
:param ids: ids of the entities
:param prefetch: prefetch many-to-many relationships
:return: a list of entities
"""
# Prefetch related
if prefetch:
relationships = [m2m.attname for m2m in model._meta._many_to_many()]
entities = model.objects.all().prefetch_related(*relationships).in_bulk(ids)
else:
entities = model.objects.in_bulk(ids)
# Order by search order
ordered_entities = [entities.get(id, None) for id in ids]
# ... rest of the code ...
|
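For illustration only, a minimal usage sketch of the helper changed above; the import paths, the Movie model and the ids are assumptions and do not come from the record.
from apps.utils.db import retrieve_in_order_from_db   # assumes src/ is on sys.path
from apps.films.models import Movie                    # hypothetical model with M2M fields
ids = [42, 7, 13]                                       # e.g. a ranking order from a search backend
movies = retrieve_in_order_from_db(Movie, ids)                  # in_bulk plus prefetch of every M2M relation
plain = retrieve_in_order_from_db(Movie, ids, prefetch=False)   # plain in_bulk, no prefetching
# Both results preserve the order of ids; ids with no matching row are dropped.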
d1e1ce5612e1437b2776043f3b6276be5b1d25a6
|
csv_converter.py
|
csv_converter.py
|
import csv
class CsvConverter:
def __init__(self, csv_file_path):
self.csv_file_path = csv_file_path
self.rows = []
self.source_product_code = "product_code"
self.source_quantity = "quantity"
def clear(self):
self.rows = []
def addRow(self, row):
self.rows.append(row)
def getRow(self, index):
return self.rows[index]
def setSourceColumns(self, source_product_code, source_quantity):
self.source_product_code = source_product_code
self.source_quantity = source_quantity
def setTargetColumns(self, target_product_code, target_quantity):
self.target_product_code = target_product_code
self.target_quantity = target_quantity
def convertRow(self, row):
return {
'product_code': int(row[self.source_product_code]),
'quantity': int(row[self.source_quantity])
}
def read_file(self):
with open(self.csv_file_path, 'rb') as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
self.addRow(self.convertRow(row))
|
import csv
class CsvConverter:
def __init__(self, csv_file_path):
self.csv_file_path = csv_file_path
self.rows = []
self.source_product_code = "product_code"
self.source_quantity = "quantity"
def clear(self):
self.rows = []
def addRow(self, row):
self.rows.append(row)
def getRow(self, index):
return self.rows[index]
def setSourceColumns(self, source_product_code, source_quantity):
self.source_product_code = source_product_code
self.source_quantity = source_quantity
def setTargetColumns(self, target_product_code, target_quantity):
self.target_product_code = target_product_code
self.target_quantity = target_quantity
def convertRow(self, row):
if not row[self.source_product_code]:
raise ValueError
return {
'product_code': row[self.source_product_code],
'quantity': int(row[self.source_quantity])
}
def read_file(self):
with open(self.csv_file_path, 'rb') as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
self.addRow(self.convertRow(row))
|
Add checking empty product code
|
Add checking empty product code
|
Python
|
mit
|
stormaaja/csvconverter,stormaaja/csvconverter,stormaaja/csvconverter
|
python
|
## Code Before:
import csv
class CsvConverter:
def __init__(self, csv_file_path):
self.csv_file_path = csv_file_path
self.rows = []
self.source_product_code = "product_code"
self.source_quantity = "quantity"
def clear(self):
self.rows = []
def addRow(self, row):
self.rows.append(row)
def getRow(self, index):
return self.rows[index]
def setSourceColumns(self, source_product_code, source_quantity):
self.source_product_code = source_product_code
self.source_quantity = source_quantity
def setTargetColumns(self, target_product_code, target_quantity):
self.target_product_code = target_product_code
self.target_quantity = target_quantity
def convertRow(self, row):
return {
'product_code': int(row[self.source_product_code]),
'quantity': int(row[self.source_quantity])
}
def read_file(self):
with open(self.csv_file_path, 'rb') as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
self.addRow(self.convertRow(row))
## Instruction:
Add checking empty product code
## Code After:
import csv
class CsvConverter:
def __init__(self, csv_file_path):
self.csv_file_path = csv_file_path
self.rows = []
self.source_product_code = "product_code"
self.source_quantity = "quantity"
def clear(self):
self.rows = []
def addRow(self, row):
self.rows.append(row)
def getRow(self, index):
return self.rows[index]
def setSourceColumns(self, source_product_code, source_quantity):
self.source_product_code = source_product_code
self.source_quantity = source_quantity
def setTargetColumns(self, target_product_code, target_quantity):
self.target_product_code = target_product_code
self.target_quantity = target_quantity
def convertRow(self, row):
if not row[self.source_product_code]:
raise ValueError
return {
'product_code': row[self.source_product_code],
'quantity': int(row[self.source_quantity])
}
def read_file(self):
with open(self.csv_file_path, 'rb') as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
self.addRow(self.convertRow(row))
|
...
self.target_quantity = target_quantity
def convertRow(self, row):
if not row[self.source_product_code]:
raise ValueError
return {
'product_code': row[self.source_product_code],
'quantity': int(row[self.source_quantity])
}
...
|
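A small, hedged usage sketch of the convertRow change above; the file name and the column names are invented for the example and are not taken from the record.
converter = CsvConverter("stock.csv")        # illustrative path; read_file is not called here
converter.setSourceColumns("sku", "qty")     # assumed source column names
print(converter.convertRow({"sku": "ABC-123", "qty": "4"}))   # {'product_code': 'ABC-123', 'quantity': 4}
try:
    converter.convertRow({"sku": "", "qty": "4"})             # empty product code is now rejected
except ValueError:
    print("empty product code rejected")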
f9f9f385e4f425da0537680ba6afd2ce81bfb774
|
rembed/test/integration_test.py
|
rembed/test/integration_test.py
|
from hamcrest import *
import pytest
@pytest.mark.xfail
def test_should_get_correct_embedding():
consumer = REmbedConsumer()
embedding = consumer.embed('https://twitter.com/BarackObama/status/266031293945503744')
assert_that(embedding, contains_string('Four more years.'))
|
from rembed import consumer
from hamcrest import *
import pytest
@pytest.mark.xfail
def test_should_get_correct_embedding():
embedding = consumer.embed('https://twitter.com/BarackObama/status/266031293945503744')
assert_that(embedding, contains_string('Four more years.'))
|
Fix import in integration test
|
Fix import in integration test
|
Python
|
mit
|
tino/pyembed,pyembed/pyembed,pyembed/pyembed
|
python
|
## Code Before:
from hamcrest import *
import pytest
@pytest.mark.xfail
def test_should_get_correct_embedding():
consumer = REmbedConsumer()
embedding = consumer.embed('https://twitter.com/BarackObama/status/266031293945503744')
assert_that(embedding, contains_string('Four more years.'))
## Instruction:
Fix import in integration test
## Code After:
from rembed import consumer
from hamcrest import *
import pytest
@pytest.mark.xfail
def test_should_get_correct_embedding():
embedding = consumer.embed('https://twitter.com/BarackObama/status/266031293945503744')
assert_that(embedding, contains_string('Four more years.'))
|
...
from rembed import consumer
from hamcrest import *
import pytest
@pytest.mark.xfail
def test_should_get_correct_embedding():
embedding = consumer.embed('https://twitter.com/BarackObama/status/266031293945503744')
assert_that(embedding, contains_string('Four more years.'))
...
|
f7fa8b72b8d8d1b7bfcd6c738520fc87cd20e320
|
ixdjango/tests/__init__.py
|
ixdjango/tests/__init__.py
|
import subprocess
from django.test.simple import DjangoTestSuiteRunner
from django.utils import unittest
from ixdjango.test_suite.utils import (CoreUtilsTests)
class TestRunner(DjangoTestSuiteRunner):
"""
Place where we hook into DjangoTestSuiteRunner
"""
def setup_test_environment(self, *args, **kwargs):
"""
Hook to set up the test environment
"""
from django.conf import settings
print "Running hooks from %s" % __name__
username = settings.DATABASES['default']['USER']
print " - Ensure %s can create a test DB" % username
subprocess.call(['sudo', 'su', 'postgres', '-c',
"psql -c 'alter user %s with createdb;'" % username])
return super(TestRunner, self).setup_test_environment(*args, **kwargs)
def suite():
"""
Put together a suite of tests to run for the application
"""
loader = unittest.TestLoader()
all_tests = unittest.TestSuite([
#
# Utilities test cases
#
loader.loadTestsFromTestCase(CoreUtilsTests)
])
return all_tests
|
import subprocess
try:
from django.test.runner import DiscoverRunner as BaseTestRunner
except ImportError:
from django.test.simple import DjangoTestSuiteRunner as BaseTestRunner
from django.utils import unittest
from ixdjango.test_suite.utils import (CoreUtilsTests)
class TestRunner(BaseTestRunner):
"""
Place where we hook into DjangoTestSuiteRunner
"""
def setup_test_environment(self, *args, **kwargs):
"""
Hook to set up the test environment
"""
from django.conf import settings
print "Running hooks from %s" % __name__
username = settings.DATABASES['default']['USER']
print " - Ensure %s can create a test DB" % username
subprocess.call(['sudo', 'su', 'postgres', '-c',
"psql -c 'alter user %s with createdb;'" % username])
return super(TestRunner, self).setup_test_environment(*args, **kwargs)
def suite():
"""
Put together a suite of tests to run for the application
"""
loader = unittest.TestLoader()
all_tests = unittest.TestSuite([
#
# Utilities test cases
#
loader.loadTestsFromTestCase(CoreUtilsTests)
])
return all_tests
|
Use DiscoverRunner from Django 1.6 if available
|
Use DiscoverRunner from Django 1.6 if available
|
Python
|
mit
|
infoxchange/ixdjango
|
python
|
## Code Before:
import subprocess
from django.test.simple import DjangoTestSuiteRunner
from django.utils import unittest
from ixdjango.test_suite.utils import (CoreUtilsTests)
class TestRunner(DjangoTestSuiteRunner):
"""
Place where we hook into DjangoTestSuiteRunner
"""
def setup_test_environment(self, *args, **kwargs):
"""
Hook to set up the test environment
"""
from django.conf import settings
print "Running hooks from %s" % __name__
username = settings.DATABASES['default']['USER']
print " - Ensure %s can create a test DB" % username
subprocess.call(['sudo', 'su', 'postgres', '-c',
"psql -c 'alter user %s with createdb;'" % username])
return super(TestRunner, self).setup_test_environment(*args, **kwargs)
def suite():
"""
Put together a suite of tests to run for the application
"""
loader = unittest.TestLoader()
all_tests = unittest.TestSuite([
#
# Utilities test cases
#
loader.loadTestsFromTestCase(CoreUtilsTests)
])
return all_tests
## Instruction:
Use DiscoverRunner from Django 1.6 if available
## Code After:
import subprocess
try:
from django.test.runner import DiscoverRunner as BaseTestRunner
except ImportError:
from django.test.simple import DjangoTestSuiteRunner as BaseTestRunner
from django.utils import unittest
from ixdjango.test_suite.utils import (CoreUtilsTests)
class TestRunner(BaseTestRunner):
"""
Place where we hook into DjangoTestSuiteRunner
"""
def setup_test_environment(self, *args, **kwargs):
"""
Hook to set up the test environment
"""
from django.conf import settings
print "Running hooks from %s" % __name__
username = settings.DATABASES['default']['USER']
print " - Ensure %s can create a test DB" % username
subprocess.call(['sudo', 'su', 'postgres', '-c',
"psql -c 'alter user %s with createdb;'" % username])
return super(TestRunner, self).setup_test_environment(*args, **kwargs)
def suite():
"""
Put together a suite of tests to run for the application
"""
loader = unittest.TestLoader()
all_tests = unittest.TestSuite([
#
# Utilities test cases
#
loader.loadTestsFromTestCase(CoreUtilsTests)
])
return all_tests
|
...
import subprocess
try:
from django.test.runner import DiscoverRunner as BaseTestRunner
except ImportError:
from django.test.simple import DjangoTestSuiteRunner as BaseTestRunner
from django.utils import unittest
from ixdjango.test_suite.utils import (CoreUtilsTests)
class TestRunner(BaseTestRunner):
"""
Place where we hook into DjangoTestSuiteRunner
"""
...
|
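For orientation, a sketch of how a project would point Django at this runner; the dotted path is inferred from the file location ixdjango/tests/__init__.py and is an assumption, not part of the record.
# settings.py (illustrative)
TEST_RUNNER = 'ixdjango.tests.TestRunner'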
2195d0bf6546025726b2e3502db49aa0964f859a
|
src/main/java/jp/gecko655/fujimiya/bot/SchedulerMain.java
|
src/main/java/jp/gecko655/fujimiya/bot/SchedulerMain.java
|
package jp.gecko655.fujimiya.bot;
import org.quartz.*;
import org.quartz.impl.StdSchedulerFactory;
import static org.quartz.JobBuilder.newJob;
import static org.quartz.SimpleScheduleBuilder.repeatSecondlyForever;
import static org.quartz.TriggerBuilder.newTrigger;
public class SchedulerMain {
private static Scheduler scheduler;
public static void main(String[] args) throws SchedulerException {
System.out.println("Scheduler Started!!!");
scheduler = StdSchedulerFactory.getDefaultScheduler();
scheduler.start();
setSchedule(FujimiyaReply.class, 60*5);
//setSchedule(FujimiyaRemove.class, 60*5);
}
private static void setSchedule(Class<? extends Job> classForExecute, int intervalSeconds) throws SchedulerException {
JobDetail jobDetail = newJob(classForExecute).build();
Trigger trigger = newTrigger()
.startNow()
.withSchedule(repeatSecondlyForever(intervalSeconds))
.build();
scheduler.scheduleJob(jobDetail, trigger);
System.out.println(classForExecute.getName()+" has been scheduled in interval: "+intervalSeconds+" [s]");
}
}
|
package jp.gecko655.fujimiya.bot;
import java.time.ZoneId;
import java.util.TimeZone;
import org.quartz.*;
import org.quartz.impl.StdSchedulerFactory;
import static org.quartz.JobBuilder.newJob;
import static org.quartz.SimpleScheduleBuilder.repeatSecondlyForever;
import static org.quartz.TriggerBuilder.newTrigger;
public class SchedulerMain {
private static Scheduler scheduler;
public static void main(String[] args) throws SchedulerException {
System.out.println("Scheduler Started!!!");
scheduler = StdSchedulerFactory.getDefaultScheduler();
scheduler.start();
setSchedule(FujimiyaReply.class, repeatSecondlyForever(60*5));
setSchedule(FujimiyaBot.class, repeatSecondlyForever(60*60*4));
setSchedule(
FujimiyaLunch.class,
CronScheduleBuilder
.dailyAtHourAndMinute(12, 25)
.inTimeZone(TimeZone.getTimeZone("JST")));
setSchedule(
FujimiyaRemove.class,
CronScheduleBuilder
.atHourAndMinuteOnGivenDaysOfWeek(9, 0, DateBuilder.MONDAY)
.inTimeZone(TimeZone.getTimeZone("JST")));
}
private static void setSchedule(Class<? extends Job> classForExecute, ScheduleBuilder<? extends Trigger> schedule) throws SchedulerException {
JobDetail jobDetail = newJob(classForExecute).build();
Trigger trigger = newTrigger()
.startNow()
.withSchedule(schedule)
.build();
scheduler.scheduleJob(jobDetail, trigger);
System.out.println(classForExecute.getName()+" has been scheduled");
}
}
|
Set schedule for 4 crons.
|
Set schedule for 4 crons.
|
Java
|
mit
|
gecko655/hestiaBot,gecko655/fujimiya,gecko655/hestia,gecko655/fujimiya_heroku,gecko655/TomoriNao,gecko655/hestia,gecko655/fujimiya,gecko655/fujimiya_heroku,gecko655/hestiaBot,gecko655/TomoriNao
|
java
|
## Code Before:
package jp.gecko655.fujimiya.bot;
import org.quartz.*;
import org.quartz.impl.StdSchedulerFactory;
import static org.quartz.JobBuilder.newJob;
import static org.quartz.SimpleScheduleBuilder.repeatSecondlyForever;
import static org.quartz.TriggerBuilder.newTrigger;
public class SchedulerMain {
private static Scheduler scheduler;
public static void main(String[] args) throws SchedulerException {
System.out.println("Scheduler Started!!!");
scheduler = StdSchedulerFactory.getDefaultScheduler();
scheduler.start();
setSchedule(FujimiyaReply.class, 60*5);
//setSchedule(FujimiyaRemove.class, 60*5);
}
private static void setSchedule(Class<? extends Job> classForExecute, int intervalSeconds) throws SchedulerException {
JobDetail jobDetail = newJob(classForExecute).build();
Trigger trigger = newTrigger()
.startNow()
.withSchedule(repeatSecondlyForever(intervalSeconds))
.build();
scheduler.scheduleJob(jobDetail, trigger);
System.out.println(classForExecute.getName()+" has been scheduled in interval: "+intervalSeconds+" [s]");
}
}
## Instruction:
Set schedule for 4 crons.
## Code After:
package jp.gecko655.fujimiya.bot;
import java.time.ZoneId;
import java.util.TimeZone;
import org.quartz.*;
import org.quartz.impl.StdSchedulerFactory;
import static org.quartz.JobBuilder.newJob;
import static org.quartz.SimpleScheduleBuilder.repeatSecondlyForever;
import static org.quartz.TriggerBuilder.newTrigger;
public class SchedulerMain {
private static Scheduler scheduler;
public static void main(String[] args) throws SchedulerException {
System.out.println("Scheduler Started!!!");
scheduler = StdSchedulerFactory.getDefaultScheduler();
scheduler.start();
setSchedule(FujimiyaReply.class, repeatSecondlyForever(60*5));
setSchedule(FujimiyaBot.class, repeatSecondlyForever(60*60*4));
setSchedule(
FujimiyaLunch.class,
CronScheduleBuilder
.dailyAtHourAndMinute(12, 25)
.inTimeZone(TimeZone.getTimeZone("JST")));
setSchedule(
FujimiyaRemove.class,
CronScheduleBuilder
.atHourAndMinuteOnGivenDaysOfWeek(9, 0, DateBuilder.MONDAY)
.inTimeZone(TimeZone.getTimeZone("JST")));
}
private static void setSchedule(Class<? extends Job> classForExecute, ScheduleBuilder<? extends Trigger> schedule) throws SchedulerException {
JobDetail jobDetail = newJob(classForExecute).build();
Trigger trigger = newTrigger()
.startNow()
.withSchedule(schedule)
.build();
scheduler.scheduleJob(jobDetail, trigger);
System.out.println(classForExecute.getName()+" has been scheduled");
}
}
|
# ... existing code ...
package jp.gecko655.fujimiya.bot;
import java.time.ZoneId;
import java.util.TimeZone;
import org.quartz.*;
import org.quartz.impl.StdSchedulerFactory;
# ... modified code ...
scheduler = StdSchedulerFactory.getDefaultScheduler();
scheduler.start();
setSchedule(FujimiyaReply.class, repeatSecondlyForever(60*5));
setSchedule(FujimiyaBot.class, repeatSecondlyForever(60*60*4));
setSchedule(
FujimiyaLunch.class,
CronScheduleBuilder
.dailyAtHourAndMinute(12, 25)
.inTimeZone(TimeZone.getTimeZone("JST")));
setSchedule(
FujimiyaRemove.class,
CronScheduleBuilder
.atHourAndMinuteOnGivenDaysOfWeek(9, 0, DateBuilder.MONDAY)
.inTimeZone(TimeZone.getTimeZone("JST")));
}
private static void setSchedule(Class<? extends Job> classForExecute, ScheduleBuilder<? extends Trigger> schedule) throws SchedulerException {
JobDetail jobDetail = newJob(classForExecute).build();
Trigger trigger = newTrigger()
.startNow()
.withSchedule(schedule)
.build();
scheduler.scheduleJob(jobDetail, trigger);
System.out.println(classForExecute.getName()+" has been scheduled");
}
# ... rest of the code ...
|
452b67fa4fe5d9f34a98971e377bbaa1b978907b
|
superblock.py
|
superblock.py
|
import sys
import string
from binascii import hexlify
BLOCKSIZE = 512
def block_printer(filename, offset, block_count):
def nonprintable_replace(char):
if char not in string.printable:
return '.'
if char in '\n\r\t\x0b\x0c':
return '.'
return char
with open(filename, 'rb') as f:
f.seek(offset * BLOCKSIZE)
# Loop over blocks
for i in xrange(block_count):
# Loop over bytes
for j in xrange(BLOCKSIZE / 8):
part1 = f.read(4)
part2 = f.read(4)
print '{0:2}: {1} {2} {3}'.format(j+1, hexlify(part1), hexlify(part2), ''.join(map(nonprintable_replace, part1 + part2)))
if __name__ == '__main__':
if len(sys.argv) < 2:
print 'Usage: superblock.py <filename>'
sys.exit(1)
filename = sys.argv[1]
print 'Printing superblock (bytes 1024-1535) of file %s.\n' % filename
print ''.center(5) + 'HEX'.center(18) + 'ASCII'.center(8)
block_printer(filename, 2, 1)
|
import sys
import string
from binascii import hexlify
BLOCKSIZE = 512
def nonprintable_replace(char):
if char not in string.printable:
return '.'
if char in '\n\r\t\x0b\x0c':
return '.'
return char
def block_printer(filename, offset, block_count):
with open(filename, 'rb') as f:
f.seek(offset * BLOCKSIZE)
# Loop over blocks
for i in xrange(block_count):
# Loop over bytes
for j in xrange(BLOCKSIZE / 16):
word = f.read(4), f.read(4), f.read(4), f.read(4)
hex_string = ' '.join(map(hexlify, word))
ascii_string = ''.join(map(nonprintable_replace, ''.join(word)))
print '{0:2}: {1} {2}'.format(j + 1, hex_string, ascii_string)
if __name__ == '__main__':
if len(sys.argv) < 2:
print 'Usage: superblock.py <filename>'
sys.exit(1)
filename = sys.argv[1]
print '\nPrinting superblock (bytes 1024-1535) of file %s.\n' % filename
print ' ' * 5 + 'HEX'.center(35) + ' ' + 'ASCII'.center(16)
block_printer(filename, 2, 1)
|
Print 16 bytes per line
|
Print 16 bytes per line
|
Python
|
mit
|
dbrgn/superblock
|
python
|
## Code Before:
import sys
import string
from binascii import hexlify
BLOCKSIZE = 512
def block_printer(filename, offset, block_count):
def nonprintable_replace(char):
if char not in string.printable:
return '.'
if char in '\n\r\t\x0b\x0c':
return '.'
return char
with open(filename, 'rb') as f:
f.seek(offset * BLOCKSIZE)
# Loop over blocks
for i in xrange(block_count):
# Loop over bytes
for j in xrange(BLOCKSIZE / 8):
part1 = f.read(4)
part2 = f.read(4)
print '{0:2}: {1} {2} {3}'.format(j+1, hexlify(part1), hexlify(part2), ''.join(map(nonprintable_replace, part1 + part2)))
if __name__ == '__main__':
if len(sys.argv) < 2:
print 'Usage: superblock.py <filename>'
sys.exit(1)
filename = sys.argv[1]
print 'Printing superblock (bytes 1024-1535) of file %s.\n' % filename
print ''.center(5) + 'HEX'.center(18) + 'ASCII'.center(8)
block_printer(filename, 2, 1)
## Instruction:
Print 16 bytes per line
## Code After:
import sys
import string
from binascii import hexlify
BLOCKSIZE = 512
def nonprintable_replace(char):
if char not in string.printable:
return '.'
if char in '\n\r\t\x0b\x0c':
return '.'
return char
def block_printer(filename, offset, block_count):
with open(filename, 'rb') as f:
f.seek(offset * BLOCKSIZE)
# Loop over blocks
for i in xrange(block_count):
# Loop over bytes
for j in xrange(BLOCKSIZE / 16):
word = f.read(4), f.read(4), f.read(4), f.read(4)
hex_string = ' '.join(map(hexlify, word))
ascii_string = ''.join(map(nonprintable_replace, ''.join(word)))
print '{0:2}: {1} {2}'.format(j + 1, hex_string, ascii_string)
if __name__ == '__main__':
if len(sys.argv) < 2:
print 'Usage: superblock.py <filename>'
sys.exit(1)
filename = sys.argv[1]
print '\nPrinting superblock (bytes 1024-1535) of file %s.\n' % filename
print ' ' * 5 + 'HEX'.center(35) + ' ' + 'ASCII'.center(16)
block_printer(filename, 2, 1)
|
...
BLOCKSIZE = 512
def nonprintable_replace(char):
if char not in string.printable:
return '.'
if char in '\n\r\t\x0b\x0c':
return '.'
return char
def block_printer(filename, offset, block_count):
with open(filename, 'rb') as f:
f.seek(offset * BLOCKSIZE)
...
for i in xrange(block_count):
# Loop over bytes
for j in xrange(BLOCKSIZE / 16):
word = f.read(4), f.read(4), f.read(4), f.read(4)
hex_string = ' '.join(map(hexlify, word))
ascii_string = ''.join(map(nonprintable_replace, ''.join(word)))
print '{0:2}: {1} {2}'.format(j + 1, hex_string, ascii_string)
if __name__ == '__main__':
...
sys.exit(1)
filename = sys.argv[1]
print '\nPrinting superblock (bytes 1024-1535) of file %s.\n' % filename
print ' ' * 5 + 'HEX'.center(35) + ' ' + 'ASCII'.center(16)
block_printer(filename, 2, 1)
...
|
85dcb6ff036d03fd1fadc62a519147cf6b9ca8de
|
floq/blockmatrix.py
|
floq/blockmatrix.py
|
import numpy as np
# Provide functions to get/set blocks in numpy arrays
def get_block_from_matrix(matrix,dim_block,n_block,row,column):
start_row = row*dim_block
start_column = column*dim_block
stop_row = start_row+dim_block
stop_column = start_column+dim_block
return matrix[start_row:stop_row,start_column:stop_column]
def set_block_in_matrix(block,matrix,dim_block,n_block,row,column):
start_row = row*dim_block
start_column = column*dim_block
stop_row = start_row+dim_block
stop_column = start_column+dim_block
matrix[start_row:stop_row,start_column:stop_column] = block
|
import numpy as np
# Provide functions to get/set blocks in numpy arrays
def get_block_from_matrix(matrix,dim_block,n_block,row,col):
start_row = row*dim_block
start_col = col*dim_block
stop_row = start_row+dim_block
stop_col = start_col+dim_block
return matrix[start_row:stop_row,start_col:stop_col]
def set_block_in_matrix(block,matrix,dim_block,n_block,row,col):
start_row = row*dim_block
start_col = col*dim_block
stop_row = start_row+dim_block
stop_col = start_col+dim_block
matrix[start_row:stop_row,start_col:stop_col] = block
|
Rename column -> col for consistency with row
|
Rename column -> col for consistency with row
|
Python
|
mit
|
sirmarcel/floq
|
python
|
## Code Before:
import numpy as np
# Provide functions to get/set blocks in numpy arrays
def get_block_from_matrix(matrix,dim_block,n_block,row,column):
start_row = row*dim_block
start_column = column*dim_block
stop_row = start_row+dim_block
stop_column = start_column+dim_block
return matrix[start_row:stop_row,start_column:stop_column]
def set_block_in_matrix(block,matrix,dim_block,n_block,row,column):
start_row = row*dim_block
start_column = column*dim_block
stop_row = start_row+dim_block
stop_column = start_column+dim_block
matrix[start_row:stop_row,start_column:stop_column] = block
## Instruction:
Rename column -> col for consistency with row
## Code After:
import numpy as np
# Provide functions to get/set blocks in numpy arrays
def get_block_from_matrix(matrix,dim_block,n_block,row,col):
start_row = row*dim_block
start_col = col*dim_block
stop_row = start_row+dim_block
stop_col = start_col+dim_block
return matrix[start_row:stop_row,start_col:stop_col]
def set_block_in_matrix(block,matrix,dim_block,n_block,row,col):
start_row = row*dim_block
start_col = col*dim_block
stop_row = start_row+dim_block
stop_col = start_col+dim_block
matrix[start_row:stop_row,start_col:stop_col] = block
|
// ... existing code ...
# Provide functions to get/set blocks in numpy arrays
def get_block_from_matrix(matrix,dim_block,n_block,row,col):
start_row = row*dim_block
start_col = col*dim_block
stop_row = start_row+dim_block
stop_col = start_col+dim_block
return matrix[start_row:stop_row,start_col:stop_col]
def set_block_in_matrix(block,matrix,dim_block,n_block,row,col):
start_row = row*dim_block
start_col = col*dim_block
stop_row = start_row+dim_block
stop_col = start_col+dim_block
matrix[start_row:stop_row,start_col:stop_col] = block
// ... rest of the code ...
|
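A short, self-contained sketch of what the renamed block helpers do; the 4x4 matrix and the printed values are an invented example, assuming the floq package is importable.
import numpy as np
from floq.blockmatrix import get_block_from_matrix, set_block_in_matrix
m = np.arange(16).reshape(4, 4)                 # viewed as a 2x2 grid of 2x2 blocks
print(get_block_from_matrix(m, 2, 2, 0, 1))     # top-right block: [[2 3] [6 7]]
set_block_in_matrix(np.zeros((2, 2), dtype=int), m, 2, 2, 1, 0)
print(m)                                        # bottom-left 2x2 block is now zeros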
e84fa6d16a428b6d9f30f2c703bedf990d44becd
|
src/test/java/com/cloudera/data/hdfs/TestHDFSDatasetWriter.java
|
src/test/java/com/cloudera/data/hdfs/TestHDFSDatasetWriter.java
|
package com.cloudera.data.hdfs;
import java.io.File;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.Schema.Type;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import com.google.common.io.Files;
public class TestHDFSDatasetWriter {
private File testDirectory;
private FileSystem fileSystem;
@Before
public void setUp() throws IOException {
testDirectory = Files.createTempDir();
Configuration conf = new Configuration();
conf.set("fs.default.name", "file:///");
fileSystem = FileSystem.get(conf);
}
@After
public void tearDown() throws IOException {
fileSystem.delete(new Path(testDirectory.getAbsolutePath()), true);
}
@Test
public void testWrite() throws IOException {
HDFSDatasetWriter<String> writer = new HDFSDatasetWriter<String>(
fileSystem, new Path(testDirectory.getAbsolutePath(), "write-1.avro"),
Schema.create(Type.STRING));
writer.open();
for (int i = 0; i < 10; i++) {
writer.write("entry " + i);
}
writer.close();
}
}
|
package com.cloudera.data.hdfs;
import java.io.File;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.Schema.Type;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import com.google.common.io.Files;
public class TestHDFSDatasetWriter {
private File testDirectory;
private FileSystem fileSystem;
@Before
public void setUp() throws IOException {
testDirectory = Files.createTempDir();
Configuration conf = new Configuration();
conf.set("fs.default.name", "file:///");
fileSystem = FileSystem.get(conf);
}
@After
public void tearDown() throws IOException {
fileSystem.delete(new Path(testDirectory.getAbsolutePath()), true);
}
@Test
public void testWrite() throws IOException {
HDFSDatasetWriter<String> writer = new HDFSDatasetWriter<String>(
fileSystem, new Path(testDirectory.getAbsolutePath(), "write-1.avro"),
Schema.create(Type.STRING));
writer.open();
for (int i = 0; i < 100; i++) {
writer.write("entry " + i);
if (i % 10 == 0) {
writer.flush();
}
}
writer.close();
}
}
|
Write 100 records and flush every 10 during HDFSDatasetWriter write test.
|
Write 100 records and flush every 10 during HDFSDatasetWriter write test.
|
Java
|
apache-2.0
|
busbey/kite,dlanza1/kite,bbrownz/kite,ronanstokes/kite,scalingdata/cdk,kite-sdk/kite,yuzhu712/cdk,tomwhite/kite,StephanieMak/kite,yuzhu712/cdk,ronanstokes/kite,rbrush/kite,prazanna/kite,andrewrothstein/kite,stevek-ngdata/kite,bbaugher/kite,rbrush/kite,EdwardSkoviak/kite,megnataraj/kite,whoschek/kite,bbaugher/kite,bbaugher/kite,andrewrothstein/kite,StephanieMak/kite,rdblue/kite,StephanieMak/kite,stevek-ngdata/kite,bbrownz/kite,cloudera/cdk,EdwardSkoviak/kite,megnataraj/kite,tinkujohn/kite,scalingdata/cdk,cloudera/cdk,andrewrothstein/kite,rdblue/kite,scalingdata/cdk,prazanna/kite,busbey/kite,joey/kite,gabrielreid/kite,EdwardSkoviak/kite,mkwhitacre/kite,bbrownz/kite,rdblue/kite,joey/kite,ronanstokes/kite,gabrielreid/kite,yuzhu712/cdk,whoschek/kite,busbey/kite,tinkujohn/kite,megnataraj/kite,mkwhitacre/kite,kite-sdk/kite,dlanza1/kite,tomwhite/kite,gabrielreid/kite,tinkujohn/kite,mkwhitacre/kite,kite-sdk/kite,stevek-ngdata/kite,joey/kite,rbrush/kite,prazanna/kite
|
java
|
## Code Before:
package com.cloudera.data.hdfs;
import java.io.File;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.Schema.Type;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import com.google.common.io.Files;
public class TestHDFSDatasetWriter {
private File testDirectory;
private FileSystem fileSystem;
@Before
public void setUp() throws IOException {
testDirectory = Files.createTempDir();
Configuration conf = new Configuration();
conf.set("fs.default.name", "file:///");
fileSystem = FileSystem.get(conf);
}
@After
public void tearDown() throws IOException {
fileSystem.delete(new Path(testDirectory.getAbsolutePath()), true);
}
@Test
public void testWrite() throws IOException {
HDFSDatasetWriter<String> writer = new HDFSDatasetWriter<String>(
fileSystem, new Path(testDirectory.getAbsolutePath(), "write-1.avro"),
Schema.create(Type.STRING));
writer.open();
for (int i = 0; i < 10; i++) {
writer.write("entry " + i);
}
writer.close();
}
}
## Instruction:
Write 100 records and flush every 10 during HDFSDatasetWriter write test.
## Code After:
package com.cloudera.data.hdfs;
import java.io.File;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.Schema.Type;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import com.google.common.io.Files;
public class TestHDFSDatasetWriter {
private File testDirectory;
private FileSystem fileSystem;
@Before
public void setUp() throws IOException {
testDirectory = Files.createTempDir();
Configuration conf = new Configuration();
conf.set("fs.default.name", "file:///");
fileSystem = FileSystem.get(conf);
}
@After
public void tearDown() throws IOException {
fileSystem.delete(new Path(testDirectory.getAbsolutePath()), true);
}
@Test
public void testWrite() throws IOException {
HDFSDatasetWriter<String> writer = new HDFSDatasetWriter<String>(
fileSystem, new Path(testDirectory.getAbsolutePath(), "write-1.avro"),
Schema.create(Type.STRING));
writer.open();
for (int i = 0; i < 100; i++) {
writer.write("entry " + i);
if (i % 10 == 0) {
writer.flush();
}
}
writer.close();
}
}
|
# ... existing code ...
writer.open();
for (int i = 0; i < 100; i++) {
writer.write("entry " + i);
if (i % 10 == 0) {
writer.flush();
}
}
writer.close();
# ... rest of the code ...
|
d9dce6f97019d688750c8143777d2c9e2acd4170
|
qtpy/QtOpenGLWidgets.py
|
qtpy/QtOpenGLWidgets.py
|
"""Provides QtOpenGLWidgets classes and functions."""
from . import PYQT5, PYQT6, PYSIDE2, PYSIDE6, QtBindingsNotFoundError, QtBindingMissingModuleError
if PYQT5:
raise QtBindingMissingModuleError(name='QtTextToSpeech')
elif PYQT6:
from PyQt6.QtOpenGLWidgets import *
elif PYSIDE2:
raise QtBindingMissingModuleError(name='QtTextToSpeech')
elif PYSIDE6:
from PySide6.QtOpenGLWidgets import *
else:
raise QtBindingsNotFoundError()
|
"""Provides QtOpenGLWidgets classes and functions."""
from . import PYQT5, PYQT6, PYSIDE2, PYSIDE6, QtBindingsNotFoundError, QtBindingMissingModuleError
if PYQT5:
raise QtBindingMissingModuleError(name='QtOpenGLWidgets')
elif PYQT6:
from PyQt6.QtOpenGLWidgets import *
elif PYSIDE2:
raise QtBindingMissingModuleError(name='QtOpenGLWidgets')
elif PYSIDE6:
from PySide6.QtOpenGLWidgets import *
else:
raise QtBindingsNotFoundError()
|
Fix wrong module name in error message
|
Fix wrong module name in error message
|
Python
|
mit
|
spyder-ide/qtpy
|
python
|
## Code Before:
"""Provides QtOpenGLWidgets classes and functions."""
from . import PYQT5, PYQT6, PYSIDE2, PYSIDE6, QtBindingsNotFoundError, QtBindingMissingModuleError
if PYQT5:
raise QtBindingMissingModuleError(name='QtTextToSpeech')
elif PYQT6:
from PyQt6.QtOpenGLWidgets import *
elif PYSIDE2:
raise QtBindingMissingModuleError(name='QtTextToSpeech')
elif PYSIDE6:
from PySide6.QtOpenGLWidgets import *
else:
raise QtBindingsNotFoundError()
## Instruction:
Fix wrong module name in error message
## Code After:
"""Provides QtOpenGLWidgets classes and functions."""
from . import PYQT5, PYQT6, PYSIDE2, PYSIDE6, QtBindingsNotFoundError, QtBindingMissingModuleError
if PYQT5:
raise QtBindingMissingModuleError(name='QtOpenGLWidgets')
elif PYQT6:
from PyQt6.QtOpenGLWidgets import *
elif PYSIDE2:
raise QtBindingMissingModuleError(name='QtOpenGLWidgets')
elif PYSIDE6:
from PySide6.QtOpenGLWidgets import *
else:
raise QtBindingsNotFoundError()
|
...
from . import PYQT5, PYQT6, PYSIDE2, PYSIDE6, QtBindingsNotFoundError, QtBindingMissingModuleError
if PYQT5:
raise QtBindingMissingModuleError(name='QtOpenGLWidgets')
elif PYQT6:
from PyQt6.QtOpenGLWidgets import *
elif PYSIDE2:
raise QtBindingMissingModuleError(name='QtOpenGLWidgets')
elif PYSIDE6:
from PySide6.QtOpenGLWidgets import *
else:
...
|
6690479e46c9138c6f57ce9415b0429175545e96
|
stock_transfer_restrict_lot/models/stock_production_lot.py
|
stock_transfer_restrict_lot/models/stock_production_lot.py
|
from openerp import models, fields, api
class StockProductionlot(models.Model):
_inherit = 'stock.production.lot'
qty_available_not_res = fields.Float(
compute='_compute_qty_available_not_res',
string='Qty Available Not Reserved',
store=True
)
@api.multi
@api.depends('quant_ids.reservation_id', 'quant_ids.qty')
def _compute_qty_available_not_res(self):
for rec in self:
rec.qty_available_not_res = sum(rec.quant_ids.filtered(
lambda x: not x.reservation_id).mapped('qty'))
@api.multi
def name_get(self):
result = []
for rec in self:
name = '%s (%s u)' % (rec.name, rec.qty_available_not_res or 0.0)
result.append((rec.id, name))
return result
|
from openerp import models, fields, api
class StockProductionlot(models.Model):
_inherit = 'stock.production.lot'
qty_available_not_res = fields.Float(
compute='_compute_qty_available_not_res',
string='Qty Available Not Reserved',
store=True
)
@api.multi
@api.depends('quant_ids.reservation_id', 'quant_ids.qty')
def _compute_qty_available_not_res(self):
for rec in self:
rec.qty_available_not_res = sum(rec.quant_ids.filtered(
lambda x: not x.reservation_id).mapped('qty'))
@api.multi
def name_get(self):
result = []
for rec in self:
read = rec.quant_ids.read_group(
[('lot_id', '=', rec.id)],
['location_id', 'qty'], 'location_id')
locations_qty = ', '.join(
['%s: %s' % (x['location_id'][1], x['qty']) for x in read])
name = '%s (%s)' % (rec.name, locations_qty)
result.append((rec.id, name))
return result
|
FIX in lot name_get to show location with the stock
|
FIX in lot name_get to show location with the stock
|
Python
|
agpl-3.0
|
ingadhoc/stock
|
python
|
## Code Before:
from openerp import models, fields, api
class StockProductionlot(models.Model):
_inherit = 'stock.production.lot'
qty_available_not_res = fields.Float(
compute='_compute_qty_available_not_res',
string='Qty Available Not Reserved',
store=True
)
@api.multi
@api.depends('quant_ids.reservation_id', 'quant_ids.qty')
def _compute_qty_available_not_res(self):
for rec in self:
rec.qty_available_not_res = sum(rec.quant_ids.filtered(
lambda x: not x.reservation_id).mapped('qty'))
@api.multi
def name_get(self):
result = []
for rec in self:
name = '%s (%s u)' % (rec.name, rec.qty_available_not_res or 0.0)
result.append((rec.id, name))
return result
## Instruction:
FIX in lot name_get to show location with the stock
## Code After:
from openerp import models, fields, api
class StockProductionlot(models.Model):
_inherit = 'stock.production.lot'
qty_available_not_res = fields.Float(
compute='_compute_qty_available_not_res',
string='Qty Available Not Reserved',
store=True
)
@api.multi
@api.depends('quant_ids.reservation_id', 'quant_ids.qty')
def _compute_qty_available_not_res(self):
for rec in self:
rec.qty_available_not_res = sum(rec.quant_ids.filtered(
lambda x: not x.reservation_id).mapped('qty'))
@api.multi
def name_get(self):
result = []
for rec in self:
read = rec.quant_ids.read_group(
[('lot_id', '=', rec.id)],
['location_id', 'qty'], 'location_id')
locations_qty = ', '.join(
['%s: %s' % (x['location_id'][1], x['qty']) for x in read])
name = '%s (%s)' % (rec.name, locations_qty)
result.append((rec.id, name))
return result
|
...
def name_get(self):
result = []
for rec in self:
read = rec.quant_ids.read_group(
[('lot_id', '=', rec.id)],
['location_id', 'qty'], 'location_id')
locations_qty = ', '.join(
['%s: %s' % (x['location_id'][1], x['qty']) for x in read])
name = '%s (%s)' % (rec.name, locations_qty)
result.append((rec.id, name))
return result
...
|
72941398fd2e78cbf5d994b4bf8683c4bdefaab9
|
utils/travis_runner.py
|
utils/travis_runner.py
|
"""This script manages all tasks for the TRAVIS build server."""
import os
import subprocess
if __name__ == "__main__":
os.chdir("promotion/grmpy_tutorial_notebook")
cmd = [
"jupyter",
"nbconvert",
"--execute",
"grmpy_tutorial_notebook.ipynb",
"--ExecutePreprocessor.timeout=-1",
]
subprocess.check_call(cmd)
os.chdir("../..")
if __name__ == "__main__":
os.chdir("promotion/grmpy_tutorial_notebook")
cmd = [
"jupyter",
"nbconvert",
"--execute",
"tutorial_semipar_notebook.ipynb",
"--ExecutePreprocessor.timeout=-1",
]
subprocess.check_call(cmd)
|
"""This script manages all tasks for the TRAVIS build server."""
import os
import subprocess
if __name__ == "__main__":
os.chdir("promotion/grmpy_tutorial_notebook")
cmd = [
"jupyter",
"nbconvert",
"--execute",
"grmpy_tutorial_notebook.ipynb",
"--ExecutePreprocessor.timeout=-1",
]
subprocess.check_call(cmd)
os.chdir("../..")
# if __name__ == "__main__":
# os.chdir("promotion/grmpy_tutorial_notebook")
# cmd = [
# "jupyter",
# "nbconvert",
# "--execute",
# "tutorial_semipar_notebook.ipynb",
# "--ExecutePreprocessor.timeout=-1",
# ]
# subprocess.check_call(cmd)
|
Comment out semipar notebook in travis runner until pip build is updated.
|
Comment out semipar notebook in travis runner until pip build is updated.
|
Python
|
mit
|
grmToolbox/grmpy
|
python
|
## Code Before:
"""This script manages all tasks for the TRAVIS build server."""
import os
import subprocess
if __name__ == "__main__":
os.chdir("promotion/grmpy_tutorial_notebook")
cmd = [
"jupyter",
"nbconvert",
"--execute",
"grmpy_tutorial_notebook.ipynb",
"--ExecutePreprocessor.timeout=-1",
]
subprocess.check_call(cmd)
os.chdir("../..")
if __name__ == "__main__":
os.chdir("promotion/grmpy_tutorial_notebook")
cmd = [
"jupyter",
"nbconvert",
"--execute",
"tutorial_semipar_notebook.ipynb",
"--ExecutePreprocessor.timeout=-1",
]
subprocess.check_call(cmd)
## Instruction:
Comment out semipar notebook in travis runner until pip build is updated.
## Code After:
"""This script manages all tasks for the TRAVIS build server."""
import os
import subprocess
if __name__ == "__main__":
os.chdir("promotion/grmpy_tutorial_notebook")
cmd = [
"jupyter",
"nbconvert",
"--execute",
"grmpy_tutorial_notebook.ipynb",
"--ExecutePreprocessor.timeout=-1",
]
subprocess.check_call(cmd)
os.chdir("../..")
# if __name__ == "__main__":
# os.chdir("promotion/grmpy_tutorial_notebook")
# cmd = [
# "jupyter",
# "nbconvert",
# "--execute",
# "tutorial_semipar_notebook.ipynb",
# "--ExecutePreprocessor.timeout=-1",
# ]
# subprocess.check_call(cmd)
|
...
os.chdir("../..")
# if __name__ == "__main__":
# os.chdir("promotion/grmpy_tutorial_notebook")
# cmd = [
# "jupyter",
# "nbconvert",
# "--execute",
# "tutorial_semipar_notebook.ipynb",
# "--ExecutePreprocessor.timeout=-1",
# ]
# subprocess.check_call(cmd)
...
|
b4000968b0a3c19bb60d57220cb31f73926149bf
|
name.abuchen.portfolio.ui/src/name/abuchen/portfolio/ui/views/dashboard/TradesWidget.java
|
name.abuchen.portfolio.ui/src/name/abuchen/portfolio/ui/views/dashboard/TradesWidget.java
|
package name.abuchen.portfolio.ui.views.dashboard;
import java.util.List;
import com.ibm.icu.text.MessageFormat;
import name.abuchen.portfolio.model.Dashboard.Widget;
import name.abuchen.portfolio.snapshot.trades.Trade;
import name.abuchen.portfolio.ui.views.trades.TradeDetailsView;
import name.abuchen.portfolio.util.TextUtil;
public class TradesWidget extends AbstractTradesWidget
{
public TradesWidget(Widget widget, DashboardData dashboardData)
{
super(widget, dashboardData);
}
@Override
public void update(TradeDetailsView.Input input)
{
this.title.setText(TextUtil.tooltip(getWidget().getLabel()));
List<Trade> trades = input.getTrades();
long positive = trades.stream().filter(t -> t.getIRR() > 0).count();
String text = MessageFormat.format("{0} <green>↑{1}</green> <red>↓{2}</red>", //$NON-NLS-1$
trades.size(), positive, trades.size() - positive);
this.indicator.setText(text);
}
}
|
package name.abuchen.portfolio.ui.views.dashboard;
import java.util.List;
import com.ibm.icu.text.MessageFormat;
import name.abuchen.portfolio.model.Dashboard.Widget;
import name.abuchen.portfolio.snapshot.trades.Trade;
import name.abuchen.portfolio.ui.views.trades.TradeDetailsView;
import name.abuchen.portfolio.util.TextUtil;
public class TradesWidget extends AbstractTradesWidget
{
public TradesWidget(Widget widget, DashboardData dashboardData)
{
super(widget, dashboardData);
}
@Override
public void update(TradeDetailsView.Input input)
{
this.title.setText(TextUtil.tooltip(getWidget().getLabel()));
List<Trade> trades = input.getTrades();
long positive = trades.stream().filter(t -> t.getProfitLoss().isPositive()).count();
String text = MessageFormat.format("{0} <green>↑{1}</green> <red>↓{2}</red>", //$NON-NLS-1$
trades.size(), positive, trades.size() - positive);
this.indicator.setText(text);
}
}
|
Fix Trades Widget to count by isPositive rather than IRR
|
Fix Trades Widget to count by isPositive rather than IRR
|
Java
|
epl-1.0
|
cmaoling/portfolio,cmaoling/portfolio,cmaoling/portfolio,cmaoling/portfolio
|
java
|
## Code Before:
package name.abuchen.portfolio.ui.views.dashboard;
import java.util.List;
import com.ibm.icu.text.MessageFormat;
import name.abuchen.portfolio.model.Dashboard.Widget;
import name.abuchen.portfolio.snapshot.trades.Trade;
import name.abuchen.portfolio.ui.views.trades.TradeDetailsView;
import name.abuchen.portfolio.util.TextUtil;
public class TradesWidget extends AbstractTradesWidget
{
public TradesWidget(Widget widget, DashboardData dashboardData)
{
super(widget, dashboardData);
}
@Override
public void update(TradeDetailsView.Input input)
{
this.title.setText(TextUtil.tooltip(getWidget().getLabel()));
List<Trade> trades = input.getTrades();
long positive = trades.stream().filter(t -> t.getIRR() > 0).count();
String text = MessageFormat.format("{0} <green>↑{1}</green> <red>↓{2}</red>", //$NON-NLS-1$
trades.size(), positive, trades.size() - positive);
this.indicator.setText(text);
}
}
## Instruction:
Fix Trades Widget to count by isPositive rather than IRR
## Code After:
package name.abuchen.portfolio.ui.views.dashboard;
import java.util.List;
import com.ibm.icu.text.MessageFormat;
import name.abuchen.portfolio.model.Dashboard.Widget;
import name.abuchen.portfolio.snapshot.trades.Trade;
import name.abuchen.portfolio.ui.views.trades.TradeDetailsView;
import name.abuchen.portfolio.util.TextUtil;
public class TradesWidget extends AbstractTradesWidget
{
public TradesWidget(Widget widget, DashboardData dashboardData)
{
super(widget, dashboardData);
}
@Override
public void update(TradeDetailsView.Input input)
{
this.title.setText(TextUtil.tooltip(getWidget().getLabel()));
List<Trade> trades = input.getTrades();
long positive = trades.stream().filter(t -> t.getProfitLoss().isPositive()).count();
String text = MessageFormat.format("{0} <green>↑{1}</green> <red>↓{2}</red>", //$NON-NLS-1$
trades.size(), positive, trades.size() - positive);
this.indicator.setText(text);
}
}
|
# ... existing code ...
this.title.setText(TextUtil.tooltip(getWidget().getLabel()));
List<Trade> trades = input.getTrades();
long positive = trades.stream().filter(t -> t.getProfitLoss().isPositive()).count();
String text = MessageFormat.format("{0} <green>↑{1}</green> <red>↓{2}</red>", //$NON-NLS-1$
trades.size(), positive, trades.size() - positive);
# ... rest of the code ...
|
42f74f304d0ac404f17d6489033b6140816cb194
|
fireplace/cards/gvg/neutral_common.py
|
fireplace/cards/gvg/neutral_common.py
|
from ..utils import *
##
# Minions
# Explosive Sheep
class GVG_076:
def deathrattle(self):
for target in self.game.board:
self.hit(target, 2)
# Clockwork Gnome
class GVG_082:
deathrattle = giveSparePart
# Micro Machine
class GVG_103:
def TURN_BEGIN(self, player):
# That card ID is not a mistake
self.buff(self, "GVG_076a")
# Pistons
class GVG_076a:
Atk = 1
|
from ..utils import *
##
# Minions
# Stonesplinter Trogg
class GVG_067:
def CARD_PLAYED(self, player, card):
if player is not self.controller and card.type == CardType.SPELL:
self.buff("GVG_067a")
class GVG_067a:
Atk = 1
# Burly Rockjaw Trogg
class GVG_068:
def CARD_PLAYED(self, player, card):
if player is not self.controller and card.type == CardType.SPELL:
self.buff("GVG_068a")
class GVG_068a:
Atk = 2
# Ship's Cannon
class GVG_075:
def OWN_MINION_SUMMONED(self, minion):
if minion.race == Race.PIRATE:
targets = self.controller.getTargets(TARGET_ENEMY_CHARACTERS)
self.hit(random.choice(targets), 2)
# Explosive Sheep
class GVG_076:
def deathrattle(self):
for target in self.game.board:
self.hit(target, 2)
# Clockwork Gnome
class GVG_082:
deathrattle = giveSparePart
# Micro Machine
class GVG_103:
def TURN_BEGIN(self, player):
# That card ID is not a mistake
self.buff(self, "GVG_076a")
# Pistons
class GVG_076a:
Atk = 1
|
Implement Stonesplinter Trogg, Burly Rockjaw Trogg, Ship's Cannon
|
Implement Stonesplinter Trogg, Burly Rockjaw Trogg, Ship's Cannon
|
Python
|
agpl-3.0
|
Ragowit/fireplace,NightKev/fireplace,jleclanche/fireplace,smallnamespace/fireplace,smallnamespace/fireplace,amw2104/fireplace,beheh/fireplace,Meerkov/fireplace,amw2104/fireplace,oftc-ftw/fireplace,butozerca/fireplace,liujimj/fireplace,oftc-ftw/fireplace,Meerkov/fireplace,butozerca/fireplace,Ragowit/fireplace,liujimj/fireplace
|
python
|
## Code Before:
from ..utils import *
##
# Minions
# Explosive Sheep
class GVG_076:
def deathrattle(self):
for target in self.game.board:
self.hit(target, 2)
# Clockwork Gnome
class GVG_082:
deathrattle = giveSparePart
# Micro Machine
class GVG_103:
def TURN_BEGIN(self, player):
# That card ID is not a mistake
self.buff(self, "GVG_076a")
# Pistons
class GVG_076a:
Atk = 1
## Instruction:
Implement Stonesplinter Trogg, Burly Rockjaw Trogg, Ship's Cannon
## Code After:
from ..utils import *
##
# Minions
# Stonesplinter Trogg
class GVG_067:
def CARD_PLAYED(self, player, card):
if player is not self.controller and card.type == CardType.SPELL:
self.buff("GVG_067a")
class GVG_067a:
Atk = 1
# Burly Rockjaw Trogg
class GVG_068:
def CARD_PLAYED(self, player, card):
if player is not self.controller and card.type == CardType.SPELL:
self.buff("GVG_068a")
class GVG_068a:
Atk = 2
# Ship's Cannon
class GVG_075:
def OWN_MINION_SUMMONED(self, minion):
if minion.race == Race.PIRATE:
targets = self.controller.getTargets(TARGET_ENEMY_CHARACTERS)
self.hit(random.choice(targets), 2)
# Explosive Sheep
class GVG_076:
def deathrattle(self):
for target in self.game.board:
self.hit(target, 2)
# Clockwork Gnome
class GVG_082:
deathrattle = giveSparePart
# Micro Machine
class GVG_103:
def TURN_BEGIN(self, player):
# That card ID is not a mistake
self.buff(self, "GVG_076a")
# Pistons
class GVG_076a:
Atk = 1
|
...
##
# Minions
# Stonesplinter Trogg
class GVG_067:
def CARD_PLAYED(self, player, card):
if player is not self.controller and card.type == CardType.SPELL:
self.buff("GVG_067a")
class GVG_067a:
Atk = 1
# Burly Rockjaw Trogg
class GVG_068:
def CARD_PLAYED(self, player, card):
if player is not self.controller and card.type == CardType.SPELL:
self.buff("GVG_068a")
class GVG_068a:
Atk = 2
# Ship's Cannon
class GVG_075:
def OWN_MINION_SUMMONED(self, minion):
if minion.race == Race.PIRATE:
targets = self.controller.getTargets(TARGET_ENEMY_CHARACTERS)
self.hit(random.choice(targets), 2)
# Explosive Sheep
class GVG_076:
...
|
b57aae70855946e5e6a223f10b29af082937e20c
|
BBBAPI/BBBAPI/Classes/BBAAPIErrors.h
|
BBBAPI/BBBAPI/Classes/BBAAPIErrors.h
|
//
// BBAAPIErrors.h
// BBAAPI
//
// Created by Owen Worley on 11/08/2014.
// Copyright (c) 2014 Blinkbox Books. All rights reserved.
//
NS_ENUM(NSInteger, BBAAPIError) {
/**
* Used when needed parameter is not supplied to the method
* or when object is supplied but it has wrong type or
* for example one of it's needed properties is not set
*/
BBAAPIWrongUsage = 700,
/**
* Error returned when for any reason API call cannot connect to the server
*/
BBAAPIErrorCouldNotConnect = 701,
/**
* Returned when call cannot be authenticated, or when server returns 401
*/
BBAAPIErrorUnauthorised = 702,
/**
* Used when server cannot find a resource and returns 404
*/
BBAAPIErrorNotFound = 703,
/**
* Used when server returns 500
*/
BBAAPIServerError = 704,
/**
* Used when server returns 403
*/
BBAAPIErrorForbidden = 705,
/**
* Used when we cannot decode or read data returned from the server
*/
BBAAPIUnreadableData = 706,
};
|
//
// BBAAPIErrors.h
// BBAAPI
//
// Created by Owen Worley on 11/08/2014.
// Copyright (c) 2014 Blinkbox Books. All rights reserved.
//
NS_ENUM(NSInteger, BBAAPIError) {
/**
* Used when needed parameter is not supplied to the method
* or when object is supplied but it has wrong type or
* for example one of it's needed properties is not set
*/
BBAAPIWrongUsage = 700,
/**
* Error returned when for any reason API call cannot connect to the server
*/
BBAAPIErrorCouldNotConnect = 701,
/**
* Returned when call cannot be authenticated, or when server returns 401
*/
BBAAPIErrorUnauthorised = 702,
/**
* Used when server cannot find a resource and returns 404
*/
BBAAPIErrorNotFound = 703,
/**
* Used when server returns 500
*/
BBAAPIServerError = 704,
/**
* Used when server returns 403
*/
BBAAPIErrorForbidden = 705,
/**
* Used when we cannot decode or read data returned from the server
*/
BBAAPIUnreadableData = 706,
/**
* Used when the server returns a 400 (Bad Request)
*/
BBAAPIBadRequest = 707,
};
|
Add Bad Request error code
|
Add Bad Request error code
|
C
|
mit
|
blinkboxbooks/blinkbox-network.objc,blinkboxbooks/blinkbox-network.objc
|
c
|
## Code Before:
//
// BBAAPIErrors.h
// BBAAPI
//
// Created by Owen Worley on 11/08/2014.
// Copyright (c) 2014 Blinkbox Books. All rights reserved.
//
NS_ENUM(NSInteger, BBAAPIError) {
/**
* Used when needed parameter is not supplied to the method
* or when object is supplied but it has wrong type or
* for example one of it's needed properties is not set
*/
BBAAPIWrongUsage = 700,
/**
* Error returned when for any reason API call cannot connect to the server
*/
BBAAPIErrorCouldNotConnect = 701,
/**
* Returned when call cannot be authenticated, or when server returns 401
*/
BBAAPIErrorUnauthorised = 702,
/**
* Used when server cannot find a resource and returns 404
*/
BBAAPIErrorNotFound = 703,
/**
* Used when server returns 500
*/
BBAAPIServerError = 704,
/**
* Used when server returns 403
*/
BBAAPIErrorForbidden = 705,
/**
* Used when we cannot decode or read data returned from the server
*/
BBAAPIUnreadableData = 706,
};
## Instruction:
Add Bad Request error code
## Code After:
//
// BBAAPIErrors.h
// BBAAPI
//
// Created by Owen Worley on 11/08/2014.
// Copyright (c) 2014 Blinkbox Books. All rights reserved.
//
NS_ENUM(NSInteger, BBAAPIError) {
/**
* Used when needed parameter is not supplied to the method
* or when object is supplied but it has wrong type or
* for example one of it's needed properties is not set
*/
BBAAPIWrongUsage = 700,
/**
* Error returned when for any reason API call cannot connect to the server
*/
BBAAPIErrorCouldNotConnect = 701,
/**
* Returned when call cannot be authenticated, or when server returns 401
*/
BBAAPIErrorUnauthorised = 702,
/**
* Used when server cannot find a resource and returns 404
*/
BBAAPIErrorNotFound = 703,
/**
* Used when server returns 500
*/
BBAAPIServerError = 704,
/**
* Used when server returns 403
*/
BBAAPIErrorForbidden = 705,
/**
* Used when we cannot decode or read data returned from the server
*/
BBAAPIUnreadableData = 706,
/**
* Used when the server returns a 400 (Bad Request)
*/
BBAAPIBadRequest = 707,
};
|
# ... existing code ...
* Used when we cannot decode or read data returned from the server
*/
BBAAPIUnreadableData = 706,
/**
* Used when the server returns a 400 (Bad Request)
*/
BBAAPIBadRequest = 707,
};
# ... rest of the code ...
|
0ba44cc7a8e906be42d98599b59e28ad142648b7
|
blogApp/forms/upload_image.py
|
blogApp/forms/upload_image.py
|
from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms.bootstrap import FormActions, Div
from crispy_forms.layout import Layout, Field, HTML, Button, Submit, Reset
class UploadImageForm(forms.Form):
helper = FormHelper()
helper.form_tag = False
helper.form_class = 'form-horizontal'
helper.label_class = 'col-xs-3'
helper.field_class = 'col-xs-9'
title = forms.CharField(
label = "Alt. Title",
required = True,
max_length=50,
)
image = forms.FileField(
label = "Image",
required = True,
)
resize = forms.IntegerField(
label = "Resize to this width or height",
initial = 1000,
required = True,
)
helper.layout = Layout(
Field('title'),
Field('image'),
Field('resize'),
Div(
FormActions(
HTML('<button type="button" class="btn btn-default" data-dismiss="modal">Close</button>'),
Submit('save', 'Upload'),
),
css_class="modal-footer"
),
)
|
from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms.bootstrap import FormActions, Div, AppendedText
from crispy_forms.layout import Layout, Field, HTML, Button, Submit, Reset
class UploadImageForm(forms.Form):
helper = FormHelper()
helper.form_tag = False
helper.form_class = 'form-horizontal'
helper.label_class = 'col-xs-3'
helper.field_class = 'col-xs-9'
title = forms.CharField(
label = "Alt. Title",
required = True,
max_length=50,
)
image = forms.FileField(
label = "Image",
required = True,
)
resize = forms.IntegerField(
label = "Resize to this width or height",
initial = 1000,
required = True,
)
helper.layout = Layout(
Field('title'),
Field('image'),
AppendedText('resize', 'px'),
Div(
FormActions(
HTML('<button type="button" class="btn btn-default" data-dismiss="modal">Close</button>'),
Submit('save', 'Upload'),
),
css_class="modal-footer"
),
)
|
Append 'px' to end of image upload form resize field
|
Append 'px' to end of image upload form resize field
|
Python
|
mit
|
SPARLab/BikeMaps,SPARLab/BikeMaps,SPARLab/BikeMaps
|
python
|
## Code Before:
from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms.bootstrap import FormActions, Div
from crispy_forms.layout import Layout, Field, HTML, Button, Submit, Reset
class UploadImageForm(forms.Form):
helper = FormHelper()
helper.form_tag = False
helper.form_class = 'form-horizontal'
helper.label_class = 'col-xs-3'
helper.field_class = 'col-xs-9'
title = forms.CharField(
label = "Alt. Title",
required = True,
max_length=50,
)
image = forms.FileField(
label = "Image",
required = True,
)
resize = forms.IntegerField(
label = "Resize to this width or height",
initial = 1000,
required = True,
)
helper.layout = Layout(
Field('title'),
Field('image'),
Field('resize'),
Div(
FormActions(
HTML('<button type="button" class="btn btn-default" data-dismiss="modal">Close</button>'),
Submit('save', 'Upload'),
),
css_class="modal-footer"
),
)
## Instruction:
Append 'px' to end of image upload form resize field
## Code After:
from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms.bootstrap import FormActions, Div, AppendedText
from crispy_forms.layout import Layout, Field, HTML, Button, Submit, Reset
class UploadImageForm(forms.Form):
helper = FormHelper()
helper.form_tag = False
helper.form_class = 'form-horizontal'
helper.label_class = 'col-xs-3'
helper.field_class = 'col-xs-9'
title = forms.CharField(
label = "Alt. Title",
required = True,
max_length=50,
)
image = forms.FileField(
label = "Image",
required = True,
)
resize = forms.IntegerField(
label = "Resize to this width or height",
initial = 1000,
required = True,
)
helper.layout = Layout(
Field('title'),
Field('image'),
AppendedText('resize', 'px'),
Div(
FormActions(
HTML('<button type="button" class="btn btn-default" data-dismiss="modal">Close</button>'),
Submit('save', 'Upload'),
),
css_class="modal-footer"
),
)
|
...
from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms.bootstrap import FormActions, Div, AppendedText
from crispy_forms.layout import Layout, Field, HTML, Button, Submit, Reset
class UploadImageForm(forms.Form):
...
helper.layout = Layout(
Field('title'),
Field('image'),
AppendedText('resize', 'px'),
Div(
FormActions(
...
|
d98829d34e49b542097b113d17e6216199483986
|
rbopt.py
|
rbopt.py
|
"""RedBrick Options Module; contains RBOpt class."""
#-----------------------------------------------------------------------------#
# DATA #
#-----------------------------------------------------------------------------#
__version__ = '$Revision$'
__author__ = 'Cillian Sharkey'
#-----------------------------------------------------------------------------#
# CLASSES #
#-----------------------------------------------------------------------------#
class RBOpt:
"""Class for storing options to be shared by modules"""
def __init__(self):
"""Create new RBOpt object."""
# Used by all modules.
self.override = None
# Used by useradm, RBUserDB & RBAccount.
self.test = None
# Used by useradm & rrs.
self.mode = None
self.setpasswd = None
# Used by useradm.
self.args = []
self.help = None
self.username = None
self.dbonly = None
self.aconly = None
self.updatedby = None
self.newbie = None
self.mailuser = None
self.usertype = None
self.name = None
self.email = None
self.id = None
self.course = None
self.year = None
self.years_paid = None
self.birthday = None
self.quiet = None
# Used by rrs.
self.action = None
|
"""RedBrick Options Module; contains RBOpt class."""
#-----------------------------------------------------------------------------#
# DATA #
#-----------------------------------------------------------------------------#
__version__ = '$Revision: 1.2 $'
__author__ = 'Cillian Sharkey'
#-----------------------------------------------------------------------------#
# CLASSES #
#-----------------------------------------------------------------------------#
class RBOpt:
"""Class for storing options to be shared by modules"""
def __init__(self):
"""Create new RBOpt object."""
# Used by all modules.
self.override = None
# Used by useradm, RBUserDB & RBAccount.
self.test = None
# Used by useradm & rrs.
self.mode = None
self.setpasswd = None
# Used by useradm.
self.args = []
self.help = None
self.uid = None
self.dbonly = None
self.aconly = None
self.updatedby = None
self.newbie = None
self.mailuser = None
self.usertype = None
self.cn = None
self.altmail = None
self.id = None
self.course = None
self.year = None
self.yearsPaid = None
self.birthday = None
self.quiet = None
# Used by rrs.
self.action = None
|
Change to new attribute names.
|
Change to new attribute names.
|
Python
|
unlicense
|
gruunday/useradm,gruunday/useradm,gruunday/useradm
|
python
|
## Code Before:
"""RedBrick Options Module; contains RBOpt class."""
#-----------------------------------------------------------------------------#
# DATA #
#-----------------------------------------------------------------------------#
__version__ = '$Revision$'
__author__ = 'Cillian Sharkey'
#-----------------------------------------------------------------------------#
# CLASSES #
#-----------------------------------------------------------------------------#
class RBOpt:
"""Class for storing options to be shared by modules"""
def __init__(self):
"""Create new RBOpt object."""
# Used by all modules.
self.override = None
# Used by useradm, RBUserDB & RBAccount.
self.test = None
# Used by useradm & rrs.
self.mode = None
self.setpasswd = None
# Used by useradm.
self.args = []
self.help = None
self.username = None
self.dbonly = None
self.aconly = None
self.updatedby = None
self.newbie = None
self.mailuser = None
self.usertype = None
self.name = None
self.email = None
self.id = None
self.course = None
self.year = None
self.years_paid = None
self.birthday = None
self.quiet = None
# Used by rrs.
self.action = None
## Instruction:
Change to new attribute names.
## Code After:
"""RedBrick Options Module; contains RBOpt class."""
#-----------------------------------------------------------------------------#
# DATA #
#-----------------------------------------------------------------------------#
__version__ = '$Revision: 1.2 $'
__author__ = 'Cillian Sharkey'
#-----------------------------------------------------------------------------#
# CLASSES #
#-----------------------------------------------------------------------------#
class RBOpt:
"""Class for storing options to be shared by modules"""
def __init__(self):
"""Create new RBOpt object."""
# Used by all modules.
self.override = None
# Used by useradm, RBUserDB & RBAccount.
self.test = None
# Used by useradm & rrs.
self.mode = None
self.setpasswd = None
# Used by useradm.
self.args = []
self.help = None
self.uid = None
self.dbonly = None
self.aconly = None
self.updatedby = None
self.newbie = None
self.mailuser = None
self.usertype = None
self.cn = None
self.altmail = None
self.id = None
self.course = None
self.year = None
self.yearsPaid = None
self.birthday = None
self.quiet = None
# Used by rrs.
self.action = None
|
# ... existing code ...
# DATA #
#-----------------------------------------------------------------------------#
__version__ = '$Revision: 1.2 $'
__author__ = 'Cillian Sharkey'
#-----------------------------------------------------------------------------#
# ... modified code ...
# Used by useradm.
self.args = []
self.help = None
self.uid = None
self.dbonly = None
self.aconly = None
self.updatedby = None
...
self.newbie = None
self.mailuser = None
self.usertype = None
self.cn = None
self.altmail = None
self.id = None
self.course = None
self.year = None
self.yearsPaid = None
self.birthday = None
self.quiet = None
# Used by rrs.
# ... rest of the code ...
|
e255b92589000c2d485d35f9008b78e0313b4374
|
pystache/template_spec.py
|
pystache/template_spec.py
|
# TODO: finish the class docstring.
class TemplateSpec(object):
"""
A mixin or interface for specifying custom template information.
The "spec" in TemplateSpec can be taken to mean that the template
information is either "specified" or "special."
A view should subclass this class only if customized template loading
is needed. The following attributes allow one to customize/override
template information on a per view basis. A None value means to use
default behavior for that value and perform no customization. All
attributes are initialized to None.
Attributes:
template: the template as a string.
template_rel_path: the path to the template file, relative to the
directory containing the module defining the class.
template_rel_directory: the directory containing the template file, relative
to the directory containing the module defining the class.
template_extension: the template file extension. Defaults to "mustache".
Pass False for no extension (i.e. extensionless template files).
"""
template = None
template_rel_path = None
template_rel_directory = None
template_name = None
template_extension = None
template_encoding = None
|
class TemplateSpec(object):
"""
A mixin or interface for specifying custom template information.
The "spec" in TemplateSpec can be taken to mean that the template
information is either "specified" or "special."
A view should subclass this class only if customized template loading
is needed. The following attributes allow one to customize/override
template information on a per view basis. A None value means to use
default behavior for that value and perform no customization. All
attributes are initialized to None.
Attributes:
template: the template as a string.
template_encoding: the encoding used by the template.
template_extension: the template file extension. Defaults to "mustache".
Pass False for no extension (i.e. extensionless template files).
template_name: the name of the template.
template_rel_directory: the directory containing the template file,
relative to the directory containing the module defining the class.
template_rel_path: the path to the template file, relative to the
directory containing the module defining the class.
"""
template = None
template_encoding = None
template_extension = None
template_name = None
template_rel_directory = None
template_rel_path = None
|
Reorder TemplateSpec attributes and add to docstring.
|
Reorder TemplateSpec attributes and add to docstring.
|
Python
|
mit
|
nitish116/pystache,rismalrv/pystache,charbeljc/pystache,rismalrv/pystache,harsh00008/pystache,arlenesr28/pystache,defunkt/pystache,beni55/pystache,nitish116/pystache,nitish116/pystache,rismalrv/pystache,jrnold/pystache,jrnold/pystache,harsh00008/pystache,harsh00008/pystache,charbeljc/pystache,arlenesr28/pystache,beni55/pystache,arlenesr28/pystache
|
python
|
## Code Before:
# TODO: finish the class docstring.
class TemplateSpec(object):
"""
A mixin or interface for specifying custom template information.
The "spec" in TemplateSpec can be taken to mean that the template
information is either "specified" or "special."
A view should subclass this class only if customized template loading
is needed. The following attributes allow one to customize/override
template information on a per view basis. A None value means to use
default behavior for that value and perform no customization. All
attributes are initialized to None.
Attributes:
template: the template as a string.
template_rel_path: the path to the template file, relative to the
directory containing the module defining the class.
template_rel_directory: the directory containing the template file, relative
to the directory containing the module defining the class.
template_extension: the template file extension. Defaults to "mustache".
Pass False for no extension (i.e. extensionless template files).
"""
template = None
template_rel_path = None
template_rel_directory = None
template_name = None
template_extension = None
template_encoding = None
## Instruction:
Reorder TemplateSpec attributes and add to docstring.
## Code After:
class TemplateSpec(object):
"""
A mixin or interface for specifying custom template information.
The "spec" in TemplateSpec can be taken to mean that the template
information is either "specified" or "special."
A view should subclass this class only if customized template loading
is needed. The following attributes allow one to customize/override
template information on a per view basis. A None value means to use
default behavior for that value and perform no customization. All
attributes are initialized to None.
Attributes:
template: the template as a string.
template_encoding: the encoding used by the template.
template_extension: the template file extension. Defaults to "mustache".
Pass False for no extension (i.e. extensionless template files).
template_name: the name of the template.
template_rel_directory: the directory containing the template file,
relative to the directory containing the module defining the class.
template_rel_path: the path to the template file, relative to the
directory containing the module defining the class.
"""
template = None
template_encoding = None
template_extension = None
template_name = None
template_rel_directory = None
template_rel_path = None
|
# ... existing code ...
class TemplateSpec(object):
"""
# ... modified code ...
template: the template as a string.
template_encoding: the encoding used by the template.
template_extension: the template file extension. Defaults to "mustache".
Pass False for no extension (i.e. extensionless template files).
template_name: the name of the template.
template_rel_directory: the directory containing the template file,
relative to the directory containing the module defining the class.
template_rel_path: the path to the template file, relative to the
directory containing the module defining the class.
"""
template = None
template_encoding = None
template_extension = None
template_name = None
template_rel_directory = None
template_rel_path = None
# ... rest of the code ...
|
c0be5d3e98bfaa910c3d9de04d1f83ca79d8b8be
|
app/src/main/java/jp/takke/datastats/BootReceiver.java
|
app/src/main/java/jp/takke/datastats/BootReceiver.java
|
package jp.takke.datastats;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import jp.takke.util.MyLog;
public class BootReceiver extends BroadcastReceiver {
@Override
public void onReceive(Context context, Intent intent) {
MyLog.i("BootReceiver.onReceive");
if (Intent.ACTION_BOOT_COMPLETED.equals(intent.getAction())) {
// Handle device boot
// Check the start-on-boot preference
final SharedPreferences pref = PreferenceManager.getDefaultSharedPreferences(context);
final boolean startOnBoot = pref.getBoolean(C.PREF_KEY_START_ON_BOOT, false);
MyLog.i("start on boot[" + (startOnBoot ? "YES" : "NO") + "]");
if (startOnBoot) {
// Start the service
final Intent serviceIntent = new Intent(context, LayerService.class);
context.startService(serviceIntent);
}
}
}
}
|
package jp.takke.datastats;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Build;
import android.preference.PreferenceManager;
import jp.takke.util.MyLog;
public class BootReceiver extends BroadcastReceiver {
@Override
public void onReceive(Context context, Intent intent) {
MyLog.i("BootReceiver.onReceive");
if (Intent.ACTION_BOOT_COMPLETED.equals(intent.getAction())) {
// Handle device boot
// Check the start-on-boot preference
final SharedPreferences pref = PreferenceManager.getDefaultSharedPreferences(context);
final boolean startOnBoot = pref.getBoolean(C.PREF_KEY_START_ON_BOOT, false);
MyLog.i("start on boot[" + (startOnBoot ? "YES" : "NO") + "]");
if (startOnBoot) {
// Start the service
final Intent serviceIntent = new Intent(context, LayerService.class);
if (Build.VERSION.SDK_INT >= 26) {
context.startForegroundService(serviceIntent);
} else {
context.startService(serviceIntent);
}
}
}
}
}
|
Fix crash on boot (Android 8.0 or later)
|
Fix crash on boot (Android 8.0 or later)
|
Java
|
apache-2.0
|
takke/DataStats,takke/DataStats
|
java
|
## Code Before:
package jp.takke.datastats;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import jp.takke.util.MyLog;
public class BootReceiver extends BroadcastReceiver {
@Override
public void onReceive(Context context, Intent intent) {
MyLog.i("BootReceiver.onReceive");
if (Intent.ACTION_BOOT_COMPLETED.equals(intent.getAction())) {
// Handle device boot
// Check the start-on-boot preference
final SharedPreferences pref = PreferenceManager.getDefaultSharedPreferences(context);
final boolean startOnBoot = pref.getBoolean(C.PREF_KEY_START_ON_BOOT, false);
MyLog.i("start on boot[" + (startOnBoot ? "YES" : "NO") + "]");
if (startOnBoot) {
// Start the service
final Intent serviceIntent = new Intent(context, LayerService.class);
context.startService(serviceIntent);
}
}
}
}
## Instruction:
Fix crash on boot (Android 8.0 or later)
## Code After:
package jp.takke.datastats;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Build;
import android.preference.PreferenceManager;
import jp.takke.util.MyLog;
public class BootReceiver extends BroadcastReceiver {
@Override
public void onReceive(Context context, Intent intent) {
MyLog.i("BootReceiver.onReceive");
if (Intent.ACTION_BOOT_COMPLETED.equals(intent.getAction())) {
// Handle device boot
// Check the start-on-boot preference
final SharedPreferences pref = PreferenceManager.getDefaultSharedPreferences(context);
final boolean startOnBoot = pref.getBoolean(C.PREF_KEY_START_ON_BOOT, false);
MyLog.i("start on boot[" + (startOnBoot ? "YES" : "NO") + "]");
if (startOnBoot) {
// Start the service
final Intent serviceIntent = new Intent(context, LayerService.class);
if (Build.VERSION.SDK_INT >= 26) {
context.startForegroundService(serviceIntent);
} else {
context.startService(serviceIntent);
}
}
}
}
}
|
# ... existing code ...
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Build;
import android.preference.PreferenceManager;
import jp.takke.util.MyLog;
# ... modified code ...
if (startOnBoot) {
// Start the service
final Intent serviceIntent = new Intent(context, LayerService.class);
if (Build.VERSION.SDK_INT >= 26) {
context.startForegroundService(serviceIntent);
} else {
context.startService(serviceIntent);
}
}
}
# ... rest of the code ...
|
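One detail the BootReceiver change above relies on but does not show: on Android 8.0 (API 26) and later, a service started through startForegroundService() must call startForeground() within a few seconds, otherwise the system throws and kills it. The record does not include LayerService, so the following is only an assumed sketch of the kind of onCreate() such a service would need; the channel id, notification text, and icon are placeholders for illustration, not code from the DataStats repository.
public class LayerService extends android.app.Service {
    private static final String CHANNEL_ID = "datastats"; // assumed channel id, not from the real sources
    @Override
    public void onCreate() {
        super.onCreate();
        if (android.os.Build.VERSION.SDK_INT >= 26) {
            // A notification channel is mandatory before posting the foreground notification on O+.
            android.app.NotificationChannel channel = new android.app.NotificationChannel(
                    CHANNEL_ID, "DataStats", android.app.NotificationManager.IMPORTANCE_MIN);
            getSystemService(android.app.NotificationManager.class).createNotificationChannel(channel);
            android.app.Notification notification = new android.app.Notification.Builder(this, CHANNEL_ID)
                    .setContentTitle("DataStats running") // placeholder text
                    .setSmallIcon(android.R.drawable.stat_notify_sync) // placeholder icon
                    .build();
            // Must run shortly after startForegroundService(), or the service is killed by the system.
            startForeground(1, notification);
        }
    }
    @Override
    public android.os.IBinder onBind(android.content.Intent intent) {
        return null;
    }
}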
c362a24479fe0345c2ae2524c0cab8d215a2a885
|
src/edu/washington/escience/myria/operator/NAryOperator.java
|
src/edu/washington/escience/myria/operator/NAryOperator.java
|
package edu.washington.escience.myria.operator;
import java.util.Objects;
import com.google.common.base.Preconditions;
import edu.washington.escience.myria.Schema;
public abstract class NAryOperator extends Operator {
/**
* The children of the n-ary operator.
* */
protected Operator[] children;
public NAryOperator() {
super();
}
@Override
public Operator[] getChildren() {
return children;
}
@Override
public void setChildren(final Operator[] children) {
Objects.requireNonNull(children);
Preconditions.checkArgument(children.length > 0);
this.children = children;
}
@Override
public Schema getSchema() {
return children[0].getSchema();
}
/**
* @return number of children
*/
protected int numChildren() {
return children.length;
}
}
|
package edu.washington.escience.myria.operator;
import java.util.Objects;
import com.google.common.base.Preconditions;
import edu.washington.escience.myria.Schema;
/**
*
* @author dominik
*
*/
public abstract class NAryOperator extends Operator {
/** Required for Java serialization. */
private static final long serialVersionUID = 1L;
/**
* The children of the n-ary operator.
* */
protected Operator[] children;
/**
* Default, empty constructor.
*/
public NAryOperator() {
super();
}
@Override
public Operator[] getChildren() {
return children;
}
@Override
public void setChildren(final Operator[] children) {
Objects.requireNonNull(children);
Preconditions.checkArgument(children.length > 0);
this.children = children;
}
@Override
public Schema getSchema() {
return children[0].getSchema();
}
/**
* @return number of children
*/
protected int numChildren() {
return children.length;
}
}
|
Write comments in n-ary operator
|
Write comments in n-ary operator
|
Java
|
bsd-3-clause
|
bsalimi/myria,bsalimi/myria,jamesmarva/myria,uwescience/myria,bsalimi/myria,uwescience/myria,jamesmarva/myria,jamesmarva/myria,uwescience/myria
|
java
|
## Code Before:
package edu.washington.escience.myria.operator;
import java.util.Objects;
import com.google.common.base.Preconditions;
import edu.washington.escience.myria.Schema;
public abstract class NAryOperator extends Operator {
/**
* The children of the n-ary operator.
* */
protected Operator[] children;
public NAryOperator() {
super();
}
@Override
public Operator[] getChildren() {
return children;
}
@Override
public void setChildren(final Operator[] children) {
Objects.requireNonNull(children);
Preconditions.checkArgument(children.length > 0);
this.children = children;
}
@Override
public Schema getSchema() {
return children[0].getSchema();
}
/**
* @return number of children
*/
protected int numChildren() {
return children.length;
}
}
## Instruction:
Write comments in n-ary operator
## Code After:
package edu.washington.escience.myria.operator;
import java.util.Objects;
import com.google.common.base.Preconditions;
import edu.washington.escience.myria.Schema;
/**
*
* @author dominik
*
*/
public abstract class NAryOperator extends Operator {
/** Required for Java serialization. */
private static final long serialVersionUID = 1L;
/**
* The children of the n-ary operator.
* */
protected Operator[] children;
/**
* Default, empty constructor.
*/
public NAryOperator() {
super();
}
@Override
public Operator[] getChildren() {
return children;
}
@Override
public void setChildren(final Operator[] children) {
Objects.requireNonNull(children);
Preconditions.checkArgument(children.length > 0);
this.children = children;
}
@Override
public Schema getSchema() {
return children[0].getSchema();
}
/**
* @return number of children
*/
protected int numChildren() {
return children.length;
}
}
|
// ... existing code ...
import edu.washington.escience.myria.Schema;
/**
*
* @author dominik
*
*/
public abstract class NAryOperator extends Operator {
/** Required for Java serialization. */
private static final long serialVersionUID = 1L;
/**
* The children of the n-ary operator.
// ... modified code ...
* */
protected Operator[] children;
/**
* Default, empty constructor.
*/
public NAryOperator() {
super();
}
// ... rest of the code ...
|
7a5c3b6b5d3b51bc1ba4c2fd20dee640415761a5
|
src/main/java/io/github/likcoras/asuka/IgnoreManager.java
|
src/main/java/io/github/likcoras/asuka/IgnoreManager.java
|
package io.github.likcoras.asuka;
import java.io.BufferedWriter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import org.pircbotx.User;
import lombok.Cleanup;
public class IgnoreManager {
private Set<String> ignored;
private Path ignoreFile;
public IgnoreManager() {
ignored = Collections.synchronizedSet(new HashSet<String>());
ignoreFile = Paths.get("ignore.txt");
}
public void addIgnored(User user) {
addIgnored(BotUtil.getId(user));
}
public void addIgnored(String user) {
ignored.add(user);
}
public void removeIgnored(User user) {
removeIgnored(BotUtil.getId(user));
}
public void removeIgnored(String user) {
ignored.remove(user);
}
public boolean isIgnored(User user) {
return ignored.contains(BotUtil.getId(user));
}
public void writeFile() throws IOException {
@Cleanup
BufferedWriter write = Files.newBufferedWriter(ignoreFile);
synchronized(ignored) {
for (String user : ignored)
write.write(user + "\n");
}
write.flush();
}
}
|
package io.github.likcoras.asuka;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import org.pircbotx.User;
import lombok.Cleanup;
public class IgnoreManager {
private Set<String> ignored;
public IgnoreManager() {
ignored = Collections.synchronizedSet(new HashSet<String>());
}
public void addIgnored(User user) {
addIgnored(BotUtil.getId(user));
}
public void addIgnored(String user) {
ignored.add(user);
}
public void removeIgnored(User user) {
removeIgnored(BotUtil.getId(user));
}
public void removeIgnored(String user) {
ignored.remove(user);
}
public boolean isIgnored(User user) {
return ignored.contains(BotUtil.getId(user));
}
public void readFile(Path ignoreFile) throws IOException {
@Cleanup
BufferedReader read = Files.newBufferedReader(ignoreFile);
synchronized (ignored) {
String line;
while ((line = read.readLine()) != null)
ignored.add(line);
}
}
public void writeFile(Path ignoreFile) throws IOException {
@Cleanup
BufferedWriter write = Files.newBufferedWriter(ignoreFile);
synchronized (ignored) {
for (String user : ignored)
write.write(user + "\n");
}
write.flush();
}
}
|
Allow reading and writing from a specified Path
|
Allow reading and writing from a specified Path
|
Java
|
mit
|
likcoras/SSBot,likcoras/Asuka
|
java
|
## Code Before:
package io.github.likcoras.asuka;
import java.io.BufferedWriter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import org.pircbotx.User;
import lombok.Cleanup;
public class IgnoreManager {
private Set<String> ignored;
private Path ignoreFile;
public IgnoreManager() {
ignored = Collections.synchronizedSet(new HashSet<String>());
ignoreFile = Paths.get("ignore.txt");
}
public void addIgnored(User user) {
addIgnored(BotUtil.getId(user));
}
public void addIgnored(String user) {
ignored.add(user);
}
public void removeIgnored(User user) {
removeIgnored(BotUtil.getId(user));
}
public void removeIgnored(String user) {
ignored.remove(user);
}
public boolean isIgnored(User user) {
return ignored.contains(BotUtil.getId(user));
}
public void writeFile() throws IOException {
@Cleanup
BufferedWriter write = Files.newBufferedWriter(ignoreFile);
synchronized(ignored) {
for (String user : ignored)
write.write(user + "\n");
}
write.flush();
}
}
## Instruction:
Allow reading and writing from a specified Path
## Code After:
package io.github.likcoras.asuka;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import org.pircbotx.User;
import lombok.Cleanup;
public class IgnoreManager {
private Set<String> ignored;
public IgnoreManager() {
ignored = Collections.synchronizedSet(new HashSet<String>());
}
public void addIgnored(User user) {
addIgnored(BotUtil.getId(user));
}
public void addIgnored(String user) {
ignored.add(user);
}
public void removeIgnored(User user) {
removeIgnored(BotUtil.getId(user));
}
public void removeIgnored(String user) {
ignored.remove(user);
}
public boolean isIgnored(User user) {
return ignored.contains(BotUtil.getId(user));
}
public void readFile(Path ignoreFile) throws IOException {
@Cleanup
BufferedReader read = Files.newBufferedReader(ignoreFile);
synchronized (ignored) {
String line;
while ((line = read.readLine()) != null)
ignored.add(line);
}
}
public void writeFile(Path ignoreFile) throws IOException {
@Cleanup
BufferedWriter write = Files.newBufferedWriter(ignoreFile);
synchronized (ignored) {
for (String user : ignored)
write.write(user + "\n");
}
write.flush();
}
}
|
...
package io.github.likcoras.asuka;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
...
public class IgnoreManager {
private Set<String> ignored;
public IgnoreManager() {
ignored = Collections.synchronizedSet(new HashSet<String>());
}
public void addIgnored(User user) {
...
return ignored.contains(BotUtil.getId(user));
}
public void readFile(Path ignoreFile) throws IOException {
@Cleanup
BufferedReader read = Files.newBufferedReader(ignoreFile);
synchronized (ignored) {
String line;
while ((line = read.readLine()) != null)
ignored.add(line);
}
}
public void writeFile(Path ignoreFile) throws IOException {
@Cleanup
BufferedWriter write = Files.newBufferedWriter(ignoreFile);
synchronized (ignored) {
for (String user : ignored)
write.write(user + "\n");
}
...
|
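For context on how the reworked IgnoreManager above might now be driven, here is a small hypothetical usage sketch; the calling class, the file location, and the user-id string are assumptions for illustration rather than code from the Asuka repository, and the class is assumed to sit in the same package so IgnoreManager is visible.
package io.github.likcoras.asuka; // assumed placement alongside IgnoreManager
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
public class IgnoreManagerUsage {
    public static void main(String[] args) throws IOException {
        Path ignoreFile = Paths.get("ignore.txt"); // the caller now chooses the path
        IgnoreManager manager = new IgnoreManager();
        // Load previously persisted ignores, if the file already exists.
        if (Files.exists(ignoreFile)) {
            manager.readFile(ignoreFile);
        }
        manager.addIgnored("some-user-id"); // id format is an assumption; the bot derives it via BotUtil.getId
        manager.writeFile(ignoreFile); // persist to the same caller-chosen path
    }
}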
cf62b474bcdab74ee67cda54af53a29a9625d836
|
services/slip/src/main/java/com/ippon/jug/slip/SlipController.java
|
services/slip/src/main/java/com/ippon/jug/slip/SlipController.java
|
package com.ippon.jug.slip;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.nio.file.Files;
import java.nio.file.Paths;
@RestController
public class SlipController {
@RequestMapping(value = "/request")
public String getRequest() throws InterruptedException, UnknownHostException {
this.doSlip();
StringBuilder builder = new StringBuilder()
.append("Hello I'm ")
.append(InetAddress.getLocalHost().getHostName())
.append(". My little secret is ... ");
try {
Files.readAllLines(Paths.get("/run/secrets/bdx"))
.forEach(builder::append);
} catch (IOException e) {
// No secret here
builder.append(" UNKNOWN !");
}
return builder.toString();
}
private static final int SLEEP_MILLIS = 50;
private synchronized void doSlip() throws InterruptedException {
Thread.sleep(SLEEP_MILLIS);
}
@RequestMapping(value = "/dockerHealth")
public ResponseEntity<Void> getHealth() {
return new ResponseEntity<Void>(HttpStatus.INTERNAL_SERVER_ERROR);
}
}
|
package com.ippon.jug.slip;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.nio.file.Files;
import java.nio.file.Paths;
@RestController
public class SlipController {
@RequestMapping(value = "/request")
public String getRequest() throws InterruptedException, UnknownHostException {
this.doSlip();
StringBuilder builder = new StringBuilder()
.append("Hello I'm ")
.append(InetAddress.getLocalHost().getHostName())
.append(". My little secret is ... ");
try {
Files.readAllLines(Paths.get("/run/secrets/bdx"))
.forEach(builder::append);
} catch (IOException e) {
// No secret here
builder.append(" UNKNOWN !");
}
return builder.toString();
}
private static final int SLEEP_MILLIS = 50;
private synchronized void doSlip() throws InterruptedException {
Thread.sleep(SLEEP_MILLIS);
}
@RequestMapping(value = "/dockerHealth")
public ResponseEntity<Void> getHealth() {
return new ResponseEntity<Void>(HttpStatus.OK);
}
}
|
Set to OK the answer of docker health endpoint
|
Set to OK the answer of docker health endpoint
|
Java
|
apache-2.0
|
ImFlog/docker-discovery,ImFlog/docker-discovery
|
java
|
## Code Before:
package com.ippon.jug.slip;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.nio.file.Files;
import java.nio.file.Paths;
@RestController
public class SlipController {
@RequestMapping(value = "/request")
public String getRequest() throws InterruptedException, UnknownHostException {
this.doSlip();
StringBuilder builder = new StringBuilder()
.append("Hello I'm ")
.append(InetAddress.getLocalHost().getHostName())
.append(". My little secret is ... ");
try {
Files.readAllLines(Paths.get("/run/secrets/bdx"))
.forEach(builder::append);
} catch (IOException e) {
// No secret here
builder.append(" UNKNOWN !");
}
return builder.toString();
}
private static final int SLEEP_MILLIS = 50;
private synchronized void doSlip() throws InterruptedException {
Thread.sleep(SLEEP_MILLIS);
}
@RequestMapping(value = "/dockerHealth")
public ResponseEntity<Void> getHealth() {
return new ResponseEntity<Void>(HttpStatus.INTERNAL_SERVER_ERROR);
}
}
## Instruction:
Set to OK the answer of docker health endpoint
## Code After:
package com.ippon.jug.slip;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.nio.file.Files;
import java.nio.file.Paths;
@RestController
public class SlipController {
@RequestMapping(value = "/request")
public String getRequest() throws InterruptedException, UnknownHostException {
this.doSlip();
StringBuilder builder = new StringBuilder()
.append("Hello I'm ")
.append(InetAddress.getLocalHost().getHostName())
.append(". My little secret is ... ");
try {
Files.readAllLines(Paths.get("/run/secrets/bdx"))
.forEach(builder::append);
} catch (IOException e) {
// No secret here
builder.append(" UNKNOWN !");
}
return builder.toString();
}
private static final int SLEEP_MILLIS = 50;
private synchronized void doSlip() throws InterruptedException {
Thread.sleep(SLEEP_MILLIS);
}
@RequestMapping(value = "/dockerHealth")
public ResponseEntity<Void> getHealth() {
return new ResponseEntity<Void>(HttpStatus.OK);
}
}
|
# ... existing code ...
@RequestMapping(value = "/dockerHealth")
public ResponseEntity<Void> getHealth() {
return new ResponseEntity<Void>(HttpStatus.OK);
}
}
# ... rest of the code ...
|
46ebeba28f8fbb9d43457aa3fa539b29048a581b
|
netbox/users/api/views.py
|
netbox/users/api/views.py
|
from django.contrib.auth.models import Group, User
from django.db.models import Count
from users import filters
from users.models import ObjectPermission
from utilities.api import ModelViewSet
from utilities.querysets import RestrictedQuerySet
from . import serializers
#
# Users and groups
#
class UserViewSet(ModelViewSet):
queryset = RestrictedQuerySet(model=User).prefetch_related('groups')
serializer_class = serializers.UserSerializer
filterset_class = filters.UserFilterSet
class GroupViewSet(ModelViewSet):
queryset = RestrictedQuerySet(model=Group).annotate(user_count=Count('user'))
serializer_class = serializers.GroupSerializer
filterset_class = filters.GroupFilterSet
#
# ObjectPermissions
#
class ObjectPermissionViewSet(ModelViewSet):
queryset = ObjectPermission.objects.prefetch_related('object_types', 'groups', 'users')
serializer_class = serializers.ObjectPermissionSerializer
filterset_class = filters.ObjectPermissionFilterSet
|
from django.contrib.auth.models import Group, User
from django.db.models import Count
from users import filters
from users.models import ObjectPermission
from utilities.api import ModelViewSet
from utilities.querysets import RestrictedQuerySet
from . import serializers
#
# Users and groups
#
class UserViewSet(ModelViewSet):
queryset = RestrictedQuerySet(model=User).prefetch_related('groups').order_by('username')
serializer_class = serializers.UserSerializer
filterset_class = filters.UserFilterSet
class GroupViewSet(ModelViewSet):
queryset = RestrictedQuerySet(model=Group).annotate(user_count=Count('user')).order_by('name')
serializer_class = serializers.GroupSerializer
filterset_class = filters.GroupFilterSet
#
# ObjectPermissions
#
class ObjectPermissionViewSet(ModelViewSet):
queryset = ObjectPermission.objects.prefetch_related('object_types', 'groups', 'users')
serializer_class = serializers.ObjectPermissionSerializer
filterset_class = filters.ObjectPermissionFilterSet
|
Set default ordering for user and group API endpoints
|
Set default ordering for user and group API endpoints
|
Python
|
apache-2.0
|
digitalocean/netbox,digitalocean/netbox,digitalocean/netbox,digitalocean/netbox
|
python
|
## Code Before:
from django.contrib.auth.models import Group, User
from django.db.models import Count
from users import filters
from users.models import ObjectPermission
from utilities.api import ModelViewSet
from utilities.querysets import RestrictedQuerySet
from . import serializers
#
# Users and groups
#
class UserViewSet(ModelViewSet):
queryset = RestrictedQuerySet(model=User).prefetch_related('groups')
serializer_class = serializers.UserSerializer
filterset_class = filters.UserFilterSet
class GroupViewSet(ModelViewSet):
queryset = RestrictedQuerySet(model=Group).annotate(user_count=Count('user'))
serializer_class = serializers.GroupSerializer
filterset_class = filters.GroupFilterSet
#
# ObjectPermissions
#
class ObjectPermissionViewSet(ModelViewSet):
queryset = ObjectPermission.objects.prefetch_related('object_types', 'groups', 'users')
serializer_class = serializers.ObjectPermissionSerializer
filterset_class = filters.ObjectPermissionFilterSet
## Instruction:
Set default ordering for user and group API endpoints
## Code After:
from django.contrib.auth.models import Group, User
from django.db.models import Count
from users import filters
from users.models import ObjectPermission
from utilities.api import ModelViewSet
from utilities.querysets import RestrictedQuerySet
from . import serializers
#
# Users and groups
#
class UserViewSet(ModelViewSet):
queryset = RestrictedQuerySet(model=User).prefetch_related('groups').order_by('username')
serializer_class = serializers.UserSerializer
filterset_class = filters.UserFilterSet
class GroupViewSet(ModelViewSet):
queryset = RestrictedQuerySet(model=Group).annotate(user_count=Count('user')).order_by('name')
serializer_class = serializers.GroupSerializer
filterset_class = filters.GroupFilterSet
#
# ObjectPermissions
#
class ObjectPermissionViewSet(ModelViewSet):
queryset = ObjectPermission.objects.prefetch_related('object_types', 'groups', 'users')
serializer_class = serializers.ObjectPermissionSerializer
filterset_class = filters.ObjectPermissionFilterSet
|
...
#
class UserViewSet(ModelViewSet):
queryset = RestrictedQuerySet(model=User).prefetch_related('groups').order_by('username')
serializer_class = serializers.UserSerializer
filterset_class = filters.UserFilterSet
class GroupViewSet(ModelViewSet):
queryset = RestrictedQuerySet(model=Group).annotate(user_count=Count('user')).order_by('name')
serializer_class = serializers.GroupSerializer
filterset_class = filters.GroupFilterSet
...
|
370c49eba30253f259454884441e9921b51719ab
|
dudebot/ai.py
|
dudebot/ai.py
|
class BotAI(object):
def set_nickname(self, nickname):
self.nickname = nickname
def initialise(self, init_params_as_dict):
pass
def respond(self, sender_nickname, message):
pass
class Echo(BotAI):
def respond(self, sender_nickname, message):
return True, message
|
class BotAI(object):
def set_nickname(self, nickname):
self.nickname = nickname
def initialise(self, init_params_as_dict):
pass
def respond(self, sender_nickname, message):
return False, ''
class message_must_begin_with_prefix(object):
"""A simple decorator so that a bot AI can ignore all messages that don't
begin with the given prefix.
That way you can have your dude bot only respond to messages that, for
example, begin with 'dude '.
"""
def __init__(self, desired_prefix):
self.desired_prefix = desired_prefix
def __call__(self, func):
def wrapped_func(botai, sender_nickname, message, *args, **kwargs):
if message.startswith(self.desired_prefix):
return func(botai, sender_nickname, message, *args, **kwargs)
else:
return False, ''
return wrapped_func
def message_must_begin_with_nickname(func):
"""A simple decorator so that a bot AI can ignore all messages that don't
begin with the bot AI's nickname.
"""
def wrapped_func(botai, sender_nickname, message, *args, **kwargs):
if message.startswith(botai.nickname):
return func(botai, sender_nickname, message, *args, **kwargs)
else:
return False, ''
return wrapped_func
class Echo(BotAI):
def respond(self, sender_nickname, message):
return True, message
|
Add some decorators to make life easier.
|
Add some decorators to make life easier.
|
Python
|
bsd-2-clause
|
sujaymansingh/dudebot
|
python
|
## Code Before:
class BotAI(object):
def set_nickname(self, nickname):
self.nickname = nickname
def initialise(self, init_params_as_dict):
pass
def respond(self, sender_nickname, message):
pass
class Echo(BotAI):
def respond(self, sender_nickname, message):
return True, message
## Instruction:
Add some decorators to make life easier.
## Code After:
class BotAI(object):
def set_nickname(self, nickname):
self.nickname = nickname
def initialise(self, init_params_as_dict):
pass
def respond(self, sender_nickname, message):
return False, ''
class message_must_begin_with_prefix(object):
"""A simple decorator so that a bot AI can ignore all messages that don't
begin with the given prefix.
That way you can have your dude bot only respond to messages that, for
example, begin with 'dude '.
"""
def __init__(self, desired_prefix):
self.desired_prefix = desired_prefix
def __call__(self, func):
def wrapped_func(botai, sender_nickname, message, *args, **kwargs):
if message.startswith(self.desired_prefix):
return func(botai, sender_nickname, message, *args, **kwargs)
else:
return False, ''
return wrapped_func
def message_must_begin_with_nickname(func):
"""A simple decorator so that a bot AI can ignore all messages that don't
begin with the bot AI's nickname.
"""
def wrapped_func(botai, sender_nickname, message, *args, **kwargs):
if message.startswith(botai.nickname):
return func(botai, sender_nickname, message, *args, **kwargs)
else:
return False, ''
return wrapped_func
class Echo(BotAI):
def respond(self, sender_nickname, message):
return True, message
|
// ... existing code ...
pass
def respond(self, sender_nickname, message):
return False, ''
class message_must_begin_with_prefix(object):
"""A simple decorator so that a bot AI can ignore all messages that don't
begin with the given prefix.
That way you can have your dude bot only respond to messages that, for
example, begin with 'dude '.
"""
def __init__(self, desired_prefix):
self.desired_prefix = desired_prefix
def __call__(self, func):
def wrapped_func(botai, sender_nickname, message, *args, **kwargs):
if message.startswith(self.desired_prefix):
return func(botai, sender_nickname, message, *args, **kwargs)
else:
return False, ''
return wrapped_func
def message_must_begin_with_nickname(func):
"""A simple decorator so that a bot AI can ignore all messages that don't
begin with the bot AI's nickname.
"""
def wrapped_func(botai, sender_nickname, message, *args, **kwargs):
if message.startswith(botai.nickname):
return func(botai, sender_nickname, message, *args, **kwargs)
else:
return False, ''
return wrapped_func
class Echo(BotAI):
// ... rest of the code ...
|
8c7daf1c0e140cb68c425b34eb60d9b001fd7063
|
fiduswriter/base/management/commands/jest.py
|
fiduswriter/base/management/commands/jest.py
|
from pathlib import Path
import shutil
from subprocess import call
from django.core.management.base import BaseCommand
from django.core.management import call_command
from django.conf import settings
BABEL_CONF = '''
module.exports = {
presets: [
[
'@babel/preset-env',
{
targets: {
node: 'current',
},
},
],
],
}
'''
class Command(BaseCommand):
help = 'Run jest unit tests.'
def handle(self, *ars, **options):
call_command('transpile')
p = Path(settings.PROJECT_PATH) / '.transpile'
shutil.os.chdir(p)
conf_file = p / 'babel.config.js'
if not conf_file.exists():
print(f'Creating "babel.config.js" at {p}.')
conf_file.write_text(BABEL_CONF)
command_array = [
p / 'node_modules' / '.bin' / 'jest',
'--no-cache',
]
return_value = call(command_array)
if return_value > 0:
exit(return_value)
|
from pathlib import Path
import shutil
from subprocess import call
from django.core.management.base import BaseCommand
from django.core.management import call_command
from django.conf import settings
BABEL_CONF = '''
module.exports = {
presets: [
[
'@babel/preset-env',
{
targets: {
node: 'current',
},
},
],
],
}
'''
class Command(BaseCommand):
help = 'Run jest unit tests.'
def handle(self, *ars, **options):
call_command('transpile')
p = Path(settings.PROJECT_PATH) / '.transpile'
shutil.os.chdir(p)
conf_file = p / 'babel.config.js'
if not conf_file.exists():
print(f'Creating "babel.config.js" at {p}.')
conf_file.write_text(BABEL_CONF)
command_array = [
p / 'node_modules' / '.bin' / 'jest',
'--no-cache',
'--passWithNoTests',
]
return_value = call(command_array)
if return_value > 0:
exit(return_value)
|
Make test suite pass when there are no tests
|
Make test suite pass when there are no tests
|
Python
|
agpl-3.0
|
fiduswriter/fiduswriter,fiduswriter/fiduswriter,fiduswriter/fiduswriter,fiduswriter/fiduswriter
|
python
|
## Code Before:
from pathlib import Path
import shutil
from subprocess import call
from django.core.management.base import BaseCommand
from django.core.management import call_command
from django.conf import settings
BABEL_CONF = '''
module.exports = {
presets: [
[
'@babel/preset-env',
{
targets: {
node: 'current',
},
},
],
],
}
'''
class Command(BaseCommand):
help = 'Run jest unit tests.'
def handle(self, *ars, **options):
call_command('transpile')
p = Path(settings.PROJECT_PATH) / '.transpile'
shutil.os.chdir(p)
conf_file = p / 'babel.config.js'
if not conf_file.exists():
print(f'Creating "babel.config.js" at {p}.')
conf_file.write_text(BABEL_CONF)
command_array = [
p / 'node_modules' / '.bin' / 'jest',
'--no-cache',
]
return_value = call(command_array)
if return_value > 0:
exit(return_value)
## Instruction:
Make test suite pass when there are no tests
## Code After:
from pathlib import Path
import shutil
from subprocess import call
from django.core.management.base import BaseCommand
from django.core.management import call_command
from django.conf import settings
BABEL_CONF = '''
module.exports = {
presets: [
[
'@babel/preset-env',
{
targets: {
node: 'current',
},
},
],
],
}
'''
class Command(BaseCommand):
help = 'Run jest unit tests.'
def handle(self, *ars, **options):
call_command('transpile')
p = Path(settings.PROJECT_PATH) / '.transpile'
shutil.os.chdir(p)
conf_file = p / 'babel.config.js'
if not conf_file.exists():
print(f'Creating "babel.config.js" at {p}.')
conf_file.write_text(BABEL_CONF)
command_array = [
p / 'node_modules' / '.bin' / 'jest',
'--no-cache',
'--passWithNoTests',
]
return_value = call(command_array)
if return_value > 0:
exit(return_value)
|
...
command_array = [
p / 'node_modules' / '.bin' / 'jest',
'--no-cache',
'--passWithNoTests',
]
return_value = call(command_array)
if return_value > 0:
...
|
b5e368437a600d78e22a53abe53c0103b20daa24
|
_python/main/migrations/0003_auto_20191029_2015.py
|
_python/main/migrations/0003_auto_20191029_2015.py
|
from django.db import migrations, models
import main.models
class Migration(migrations.Migration):
dependencies = [
('main', '0002_auto_20191007_1639'),
]
operations = [
migrations.AlterField(
model_name='contentnode',
name='headnote',
field=main.models.SanitizingTextField(blank=True, null=True),
),
migrations.AlterField(
model_name='default',
name='url',
field=models.URLField(max_length=1024),
),
migrations.AlterField(
model_name='textblock',
name='content',
field=main.models.SanitizingCharField(max_length=5242880),
),
]
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0002_auto_20191007_1639'),
]
operations = [
migrations.AlterField(
model_name='default',
name='url',
field=models.URLField(max_length=1024),
),
]
|
Repair migration, which was a no-op in SQL and was 'faked' anyway.
|
Repair migration, which was a no-op in SQL and was 'faked' anyway.
|
Python
|
agpl-3.0
|
harvard-lil/h2o,harvard-lil/h2o,harvard-lil/h2o,harvard-lil/h2o
|
python
|
## Code Before:
from django.db import migrations, models
import main.models
class Migration(migrations.Migration):
dependencies = [
('main', '0002_auto_20191007_1639'),
]
operations = [
migrations.AlterField(
model_name='contentnode',
name='headnote',
field=main.models.SanitizingTextField(blank=True, null=True),
),
migrations.AlterField(
model_name='default',
name='url',
field=models.URLField(max_length=1024),
),
migrations.AlterField(
model_name='textblock',
name='content',
field=main.models.SanitizingCharField(max_length=5242880),
),
]
## Instruction:
Repair migration, which was a no-op in SQL and was 'faked' anyway.
## Code After:
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0002_auto_20191007_1639'),
]
operations = [
migrations.AlterField(
model_name='default',
name='url',
field=models.URLField(max_length=1024),
),
]
|
...
from django.db import migrations, models
class Migration(migrations.Migration):
...
operations = [
migrations.AlterField(
model_name='default',
name='url',
field=models.URLField(max_length=1024),
),
]
...
|
5aca109f486786266164f4ac7a10e4d76f0730e4
|
scrappyr/scraps/forms.py
|
scrappyr/scraps/forms.py
|
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from django import forms
from .models import Scrap
class ScrapForm(forms.ModelForm):
class Meta:
model = Scrap
fields = ['raw_title']
def __init__(self, *args, **kwargs):
super(ScrapForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_class = 'col-md-4'
self.helper.add_input(Submit('submit', 'Add Scrap', css_class='btn-sm'))
|
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from django import forms
from .models import Scrap
class ScrapForm(forms.ModelForm):
class Meta:
model = Scrap
fields = ['raw_title']
def __init__(self, *args, **kwargs):
super(ScrapForm, self).__init__(*args, **kwargs)
self.fields['raw_title'].label = 'New scrap title'
self.helper = FormHelper()
self.helper.form_class = 'col-md-4'
self.helper.add_input(Submit('submit', 'Add Scrap', css_class='btn-sm'))
|
Make add-scrap title user friendly
|
Make add-scrap title user friendly
|
Python
|
mit
|
tonysyu/scrappyr-app,tonysyu/scrappyr-app,tonysyu/scrappyr-app,tonysyu/scrappyr-app
|
python
|
## Code Before:
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from django import forms
from .models import Scrap
class ScrapForm(forms.ModelForm):
class Meta:
model = Scrap
fields = ['raw_title']
def __init__(self, *args, **kwargs):
super(ScrapForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_class = 'col-md-4'
self.helper.add_input(Submit('submit', 'Add Scrap', css_class='btn-sm'))
## Instruction:
Make add-scrap title user friendly
## Code After:
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from django import forms
from .models import Scrap
class ScrapForm(forms.ModelForm):
class Meta:
model = Scrap
fields = ['raw_title']
def __init__(self, *args, **kwargs):
super(ScrapForm, self).__init__(*args, **kwargs)
self.fields['raw_title'].label = 'New scrap title'
self.helper = FormHelper()
self.helper.form_class = 'col-md-4'
self.helper.add_input(Submit('submit', 'Add Scrap', css_class='btn-sm'))
|
// ... existing code ...
def __init__(self, *args, **kwargs):
super(ScrapForm, self).__init__(*args, **kwargs)
self.fields['raw_title'].label = 'New scrap title'
self.helper = FormHelper()
self.helper.form_class = 'col-md-4'
self.helper.add_input(Submit('submit', 'Add Scrap', css_class='btn-sm'))
// ... rest of the code ...
|
74ff2d2fd3575da37ad16dfe122e4e568ba57e47
|
src/main/java/de/schunterkino/kinoapi/websocket/messages/christie/LampOffMessage.java
|
src/main/java/de/schunterkino/kinoapi/websocket/messages/christie/LampOffMessage.java
|
package de.schunterkino.kinoapi.websocket.messages.christie;
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import de.schunterkino.kinoapi.websocket.messages.BaseMessage;
public class LampOffMessage extends BaseMessage {
public String timestamp;
public LampOffMessage(Instant lampOffTime) {
super("playback", "lamp_off");
DateTimeFormatter formatter = DateTimeFormatter.ISO_DATE_TIME.withZone(ZoneId.systemDefault());
this.timestamp = formatter.format(lampOffTime);
}
}
|
package de.schunterkino.kinoapi.websocket.messages.christie;
import java.time.Instant;
import de.schunterkino.kinoapi.websocket.messages.BaseMessage;
public class LampOffMessage extends BaseMessage {
public String timestamp;
public LampOffMessage(Instant lampOffTime) {
super("playback", "lamp_off");
this.timestamp = lampOffTime.toString();
}
}
|
Fix timestamp format in lamp_off message
|
Fix timestamp format in lamp_off message
|
Java
|
apache-2.0
|
SchunterKino/kinoapi
|
java
|
## Code Before:
package de.schunterkino.kinoapi.websocket.messages.christie;
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import de.schunterkino.kinoapi.websocket.messages.BaseMessage;
public class LampOffMessage extends BaseMessage {
public String timestamp;
public LampOffMessage(Instant lampOffTime) {
super("playback", "lamp_off");
DateTimeFormatter formatter = DateTimeFormatter.ISO_DATE_TIME.withZone(ZoneId.systemDefault());
this.timestamp = formatter.format(lampOffTime);
}
}
## Instruction:
Fix timestamp format in lamp_off message
## Code After:
package de.schunterkino.kinoapi.websocket.messages.christie;
import java.time.Instant;
import de.schunterkino.kinoapi.websocket.messages.BaseMessage;
public class LampOffMessage extends BaseMessage {
public String timestamp;
public LampOffMessage(Instant lampOffTime) {
super("playback", "lamp_off");
this.timestamp = lampOffTime.toString();
}
}
|
# ... existing code ...
package de.schunterkino.kinoapi.websocket.messages.christie;
import java.time.Instant;
import de.schunterkino.kinoapi.websocket.messages.BaseMessage;
# ... modified code ...
public LampOffMessage(Instant lampOffTime) {
super("playback", "lamp_off");
this.timestamp = lampOffTime.toString();
}
}
# ... rest of the code ...
|
f185f04f6efdabe161ae29ba72f7208b8adccc41
|
bulletin/tools/plugins/models.py
|
bulletin/tools/plugins/models.py
|
from django.db import models
from bulletin.models import Post
class Event(Post):
start_date = models.DateTimeField()
end_date = models.DateTimeField(null=True, blank=True)
time = models.CharField(max_length=255,
null=True, blank=True)
organization = models.CharField(max_length=255,
null=True, blank=True)
location = models.CharField(max_length=255)
class Job(Post):
organization = models.CharField(max_length=255)
class NewResource(Post):
blurb = models.TextField()
class Opportunity(Post):
blurb = models.TextField()
class Meta:
verbose_name_plural = 'opportunities'
class Story(Post):
blurb = models.TextField()
date = models.DateTimeField()
class Meta:
verbose_name_plural = 'stories'
|
from django.db import models
from bulletin.models import Post
class Event(Post):
start_date = models.DateTimeField()
end_date = models.DateTimeField(null=True, blank=True)
time = models.CharField(max_length=255,
null=True, blank=True)
organization = models.CharField(max_length=255,
null=True, blank=True)
location = models.CharField(max_length=255)
class Job(Post):
organization = models.CharField(max_length=255)
class NewResource(Post):
blurb = models.TextField()
verbose_name = 'newresource'
class Opportunity(Post):
blurb = models.TextField()
class Meta:
verbose_name_plural = 'opportunities'
class Story(Post):
blurb = models.TextField()
date = models.DateTimeField()
class Meta:
verbose_name_plural = 'stories'
|
Set verbose name of NewResource.
|
Set verbose name of NewResource.
|
Python
|
mit
|
AASHE/django-bulletin,AASHE/django-bulletin,AASHE/django-bulletin
|
python
|
## Code Before:
from django.db import models
from bulletin.models import Post
class Event(Post):
start_date = models.DateTimeField()
end_date = models.DateTimeField(null=True, blank=True)
time = models.CharField(max_length=255,
null=True, blank=True)
organization = models.CharField(max_length=255,
null=True, blank=True)
location = models.CharField(max_length=255)
class Job(Post):
organization = models.CharField(max_length=255)
class NewResource(Post):
blurb = models.TextField()
class Opportunity(Post):
blurb = models.TextField()
class Meta:
verbose_name_plural = 'opportunities'
class Story(Post):
blurb = models.TextField()
date = models.DateTimeField()
class Meta:
verbose_name_plural = 'stories'
## Instruction:
Set verbose name of NewResource.
## Code After:
from django.db import models
from bulletin.models import Post
class Event(Post):
start_date = models.DateTimeField()
end_date = models.DateTimeField(null=True, blank=True)
time = models.CharField(max_length=255,
null=True, blank=True)
organization = models.CharField(max_length=255,
null=True, blank=True)
location = models.CharField(max_length=255)
class Job(Post):
organization = models.CharField(max_length=255)
class NewResource(Post):
blurb = models.TextField()
verbose_name = 'newresource'
class Opportunity(Post):
blurb = models.TextField()
class Meta:
verbose_name_plural = 'opportunities'
class Story(Post):
blurb = models.TextField()
date = models.DateTimeField()
class Meta:
verbose_name_plural = 'stories'
|
# ... existing code ...
class NewResource(Post):
blurb = models.TextField()
verbose_name = 'newresource'
class Opportunity(Post):
# ... rest of the code ...
|
643f666468d3e378cc0b39e501c253e33c267f0f
|
tests/python/PyUnitTests.py
|
tests/python/PyUnitTests.py
|
PYTHONPATH="%builddir%":"%srcdir%":$PYTHONPATH \
python "%srcdir%"/tests/python/UnitTests.py
|
PYTHONPATH="%builddir%":"%srcdir%":$PYTHONPATH \
DYLD_LIBRARY_PATH="%builddir%/.libs":"%builddir%/gdtoa/.libs":$DYLD_LIBRARY_PATH \
python "%srcdir%"/tests/python/UnitTests.py
|
Set DYLD_LIBRARY_PATH to find locally built dynamic libraries.
|
Set DYLD_LIBRARY_PATH to find locally built dynamic libraries.
|
Python
|
bsd-3-clause
|
duncanmortimer/ledger,paulbdavis/ledger,duncanmortimer/ledger,duncanmortimer/ledger,afh/ledger,paulbdavis/ledger,duncanmortimer/ledger,duncanmortimer/ledger,afh/ledger,ledger/ledger,jwakely/ledger,ledger/ledger,jwakely/ledger,ledger/ledger,ledger/ledger,paulbdavis/ledger,jwakely/ledger,afh/ledger,paulbdavis/ledger,ledger/ledger,duncanmortimer/ledger,jwakely/ledger,jwakely/ledger,duncanmortimer/ledger,paulbdavis/ledger,paulbdavis/ledger,afh/ledger,jwakely/ledger,jwakely/ledger,afh/ledger,paulbdavis/ledger
|
python
|
## Code Before:
PYTHONPATH="%builddir%":"%srcdir%":$PYTHONPATH \
python "%srcdir%"/tests/python/UnitTests.py
## Instruction:
Set DYLD_LIBRARY_PATH to find locally built dynamic libraries.
## Code After:
PYTHONPATH="%builddir%":"%srcdir%":$PYTHONPATH \
DYLD_LIBRARY_PATH="%builddir%/.libs":"%builddir%/gdtoa/.libs":$DYLD_LIBRARY_PATH \
python "%srcdir%"/tests/python/UnitTests.py
|
...
PYTHONPATH="%builddir%":"%srcdir%":$PYTHONPATH \
DYLD_LIBRARY_PATH="%builddir%/.libs":"%builddir%/gdtoa/.libs":$DYLD_LIBRARY_PATH \
python "%srcdir%"/tests/python/UnitTests.py
...
|
d1504f3c3129c926bd9897a6660669f146e64c38
|
cachupy/cachupy.py
|
cachupy/cachupy.py
|
import datetime
class Cache:
EXPIRE_IN = 'expire_in'
def __init__(self):
self.store = {}
def get(self, key):
"""Gets a value based upon a key."""
self._check_expiry(key)
return self.store[key]['value']
def set(self, dictionary, expire_in):
"""Sets a dictionary to the cache with a timedelta expiration."""
for key in dictionary.keys():
self.store[key] = {
Cache.EXPIRE_IN: datetime.datetime.now() + expire_in,
'value': dictionary[key]
}
def has(self, key):
"""Returns whether a key is in the cache."""
self._check_expiry(key)
return key in self.store
def _check_expiry(self, key):
"""Removes a key/value pair if it's expired."""
if key in self.store and datetime.datetime.now() > self.store[key][Cache.EXPIRE_IN]:
self.store.pop(key, None)
|
import datetime
class Cache:
EXPIRE_IN = 'expire_in'
VALUE = 'value'
def __init__(self):
self.lock = False
self.store = {}
def get(self, key):
"""Gets a value based upon a key."""
self._check_expiry(key)
return self.store[key][Cache.VALUE]
def set(self, expire_in, *args):
"""Sets a dictionary to the cache with a timedelta expiration."""
for arg in args:
if isinstance(arg, dict):
for k,v in arg.items():
self._set(k, v, expire_in)
else:
for v in arg:
self._set(v[0], v[1], expire_in)
def has(self, key):
"""Returns whether a key is in the cache."""
self._check_expiry(key)
return key in self.store
def _set(self, key, value, expire_in):
self.store[key] = {
Cache.EXPIRE_IN: datetime.datetime.now() + expire_in,
Cache.VALUE: value
}
def _check_expiry(self, key):
"""Removes a key/value pair if it's expired."""
if not self.lock and key in self.store and datetime.datetime.now() > self.store[key][Cache.EXPIRE_IN]:
self.store.pop(key, None)
|
Change signature of set() method.
|
Change signature of set() method.
|
Python
|
mit
|
patrickbird/cachupy
|
python
|
## Code Before:
import datetime
class Cache:
EXPIRE_IN = 'expire_in'
def __init__(self):
self.store = {}
def get(self, key):
"""Gets a value based upon a key."""
self._check_expiry(key)
return self.store[key]['value']
def set(self, dictionary, expire_in):
"""Sets a dictionary to the cache with a timedelta expiration."""
for key in dictionary.keys():
self.store[key] = {
Cache.EXPIRE_IN: datetime.datetime.now() + expire_in,
'value': dictionary[key]
}
def has(self, key):
"""Returns whether a key is in the cache."""
self._check_expiry(key)
return key in self.store
def _check_expiry(self, key):
"""Removes a key/value pair if it's expired."""
if key in self.store and datetime.datetime.now() > self.store[key][Cache.EXPIRE_IN]:
self.store.pop(key, None)
## Instruction:
Change signature of set() method.
## Code After:
import datetime
class Cache:
EXPIRE_IN = 'expire_in'
VALUE = 'value'
def __init__(self):
self.lock = False
self.store = {}
def get(self, key):
"""Gets a value based upon a key."""
self._check_expiry(key)
return self.store[key][Cache.VALUE]
def set(self, expire_in, *args):
"""Sets a dictionary to the cache with a timedelta expiration."""
for arg in args:
if isinstance(arg, dict):
for k,v in arg.items():
self._set(k, v, expire_in)
else:
for v in arg:
self._set(v[0], v[1], expire_in)
def has(self, key):
"""Returns whether a key is in the cache."""
self._check_expiry(key)
return key in self.store
def _set(self, key, value, expire_in):
self.store[key] = {
Cache.EXPIRE_IN: datetime.datetime.now() + expire_in,
Cache.VALUE: value
}
def _check_expiry(self, key):
"""Removes a key/value pair if it's expired."""
if not self.lock and key in self.store and datetime.datetime.now() > self.store[key][Cache.EXPIRE_IN]:
self.store.pop(key, None)
|
...
class Cache:
EXPIRE_IN = 'expire_in'
VALUE = 'value'
def __init__(self):
self.lock = False
self.store = {}
def get(self, key):
"""Gets a value based upon a key."""
self._check_expiry(key)
return self.store[key][Cache.VALUE]
def set(self, expire_in, *args):
"""Sets a dictionary to the cache with a timedelta expiration."""
for arg in args:
if isinstance(arg, dict):
for k,v in arg.items():
self._set(k, v, expire_in)
else:
for v in arg:
self._set(v[0], v[1], expire_in)
def has(self, key):
"""Returns whether a key is in the cache."""
self._check_expiry(key)
return key in self.store
def _set(self, key, value, expire_in):
self.store[key] = {
Cache.EXPIRE_IN: datetime.datetime.now() + expire_in,
Cache.VALUE: value
}
def _check_expiry(self, key):
"""Removes a key/value pair if it's expired."""
if not self.lock and key in self.store and datetime.datetime.now() > self.store[key][Cache.EXPIRE_IN]:
self.store.pop(key, None)
...
|
c9e90de4730050e4ab41fc6b42a4a51018262db7
|
sergey/management/commands/fix_speaker_slugs.py
|
sergey/management/commands/fix_speaker_slugs.py
|
from django.core.management import BaseCommand
from django.template.defaultfilters import slugify
from unidecode import unidecode
from richard.videos.models import Speaker
class Command(BaseCommand):
help = 'Fixes speaker slugs'
def handle(self, *args, **options):
for speaker in Speaker.objects.all():
old_slug = speaker.slug
fixed_slug = slugify(unidecode(speaker.name))
speaker.save()
self.stdout.write(u'Changed slug for %s "%s" => %s\n' % (speaker.name, old_slug, fixed_slug))
|
from django.core.management import BaseCommand
from django.template.defaultfilters import slugify
from unidecode import unidecode
from richard.videos.models import Speaker
class Command(BaseCommand):
help = 'Fixes speaker slugs'
def handle(self, *args, **options):
for speaker in Speaker.objects.all():
old_slug = speaker.slug
try:
speaker.slug = slugify(unidecode(speaker.name))
speaker.save()
self.stdout.write(u'Changed slug for %s "%s" => %s\n' % (speaker.name, old_slug, speaker.slug))
except Exception:
self.stdout.write(u'Problem fixing slug for %s\n' % speaker.name)
|
Fix management command for fixing slugs
|
Fix management command for fixing slugs
|
Python
|
bsd-3-clause
|
WarmongeR1/pyvideo.ru,coagulant/pyvideo.ru,coagulant/pyvideo.ru,WarmongeR1/pyvideo.ru,WarmongeR1/pyvideo.ru,coagulant/pyvideo.ru
|
python
|
## Code Before:
from django.core.management import BaseCommand
from django.template.defaultfilters import slugify
from unidecode import unidecode
from richard.videos.models import Speaker
class Command(BaseCommand):
help = 'Fixes speaker slugs'
def handle(self, *args, **options):
for speaker in Speaker.objects.all():
old_slug = speaker.slug
fixed_slug = slugify(unidecode(speaker.name))
speaker.save()
self.stdout.write(u'Changed slug for %s "%s" => %s\n' % (speaker.name, old_slug, fixed_slug))
## Instruction:
Fix management command for fixing slugs
## Code After:
from django.core.management import BaseCommand
from django.template.defaultfilters import slugify
from unidecode import unidecode
from richard.videos.models import Speaker
class Command(BaseCommand):
help = 'Fixes speaker slugs'
def handle(self, *args, **options):
for speaker in Speaker.objects.all():
old_slug = speaker.slug
try:
speaker.slug = slugify(unidecode(speaker.name))
speaker.save()
self.stdout.write(u'Changed slug for %s "%s" => %s\n' % (speaker.name, old_slug, speaker.slug))
except Exception:
self.stdout.write(u'Problem fixing slug for %s\n' % speaker.name)
|
...
for speaker in Speaker.objects.all():
old_slug = speaker.slug
try:
speaker.slug = slugify(unidecode(speaker.name))
speaker.save()
self.stdout.write(u'Changed slug for %s "%s" => %s\n' % (speaker.name, old_slug, speaker.slug))
except Exception:
self.stdout.write(u'Problem fixing slug for %s\n' % speaker.name)
...
|
3382b5003eadec99f0816d9190038bd2caf6c412
|
system_maintenance/urls.py
|
system_maintenance/urls.py
|
from django.conf.urls import patterns, url
from .views import (DocumentationRecordListView, DocumentationRecordDetailView,
MaintenanceRecordDetailView, MaintenanceRecordListView,
system_maintenance_home_view)
urlpatterns = patterns('',
url(r'^$', system_maintenance_home_view, name='system_maintenance_home_view'),
url(r'^authentication/$', 'django.contrib.auth.views.login', {'template_name': 'system_maintenance/authentication.html'}, name='authentication'),
url(r'^documentation/$', DocumentationRecordListView.as_view(), name='documentation_record_list'),
url(r'^documentation/(?P<pk>\d+)/$', DocumentationRecordDetailView.as_view(), name='documentation_record_detail'),
url(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/system_maintenance/'}, name='logout'),
url(r'^records/$', MaintenanceRecordListView.as_view(), name='maintenance_record_list'),
url(r'^records/(?P<pk>\d+)/$', MaintenanceRecordDetailView.as_view(), name='maintenance_record_detail'),
)
|
from django.conf.urls import url
from django.contrib.auth import views as auth_views
from .views import (DocumentationRecordListView, DocumentationRecordDetailView,
MaintenanceRecordDetailView, MaintenanceRecordListView,
system_maintenance_home_view)
urlpatterns = [
url(r'^$', system_maintenance_home_view, name='system_maintenance_home_view'),
url(r'^authentication/$', auth_views.login, {'template_name': 'system_maintenance/authentication.html'}, name='authentication'),
url(r'^documentation/$', DocumentationRecordListView.as_view(), name='documentation_record_list'),
url(r'^documentation/(?P<pk>\d+)/$', DocumentationRecordDetailView.as_view(), name='documentation_record_detail'),
url(r'^logout/$', auth_views.logout, {'next_page': '/system_maintenance/'}, name='logout'),
url(r'^records/$', MaintenanceRecordListView.as_view(), name='maintenance_record_list'),
url(r'^records/(?P<pk>\d+)/$', MaintenanceRecordDetailView.as_view(), name='maintenance_record_detail'),
]
|
Resolve Django 1.10 deprecation warnings
|
Resolve Django 1.10 deprecation warnings
|
Python
|
bsd-3-clause
|
mfcovington/django-system-maintenance,mfcovington/django-system-maintenance,mfcovington/django-system-maintenance
|
python
|
## Code Before:
from django.conf.urls import patterns, url
from .views import (DocumentationRecordListView, DocumentationRecordDetailView,
MaintenanceRecordDetailView, MaintenanceRecordListView,
system_maintenance_home_view)
urlpatterns = patterns('',
url(r'^$', system_maintenance_home_view, name='system_maintenance_home_view'),
url(r'^authentication/$', 'django.contrib.auth.views.login', {'template_name': 'system_maintenance/authentication.html'}, name='authentication'),
url(r'^documentation/$', DocumentationRecordListView.as_view(), name='documentation_record_list'),
url(r'^documentation/(?P<pk>\d+)/$', DocumentationRecordDetailView.as_view(), name='documentation_record_detail'),
url(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/system_maintenance/'}, name='logout'),
url(r'^records/$', MaintenanceRecordListView.as_view(), name='maintenance_record_list'),
url(r'^records/(?P<pk>\d+)/$', MaintenanceRecordDetailView.as_view(), name='maintenance_record_detail'),
)
## Instruction:
Resolve Django 1.10 deprecation warnings
## Code After:
from django.conf.urls import url
from django.contrib.auth import views as auth_views
from .views import (DocumentationRecordListView, DocumentationRecordDetailView,
MaintenanceRecordDetailView, MaintenanceRecordListView,
system_maintenance_home_view)
urlpatterns = [
url(r'^$', system_maintenance_home_view, name='system_maintenance_home_view'),
url(r'^authentication/$', auth_views.login, {'template_name': 'system_maintenance/authentication.html'}, name='authentication'),
url(r'^documentation/$', DocumentationRecordListView.as_view(), name='documentation_record_list'),
url(r'^documentation/(?P<pk>\d+)/$', DocumentationRecordDetailView.as_view(), name='documentation_record_detail'),
url(r'^logout/$', auth_views.logout, {'next_page': '/system_maintenance/'}, name='logout'),
url(r'^records/$', MaintenanceRecordListView.as_view(), name='maintenance_record_list'),
url(r'^records/(?P<pk>\d+)/$', MaintenanceRecordDetailView.as_view(), name='maintenance_record_detail'),
]
|
// ... existing code ...
from django.conf.urls import url
from django.contrib.auth import views as auth_views
from .views import (DocumentationRecordListView, DocumentationRecordDetailView,
MaintenanceRecordDetailView, MaintenanceRecordListView,
// ... modified code ...
system_maintenance_home_view)
urlpatterns = [
url(r'^$', system_maintenance_home_view, name='system_maintenance_home_view'),
url(r'^authentication/$', auth_views.login, {'template_name': 'system_maintenance/authentication.html'}, name='authentication'),
url(r'^documentation/$', DocumentationRecordListView.as_view(), name='documentation_record_list'),
url(r'^documentation/(?P<pk>\d+)/$', DocumentationRecordDetailView.as_view(), name='documentation_record_detail'),
url(r'^logout/$', auth_views.logout, {'next_page': '/system_maintenance/'}, name='logout'),
url(r'^records/$', MaintenanceRecordListView.as_view(), name='maintenance_record_list'),
url(r'^records/(?P<pk>\d+)/$', MaintenanceRecordDetailView.as_view(), name='maintenance_record_detail'),
]
// ... rest of the code ...
|
70ee0532f68a08fa12ba7bbfb217273ca8ef7a48
|
bluesky/tests/test_simulators.py
|
bluesky/tests/test_simulators.py
|
from bluesky.plans import scan
from bluesky.simulators import print_summary, print_summary_wrapper
def test_print_summary(motor_det):
motor, det = motor_det
print_summary(scan([det], motor, -1, 1, 10))
list(print_summary_wrapper(scan([det], motor, -1, 1, 10)))
|
from bluesky.plans import scan
from bluesky.simulators import print_summary, print_summary_wrapper
import pytest
def test_print_summary(motor_det):
motor, det = motor_det
print_summary(scan([det], motor, -1, 1, 10))
list(print_summary_wrapper(scan([det], motor, -1, 1, 10)))
def test_old_module_name(motor_det):
from bluesky.plan_tools import print_summary, print_summary_wrapper
motor, det = motor_det
with pytest.warns(UserWarning):
print_summary(scan([det], motor, -1, 1, 10))
with pytest.warns(UserWarning):
list(print_summary_wrapper(scan([det], motor, -1, 1, 10)))
|
Test that plan_tools import works but warns.
|
TST: Test that plan_tools import works but warns.
|
Python
|
bsd-3-clause
|
ericdill/bluesky,ericdill/bluesky
|
python
|
## Code Before:
from bluesky.plans import scan
from bluesky.simulators import print_summary, print_summary_wrapper
def test_print_summary(motor_det):
motor, det = motor_det
print_summary(scan([det], motor, -1, 1, 10))
list(print_summary_wrapper(scan([det], motor, -1, 1, 10)))
## Instruction:
TST: Test that plan_tools import works but warns.
## Code After:
from bluesky.plans import scan
from bluesky.simulators import print_summary, print_summary_wrapper
import pytest
def test_print_summary(motor_det):
motor, det = motor_det
print_summary(scan([det], motor, -1, 1, 10))
list(print_summary_wrapper(scan([det], motor, -1, 1, 10)))
def test_old_module_name(motor_det):
from bluesky.plan_tools import print_summary, print_summary_wrapper
motor, det = motor_det
with pytest.warns(UserWarning):
print_summary(scan([det], motor, -1, 1, 10))
with pytest.warns(UserWarning):
list(print_summary_wrapper(scan([det], motor, -1, 1, 10)))
|
...
from bluesky.plans import scan
from bluesky.simulators import print_summary, print_summary_wrapper
import pytest
def test_print_summary(motor_det):
...
motor, det = motor_det
print_summary(scan([det], motor, -1, 1, 10))
list(print_summary_wrapper(scan([det], motor, -1, 1, 10)))
def test_old_module_name(motor_det):
from bluesky.plan_tools import print_summary, print_summary_wrapper
motor, det = motor_det
with pytest.warns(UserWarning):
print_summary(scan([det], motor, -1, 1, 10))
with pytest.warns(UserWarning):
list(print_summary_wrapper(scan([det], motor, -1, 1, 10)))
...
|
9617d0d65b4c9bcc6513d1aa6cca537713f178c6
|
SSPSolution/SSPSolution/DoorEntity.h
|
SSPSolution/SSPSolution/DoorEntity.h
|
struct DoorSyncState {
int entityID;
bool isOpened;
};
struct ElementState {
int entityID;
EVENT desiredState;
bool desiredStateReached;
};
class DoorEntity :
public Entity
{
private:
std::vector<ElementState> m_subjectStates;
bool m_isOpened;
float m_minRotation;
float m_maxRotation;
float m_rotateTime;
float m_rotatePerSec;
bool m_needSync;
public:
DoorEntity();
virtual ~DoorEntity();
int Initialize(int entityID, PhysicsComponent* pComp, GraphicsComponent* gComp, std::vector<ElementState> subjectStates, float rotateTime = 1.0f, float minRotation = 0.0f, float maxRotation = DirectX::XM_PI / 2.0f);
int Update(float dT, InputHandler* inputHandler);
int React(int entityID, EVENT reactEvent);
bool SetIsOpened(bool isOpened);
bool GetIsOpened();
};
#endif
|
struct ElementState {
int entityID;
EVENT desiredState;
bool desiredStateReached;
};
class DoorEntity :
public Entity
{
private:
std::vector<ElementState> m_subjectStates;
bool m_isOpened;
float m_minRotation;
float m_maxRotation;
float m_rotateTime;
float m_rotatePerSec;
bool m_needSync;
public:
DoorEntity();
virtual ~DoorEntity();
int Initialize(int entityID, PhysicsComponent* pComp, GraphicsComponent* gComp, std::vector<ElementState> subjectStates, float rotateTime = 1.0f, float minRotation = 0.0f, float maxRotation = DirectX::XM_PI / 2.0f);
int Update(float dT, InputHandler* inputHandler);
int React(int entityID, EVENT reactEvent);
bool SetIsOpened(bool isOpened);
bool GetIsOpened();
};
#endif
|
REMOVE sync state from Door
|
REMOVE sync state from Door
We will only sync actuators such as levers, buttons and wheels
|
C
|
apache-2.0
|
Chringo/SSP,Chringo/SSP
|
c
|
## Code Before:
struct DoorSyncState {
int entityID;
bool isOpened;
};
struct ElementState {
int entityID;
EVENT desiredState;
bool desiredStateReached;
};
class DoorEntity :
public Entity
{
private:
std::vector<ElementState> m_subjectStates;
bool m_isOpened;
float m_minRotation;
float m_maxRotation;
float m_rotateTime;
float m_rotatePerSec;
bool m_needSync;
public:
DoorEntity();
virtual ~DoorEntity();
int Initialize(int entityID, PhysicsComponent* pComp, GraphicsComponent* gComp, std::vector<ElementState> subjectStates, float rotateTime = 1.0f, float minRotation = 0.0f, float maxRotation = DirectX::XM_PI / 2.0f);
int Update(float dT, InputHandler* inputHandler);
int React(int entityID, EVENT reactEvent);
bool SetIsOpened(bool isOpened);
bool GetIsOpened();
};
#endif
## Instruction:
REMOVE sync state from Door
We will only sync actuators such as levers, buttons and wheels
## Code After:
struct ElementState {
int entityID;
EVENT desiredState;
bool desiredStateReached;
};
class DoorEntity :
public Entity
{
private:
std::vector<ElementState> m_subjectStates;
bool m_isOpened;
float m_minRotation;
float m_maxRotation;
float m_rotateTime;
float m_rotatePerSec;
bool m_needSync;
public:
DoorEntity();
virtual ~DoorEntity();
int Initialize(int entityID, PhysicsComponent* pComp, GraphicsComponent* gComp, std::vector<ElementState> subjectStates, float rotateTime = 1.0f, float minRotation = 0.0f, float maxRotation = DirectX::XM_PI / 2.0f);
int Update(float dT, InputHandler* inputHandler);
int React(int entityID, EVENT reactEvent);
bool SetIsOpened(bool isOpened);
bool GetIsOpened();
};
#endif
|
...
struct ElementState {
int entityID;
...
|
541d4080821692ed879bfee47eb0ce1a8b278dac
|
Python/reverse-words-in-a-string-iii.py
|
Python/reverse-words-in-a-string-iii.py
|
class Solution(object):
def reverseWords(self, s):
"""
:type s: str
:rtype: str
"""
def reverse(s, begin, end):
for i in xrange((end - begin) // 2):
s[begin + i], s[end - 1 - i] = s[end - 1 - i], s[begin + i]
s, i = list(s), 0
for j in xrange(len(s) + 1):
if j == len(s) or s[j] == ' ':
reverse(s, i, j)
i = j + 1
return "".join(s)
|
class Solution(object):
def reverseWords(self, s):
"""
:type s: str
:rtype: str
"""
def reverse(s, begin, end):
for i in xrange((end - begin) // 2):
s[begin + i], s[end - 1 - i] = s[end - 1 - i], s[begin + i]
s, i = list(s), 0
for j in xrange(len(s) + 1):
if j == len(s) or s[j] == ' ':
reverse(s, i, j)
i = j + 1
return "".join(s)
class Solution2(object):
def reverseWords(self, s):
reversed_words = [word[::-1] for word in s.split(' ')]
return ' '.join(reversed_words)
|
Add alternative solution for 'Reverse words in string III'
|
Add alternative solution for 'Reverse words in string III'
|
Python
|
mit
|
kamyu104/LeetCode,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015
|
python
|
## Code Before:
class Solution(object):
def reverseWords(self, s):
"""
:type s: str
:rtype: str
"""
def reverse(s, begin, end):
for i in xrange((end - begin) // 2):
s[begin + i], s[end - 1 - i] = s[end - 1 - i], s[begin + i]
s, i = list(s), 0
for j in xrange(len(s) + 1):
if j == len(s) or s[j] == ' ':
reverse(s, i, j)
i = j + 1
return "".join(s)
## Instruction:
Add alternative solution for 'Reverse words in string III'
## Code After:
class Solution(object):
def reverseWords(self, s):
"""
:type s: str
:rtype: str
"""
def reverse(s, begin, end):
for i in xrange((end - begin) // 2):
s[begin + i], s[end - 1 - i] = s[end - 1 - i], s[begin + i]
s, i = list(s), 0
for j in xrange(len(s) + 1):
if j == len(s) or s[j] == ' ':
reverse(s, i, j)
i = j + 1
return "".join(s)
class Solution2(object):
def reverseWords(self, s):
reversed_words = [word[::-1] for word in s.split(' ')]
return ' '.join(reversed_words)
|
// ... existing code ...
reverse(s, i, j)
i = j + 1
return "".join(s)
class Solution2(object):
def reverseWords(self, s):
reversed_words = [word[::-1] for word in s.split(' ')]
return ' '.join(reversed_words)
// ... rest of the code ...
|
15621b2d1dc58998f4e9f84ec8f4ef2c50458868
|
openerp/tests/addons/test_translation_import/models.py
|
openerp/tests/addons/test_translation_import/models.py
|
import openerp
from openerp.tools.translate import _
class m(openerp.osv.osv.Model):
""" A model to provide source strings.
"""
_name = 'test.translation.import'
_columns = {
'name': openerp.osv.fields.char(
'1XBUO5PUYH2RYZSA1FTLRYS8SPCNU1UYXMEYMM25ASV7JC2KTJZQESZYRV9L8CGB',
size=32, help='Efgh'),
}
_('Ijkl')
# With the name label above, this source string should be generated twice.
_('1XBUO5PUYH2RYZSA1FTLRYS8SPCNU1UYXMEYMM25ASV7JC2KTJZQESZYRV9L8CGB')
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
import openerp
from openerp.tools.translate import _
class m(openerp.osv.orm.TransientModel):
""" A model to provide source strings.
"""
_name = 'test.translation.import'
_columns = {
'name': openerp.osv.fields.char(
'1XBUO5PUYH2RYZSA1FTLRYS8SPCNU1UYXMEYMM25ASV7JC2KTJZQESZYRV9L8CGB',
size=32, help='Efgh'),
}
_('Ijkl')
# With the name label above, this source string should be generated twice.
_('1XBUO5PUYH2RYZSA1FTLRYS8SPCNU1UYXMEYMM25ASV7JC2KTJZQESZYRV9L8CGB')
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Use TransientModel for the dummy model used in translation testing
|
[IMP] Use TransientModel for the dummy model used in translation testing
|
Python
|
agpl-3.0
|
akretion/openerp-server,akretion/openerp-server,akretion/openerp-server
|
python
|
## Code Before:
import openerp
from openerp.tools.translate import _
class m(openerp.osv.osv.Model):
""" A model to provide source strings.
"""
_name = 'test.translation.import'
_columns = {
'name': openerp.osv.fields.char(
'1XBUO5PUYH2RYZSA1FTLRYS8SPCNU1UYXMEYMM25ASV7JC2KTJZQESZYRV9L8CGB',
size=32, help='Efgh'),
}
_('Ijkl')
# With the name label above, this source string should be generated twice.
_('1XBUO5PUYH2RYZSA1FTLRYS8SPCNU1UYXMEYMM25ASV7JC2KTJZQESZYRV9L8CGB')
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
## Instruction:
[IMP] Use TransientModel for the dummy model used in translation testing
## Code After:
import openerp
from openerp.tools.translate import _
class m(openerp.osv.orm.TransientModel):
""" A model to provide source strings.
"""
_name = 'test.translation.import'
_columns = {
'name': openerp.osv.fields.char(
'1XBUO5PUYH2RYZSA1FTLRYS8SPCNU1UYXMEYMM25ASV7JC2KTJZQESZYRV9L8CGB',
size=32, help='Efgh'),
}
_('Ijkl')
# With the name label above, this source string should be generated twice.
_('1XBUO5PUYH2RYZSA1FTLRYS8SPCNU1UYXMEYMM25ASV7JC2KTJZQESZYRV9L8CGB')
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
// ... existing code ...
import openerp
from openerp.tools.translate import _
class m(openerp.osv.orm.TransientModel):
""" A model to provide source strings.
"""
_name = 'test.translation.import'
// ... rest of the code ...
|
2cfe6e6c9284dfffba2943a8562e38844b6ba089
|
temba/campaigns/migrations/0015_campaignevent_message_new.py
|
temba/campaigns/migrations/0015_campaignevent_message_new.py
|
from __future__ import unicode_literals
import json
import temba.utils.models
from django.contrib.postgres.operations import HStoreExtension
from django.db import migrations
def populate_message_new(apps, schema_editor):
CampaignEvent = apps.get_model('campaigns', 'CampaignEvent')
events = list(CampaignEvent.objects.filter(event_type='M').select_related('flow'))
for event in events:
try:
event.message_new = json.loads(event.message)
except Exception:
event.message_new = {event.flow.base_language: event.message}
event.save(update_fields=('message_new',))
if events:
print("Converted %d campaign events" % len(events))
class Migration(migrations.Migration):
dependencies = [
('campaigns', '0014_auto_20170228_0837'),
]
operations = [
HStoreExtension(),
migrations.AddField(
model_name='campaignevent',
name='message_new',
field=temba.utils.models.TranslatableField(max_length=640, null=True),
),
migrations.RunPython(populate_message_new)
]
|
from __future__ import unicode_literals
import json
import temba.utils.models
from django.contrib.postgres.operations import HStoreExtension
from django.db import migrations
def populate_message_new(apps, schema_editor):
CampaignEvent = apps.get_model('campaigns', 'CampaignEvent')
events = list(CampaignEvent.objects.filter(event_type='M').select_related('flow'))
for event in events:
try:
event.message_new = json.loads(event.message)
except Exception:
base_lang = event.flow.base_language or 'base'
event.message_new = {base_lang: event.message}
event.save(update_fields=('message_new',))
if events:
print("Converted %d campaign events" % len(events))
class Migration(migrations.Migration):
dependencies = [
('campaigns', '0014_auto_20170228_0837'),
]
operations = [
HStoreExtension(),
migrations.AddField(
model_name='campaignevent',
name='message_new',
field=temba.utils.models.TranslatableField(max_length=640, null=True),
),
migrations.RunPython(populate_message_new)
]
|
Fix migration to work with flows with no base_language
|
Fix migration to work with flows with no base_language
|
Python
|
agpl-3.0
|
pulilab/rapidpro,pulilab/rapidpro,pulilab/rapidpro,pulilab/rapidpro,pulilab/rapidpro
|
python
|
## Code Before:
from __future__ import unicode_literals
import json
import temba.utils.models
from django.contrib.postgres.operations import HStoreExtension
from django.db import migrations
def populate_message_new(apps, schema_editor):
CampaignEvent = apps.get_model('campaigns', 'CampaignEvent')
events = list(CampaignEvent.objects.filter(event_type='M').select_related('flow'))
for event in events:
try:
event.message_new = json.loads(event.message)
except Exception:
event.message_new = {event.flow.base_language: event.message}
event.save(update_fields=('message_new',))
if events:
print("Converted %d campaign events" % len(events))
class Migration(migrations.Migration):
dependencies = [
('campaigns', '0014_auto_20170228_0837'),
]
operations = [
HStoreExtension(),
migrations.AddField(
model_name='campaignevent',
name='message_new',
field=temba.utils.models.TranslatableField(max_length=640, null=True),
),
migrations.RunPython(populate_message_new)
]
## Instruction:
Fix migration to work with flows with no base_language
## Code After:
from __future__ import unicode_literals
import json
import temba.utils.models
from django.contrib.postgres.operations import HStoreExtension
from django.db import migrations
def populate_message_new(apps, schema_editor):
CampaignEvent = apps.get_model('campaigns', 'CampaignEvent')
events = list(CampaignEvent.objects.filter(event_type='M').select_related('flow'))
for event in events:
try:
event.message_new = json.loads(event.message)
except Exception:
base_lang = event.flow.base_language or 'base'
event.message_new = {base_lang: event.message}
event.save(update_fields=('message_new',))
if events:
print("Converted %d campaign events" % len(events))
class Migration(migrations.Migration):
dependencies = [
('campaigns', '0014_auto_20170228_0837'),
]
operations = [
HStoreExtension(),
migrations.AddField(
model_name='campaignevent',
name='message_new',
field=temba.utils.models.TranslatableField(max_length=640, null=True),
),
migrations.RunPython(populate_message_new)
]
|
...
try:
event.message_new = json.loads(event.message)
except Exception:
base_lang = event.flow.base_language or 'base'
event.message_new = {base_lang: event.message}
event.save(update_fields=('message_new',))
...
|
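A minimal standalone sketch of the fallback introduced in the record above; only the `or 'base'` expression comes from the record, while the FakeFlow class and sample messages are made up for illustration:

    # Sketch: reproduce the migration's fallback for flows with no base_language.
    class FakeFlow:
        def __init__(self, base_language):
            self.base_language = base_language

    def message_dict(flow, message):
        base_lang = flow.base_language or 'base'  # same fallback as the migration
        return {base_lang: message}

    print(message_dict(FakeFlow('eng'), 'Hello'))  # {'eng': 'Hello'}
    print(message_dict(FakeFlow(None), 'Hello'))   # {'base': 'Hello'}

The `or 'base'` expression guarantees the resulting HStore dict always has a usable language key even when the flow's base_language is NULL.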
27aad0e3ed95cb43b28eb3c02fa96b3a9b74de5b
|
tests/test_container.py
|
tests/test_container.py
|
from .common import *
class TestContainers(TestCase):
def test_unicode_filename(self):
container = av.open(self.sandboxed(u'¢∞§¶•ªº.mov'), 'w')
|
import os
import sys
import unittest
from .common import *
# On Windows, Python 3.0 - 3.5 have issues handling unicode filenames.
# Starting with Python 3.6 the situation is saner thanks to PEP 529:
#
# https://www.python.org/dev/peps/pep-0529/
broken_unicode = (
os.name == 'nt' and
sys.version_info >= (3, 0) and
sys.version_info < (3, 6))
class TestContainers(TestCase):
@unittest.skipIf(broken_unicode, 'Unicode filename handling is broken')
def test_unicode_filename(self):
container = av.open(self.sandboxed(u'¢∞§¶•ªº.mov'), 'w')
|
Disable unicode filename test on Windows with Python 3.0 - 3.5
|
Disable unicode filename test on Windows with Python 3.0 - 3.5
Before PEP 529 landed in Python 3.6, unicode filename handling on
Windows is hit-and-miss, so don't break CI.
|
Python
|
bsd-3-clause
|
PyAV-Org/PyAV,mikeboers/PyAV,PyAV-Org/PyAV,mikeboers/PyAV
|
python
|
## Code Before:
from .common import *
class TestContainers(TestCase):
def test_unicode_filename(self):
container = av.open(self.sandboxed(u'¢∞§¶•ªº.mov'), 'w')
## Instruction:
Disable unicode filename test on Windows with Python 3.0 - 3.5
Before PEP 529 landed in Python 3.6, unicode filename handling on
Windows is hit-and-miss, so don't break CI.
## Code After:
import os
import sys
import unittest
from .common import *
# On Windows, Python 3.0 - 3.5 have issues handling unicode filenames.
# Starting with Python 3.6 the situation is saner thanks to PEP 529:
#
# https://www.python.org/dev/peps/pep-0529/
broken_unicode = (
os.name == 'nt' and
sys.version_info >= (3, 0) and
sys.version_info < (3, 6))
class TestContainers(TestCase):
@unittest.skipIf(broken_unicode, 'Unicode filename handling is broken')
def test_unicode_filename(self):
container = av.open(self.sandboxed(u'¢∞§¶•ªº.mov'), 'w')
|
...
import os
import sys
import unittest
from .common import *
# On Windows, Python 3.0 - 3.5 have issues handling unicode filenames.
# Starting with Python 3.6 the situation is saner thanks to PEP 529:
#
# https://www.python.org/dev/peps/pep-0529/
broken_unicode = (
os.name == 'nt' and
sys.version_info >= (3, 0) and
sys.version_info < (3, 6))
class TestContainers(TestCase):
@unittest.skipIf(broken_unicode, 'Unicode filename handling is broken')
def test_unicode_filename(self):
container = av.open(self.sandboxed(u'¢∞§¶•ªº.mov'), 'w')
...
|
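The version gate used in the record above can be exercised on its own; this small sketch uses only the stdlib (no PyAV imports) and prints the filesystem encoding together with the flag. On Windows with Python 3.0 - 3.5 the pre-PEP 529 filesystem encoding is what makes unicode filenames unreliable:

    import os
    import sys

    # True only on Windows with Python 3.0 - 3.5 (before PEP 529 landed).
    broken_unicode = (
        os.name == 'nt' and
        sys.version_info >= (3, 0) and
        sys.version_info < (3, 6))

    print(sys.getfilesystemencoding(), broken_unicode)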
98c0ccec77cc6f1657c21acb3cdc07b483a9a178
|
proselint/checks/writegood/lexical_illusions.py
|
proselint/checks/writegood/lexical_illusions.py
|
from proselint.tools import existence_check, memoize
@memoize
def check(text):
"""Check the text."""
err = "WGD105"
msg = u"There's a lexical illusion here: a word is repeated."
commercialese = [
"the\sthe",
"is\sis"
]
return existence_check(text, commercialese, err, msg)
|
from proselint.tools import existence_check, memoize
@memoize
def check(text):
"""Check the text."""
err = "WGD105"
msg = u"There's a lexical illusion here: a word is repeated."
commercialese = [
"the\sthe",
]
return existence_check(text, commercialese, err, msg)
|
Remove "is is" from lexical illusions
|
Remove "is is" from lexical illusions
|
Python
|
bsd-3-clause
|
jstewmon/proselint,jstewmon/proselint,amperser/proselint,jstewmon/proselint,amperser/proselint,amperser/proselint,amperser/proselint,amperser/proselint
|
python
|
## Code Before:
from proselint.tools import existence_check, memoize
@memoize
def check(text):
"""Check the text."""
err = "WGD105"
msg = u"There's a lexical illusion here: a word is repeated."
commercialese = [
"the\sthe",
"is\sis"
]
return existence_check(text, commercialese, err, msg)
## Instruction:
Remove "is is" from lexical illusions
## Code After:
from proselint.tools import existence_check, memoize
@memoize
def check(text):
"""Check the text."""
err = "WGD105"
msg = u"There's a lexical illusion here: a word is repeated."
commercialese = [
"the\sthe",
]
return existence_check(text, commercialese, err, msg)
|
// ... existing code ...
commercialese = [
"the\sthe",
]
return existence_check(text, commercialese, err, msg)
// ... rest of the code ...
|
f467ade3ebb8164597a2265a1b9af5e447dc88c6
|
core/src/main/java/cucumber/runtime/io/FileResource.java
|
core/src/main/java/cucumber/runtime/io/FileResource.java
|
package cucumber.runtime.io;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
class FileResource implements Resource {
private final File root;
private final File file;
private final boolean classpathFileResource;
static FileResource createFileResource(File root, File file) {
return new FileResource(root, file, false);
}
static FileResource createClasspathFileResource(File root, File file) {
return new FileResource(root, file, true);
}
private FileResource(File root, File file, boolean classpathFileResource) {
this.root = root;
this.file = file;
this.classpathFileResource = classpathFileResource;
if (!file.getAbsolutePath().startsWith(root.getAbsolutePath())) {
throw new IllegalArgumentException(file.getAbsolutePath() + " is not a parent of " + root.getAbsolutePath());
}
}
@Override
public String getPath() {
if (classpathFileResource) {
return file.getAbsolutePath().substring(root.getAbsolutePath().length() + 1);
} else {
return file.getPath();
}
}
@Override
public InputStream getInputStream() throws IOException {
return new FileInputStream(file);
}
}
|
package cucumber.runtime.io;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
class FileResource implements Resource {
private final File root;
private final File file;
private final boolean classpathFileResource;
static FileResource createFileResource(File root, File file) {
return new FileResource(root, file, false);
}
static FileResource createClasspathFileResource(File root, File file) {
return new FileResource(root, file, true);
}
private FileResource(File root, File file, boolean classpathFileResource) {
this.root = root;
this.file = file;
this.classpathFileResource = classpathFileResource;
if (!file.getAbsolutePath().startsWith(root.getAbsolutePath())) {
throw new IllegalArgumentException(file.getAbsolutePath() + " is not a parent of " + root.getAbsolutePath());
}
}
@Override
public String getPath() {
if (classpathFileResource) {
String relativePath = file.getAbsolutePath().substring(root.getAbsolutePath().length() + 1);
return relativePath.replace(File.separatorChar, '/');
} else {
return file.getPath();
}
}
@Override
public InputStream getInputStream() throws IOException {
return new FileInputStream(file);
}
}
|
Fix class loading on Windows
|
[Core] Fix class loading on Windows
Fixes: #1540
|
Java
|
mit
|
cucumber/cucumber-jvm,cucumber/cucumber-jvm,cucumber/cucumber-jvm,cucumber/cucumber-jvm,cucumber/cucumber-jvm
|
java
|
## Code Before:
package cucumber.runtime.io;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
class FileResource implements Resource {
private final File root;
private final File file;
private final boolean classpathFileResource;
static FileResource createFileResource(File root, File file) {
return new FileResource(root, file, false);
}
static FileResource createClasspathFileResource(File root, File file) {
return new FileResource(root, file, true);
}
private FileResource(File root, File file, boolean classpathFileResource) {
this.root = root;
this.file = file;
this.classpathFileResource = classpathFileResource;
if (!file.getAbsolutePath().startsWith(root.getAbsolutePath())) {
throw new IllegalArgumentException(file.getAbsolutePath() + " is not a parent of " + root.getAbsolutePath());
}
}
@Override
public String getPath() {
if (classpathFileResource) {
return file.getAbsolutePath().substring(root.getAbsolutePath().length() + 1);
} else {
return file.getPath();
}
}
@Override
public InputStream getInputStream() throws IOException {
return new FileInputStream(file);
}
}
## Instruction:
[Core] Fix class loading on Windows
Fixes: #1540
## Code After:
package cucumber.runtime.io;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
class FileResource implements Resource {
private final File root;
private final File file;
private final boolean classpathFileResource;
static FileResource createFileResource(File root, File file) {
return new FileResource(root, file, false);
}
static FileResource createClasspathFileResource(File root, File file) {
return new FileResource(root, file, true);
}
private FileResource(File root, File file, boolean classpathFileResource) {
this.root = root;
this.file = file;
this.classpathFileResource = classpathFileResource;
if (!file.getAbsolutePath().startsWith(root.getAbsolutePath())) {
throw new IllegalArgumentException(file.getAbsolutePath() + " is not a parent of " + root.getAbsolutePath());
}
}
@Override
public String getPath() {
if (classpathFileResource) {
String relativePath = file.getAbsolutePath().substring(root.getAbsolutePath().length() + 1);
return relativePath.replace(File.separatorChar, '/');
} else {
return file.getPath();
}
}
@Override
public InputStream getInputStream() throws IOException {
return new FileInputStream(file);
}
}
|
# ... existing code ...
@Override
public String getPath() {
if (classpathFileResource) {
String relativePath = file.getAbsolutePath().substring(root.getAbsolutePath().length() + 1);
return relativePath.replace(File.separatorChar, '/');
} else {
return file.getPath();
}
# ... rest of the code ...
|
e5803617b27144cb88563b3533b66f0b96482143
|
guv/green/time.py
|
guv/green/time.py
|
__time = __import__('time')
from ..patcher import slurp_properties
__patched__ = ['sleep']
slurp_properties(__time, globals(), ignore=__patched__, srckeys=dir(__time))
from ..greenthread import sleep
sleep # silence pyflakes
|
__time = __import__('time')
from ..patcher import slurp_properties
__patched__ = ['sleep']
slurp_properties(__time, globals(), ignore=__patched__, srckeys=dir(__time))
from .. import greenthread
sleep = greenthread.sleep
|
Declare sleep as a global instead of relying on import
|
Declare sleep as a global instead of relying on import
This is a nicer way to define it in the greenified module. Unused imports may
accidentally disappear after using your IDE's "optimize imports" function.
|
Python
|
mit
|
veegee/guv,veegee/guv
|
python
|
## Code Before:
__time = __import__('time')
from ..patcher import slurp_properties
__patched__ = ['sleep']
slurp_properties(__time, globals(), ignore=__patched__, srckeys=dir(__time))
from ..greenthread import sleep
sleep # silence pyflakes
## Instruction:
Declare sleep as a global instead of relying on import
This is a nicer way to define it in the greenified module. Unused imports may
accidentally disappear after using your IDE's "optimize imports" function.
## Code After:
__time = __import__('time')
from ..patcher import slurp_properties
__patched__ = ['sleep']
slurp_properties(__time, globals(), ignore=__patched__, srckeys=dir(__time))
from .. import greenthread
sleep = greenthread.sleep
|
...
__patched__ = ['sleep']
slurp_properties(__time, globals(), ignore=__patched__, srckeys=dir(__time))
from .. import greenthread
sleep = greenthread.sleep
...
|
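The design choice above (explicit assignment instead of a bare import plus a silencing reference) can be shown with a tiny self-contained sketch; green_sleep below is only a stand-in for greenthread.sleep, not the real implementation:

    import time as _time

    def green_sleep(seconds):
        # Stand-in for a cooperative sleep; the real greenthread.sleep yields to the hub.
        _time.sleep(seconds)

    # Explicit global assignment: the name is visibly defined in this module,
    # so "optimize imports" tooling has nothing to strip.
    sleep = green_sleep

Because `sleep` is now an ordinary module-level assignment rather than an apparently unused import, IDE clean-ups cannot silently remove it.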
b722fe0d5b84eeb5c9e7279679826ff5097bfd91
|
contentdensity/textifai/urls.py
|
contentdensity/textifai/urls.py
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^textinput', views.textinput, name='textinput'),
url(r'^featureoutput', views.featureoutput, name='featureoutput'),
url(r'^account', views.account, name='account'),
]
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^textinput', views.textinput, name='textinput'),
url(r'^featureoutput', views.featureoutput, name='featureoutput'),
url(r'^account', views.account, name='account'),
url(r'^general-insights$', views.general_insights, name='general-insights'),
]
|
Add URL mapping for general-insights page
|
Add URL mapping for general-insights page
|
Python
|
mit
|
CS326-important/space-deer,CS326-important/space-deer
|
python
|
## Code Before:
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^textinput', views.textinput, name='textinput'),
url(r'^featureoutput', views.featureoutput, name='featureoutput'),
url(r'^account', views.account, name='account'),
]
## Instruction:
Add URL mapping for general-insights page
## Code After:
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^textinput', views.textinput, name='textinput'),
url(r'^featureoutput', views.featureoutput, name='featureoutput'),
url(r'^account', views.account, name='account'),
url(r'^general-insights$', views.general_insights, name='general-insights'),
]
|
// ... existing code ...
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.index, name='index'),
// ... modified code ...
url(r'^textinput', views.textinput, name='textinput'),
url(r'^featureoutput', views.featureoutput, name='featureoutput'),
url(r'^account', views.account, name='account'),
url(r'^general-insights$', views.general_insights, name='general-insights'),
]
// ... rest of the code ...
|
27fe9d6531a2e76affd9388db53c0433062a9cfa
|
photonix/photos/management/commands/create_library.py
|
photonix/photos/management/commands/create_library.py
|
import os
from pathlib import Path
from django.contrib.auth import get_user_model
from django.core.management.base import BaseCommand
from django.db.utils import IntegrityError
from photonix.photos.models import Library, LibraryPath, LibraryUser
from photonix.photos.utils.db import record_photo
from photonix.photos.utils.fs import determine_destination, download_file
User = get_user_model()
class Command(BaseCommand):
help = 'Create a library for a user'
def create_library(self, username, library_name):
# Get user
user = User.objects.get(username=username)
# Create Library
library, _ = Library.objects.get_or_create(
name=library_name,
)
library_path, _ = LibraryPath.objects.get_or_create(
library=library,
type='St',
backend_type='Lo',
path='/data/photos/',
url='/photos/',
)
library_user, _ = LibraryUser.objects.get_or_create(
library=library,
user=user,
owner=True,
)
print(f'Library "{library_name}" created successfully for user "{username}"')
def add_arguments(self, parser):
# Positional arguments
parser.add_argument('username', nargs='+', type=str)
parser.add_argument('library_name', nargs='+', type=str)
def handle(self, *args, **options):
self.create_library(options['username'][0], options['library_name'][0])
|
import os
from pathlib import Path
from django.contrib.auth import get_user_model
from django.core.management.base import BaseCommand
from django.db.utils import IntegrityError
from photonix.photos.models import Library, LibraryPath, LibraryUser
from photonix.photos.utils.db import record_photo
from photonix.photos.utils.fs import determine_destination, download_file
User = get_user_model()
class Command(BaseCommand):
help = 'Create a library for a user'
def create_library(self, username, library_name, path):
# Get user
user = User.objects.get(username=username)
# Create Library
library, _ = Library.objects.get_or_create(
name=library_name,
)
library_path, _ = LibraryPath.objects.get_or_create(
library=library,
type='St',
backend_type='Lo',
path=path,
)
library_user, _ = LibraryUser.objects.get_or_create(
library=library,
user=user,
owner=True,
)
print(f'Library "{library_name}" with path "{path}" created successfully for user "{username}"')
def add_arguments(self, parser):
# Positional arguments
parser.add_argument('username', type=str)
parser.add_argument('library_name', type=str)
parser.add_argument('--path', type=str, default='/data/photos')
def handle(self, *args, **options):
self.create_library(options['username'], options['library_name'], options['path'])
|
Fix inability to set path when creating a new library
|
Fix inability to set path when creating a new library
|
Python
|
agpl-3.0
|
damianmoore/photo-manager,damianmoore/photo-manager,damianmoore/photo-manager,damianmoore/photo-manager
|
python
|
## Code Before:
import os
from pathlib import Path
from django.contrib.auth import get_user_model
from django.core.management.base import BaseCommand
from django.db.utils import IntegrityError
from photonix.photos.models import Library, LibraryPath, LibraryUser
from photonix.photos.utils.db import record_photo
from photonix.photos.utils.fs import determine_destination, download_file
User = get_user_model()
class Command(BaseCommand):
help = 'Create a library for a user'
def create_library(self, username, library_name):
# Get user
user = User.objects.get(username=username)
# Create Library
library, _ = Library.objects.get_or_create(
name=library_name,
)
library_path, _ = LibraryPath.objects.get_or_create(
library=library,
type='St',
backend_type='Lo',
path='/data/photos/',
url='/photos/',
)
library_user, _ = LibraryUser.objects.get_or_create(
library=library,
user=user,
owner=True,
)
print(f'Library "{library_name}" created successfully for user "{username}"')
def add_arguments(self, parser):
# Positional arguments
parser.add_argument('username', nargs='+', type=str)
parser.add_argument('library_name', nargs='+', type=str)
def handle(self, *args, **options):
self.create_library(options['username'][0], options['library_name'][0])
## Instruction:
Fix inability to set path when creating a new library
## Code After:
import os
from pathlib import Path
from django.contrib.auth import get_user_model
from django.core.management.base import BaseCommand
from django.db.utils import IntegrityError
from photonix.photos.models import Library, LibraryPath, LibraryUser
from photonix.photos.utils.db import record_photo
from photonix.photos.utils.fs import determine_destination, download_file
User = get_user_model()
class Command(BaseCommand):
help = 'Create a library for a user'
def create_library(self, username, library_name, path):
# Get user
user = User.objects.get(username=username)
# Create Library
library, _ = Library.objects.get_or_create(
name=library_name,
)
library_path, _ = LibraryPath.objects.get_or_create(
library=library,
type='St',
backend_type='Lo',
path=path,
)
library_user, _ = LibraryUser.objects.get_or_create(
library=library,
user=user,
owner=True,
)
print(f'Library "{library_name}" with path "{path}" created successfully for user "{username}"')
def add_arguments(self, parser):
# Positional arguments
parser.add_argument('username', type=str)
parser.add_argument('library_name', type=str)
parser.add_argument('--path', type=str, default='/data/photos')
def handle(self, *args, **options):
self.create_library(options['username'], options['library_name'], options['path'])
|
# ... existing code ...
class Command(BaseCommand):
help = 'Create a library for a user'
def create_library(self, username, library_name, path):
# Get user
user = User.objects.get(username=username)
# Create Library
# ... modified code ...
library=library,
type='St',
backend_type='Lo',
path=path,
)
library_user, _ = LibraryUser.objects.get_or_create(
library=library,
...
owner=True,
)
print(f'Library "{library_name}" with path "{path}" created successfully for user "{username}"')
def add_arguments(self, parser):
# Positional arguments
parser.add_argument('username', type=str)
parser.add_argument('library_name', type=str)
parser.add_argument('--path', type=str, default='/data/photos')
def handle(self, *args, **options):
self.create_library(options['username'], options['library_name'], options['path'])
# ... rest of the code ...
|
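A hedged usage sketch for the updated command; the username, library name and path below are invented values, and it assumes a configured Django project in which that user already exists. call_command is Django's standard way to invoke a management command from code:

    from django.core.management import call_command

    # Equivalent to: ./manage.py create_library alice "Holiday Photos" --path /srv/photos
    call_command('create_library', 'alice', 'Holiday Photos', path='/srv/photos')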
b8e9a2af61e1b8fe45e32966495e46357a145a56
|
dom/automation/detect_assertions.py
|
dom/automation/detect_assertions.py
|
def amiss(logPrefix):
global ignoreList
foundSomething = False
currentFile = file(logPrefix + "-err", "r")
# map from (assertion message) to (true, if seen in the current file)
seenInCurrentFile = {}
for line in currentFile:
line = line.strip("\x07").rstrip("\n")
if (line.startswith("###!!!") and not (line in seenInCurrentFile)):
seenInCurrentFile[line] = True
if not (ignore(line)):
print line
foundSomething = True
currentFile.close()
return foundSomething
def getIgnores():
global ignoreList
ignoreFile = open("known_assertions.txt", "r")
for line in ignoreFile:
line = line.strip()
if ((len(line) > 0) and not line.startswith("#")):
ignoreList.append(line)
def ignore(assertion):
global ignoreList
for ig in ignoreList:
if (assertion.find(ig) != -1):
return True
return False
ignoreList = []
getIgnores()
# print "detect_assertions is ready (ignoring %d assertions)" % len(ignoreList)
|
import platform
def amiss(logPrefix):
global ignoreList
foundSomething = False
currentFile = file(logPrefix + "-err", "r")
# map from (assertion message) to (true, if seen in the current file)
seenInCurrentFile = {}
for line in currentFile:
line = line.strip("\x07").rstrip("\n")
if (line.startswith("###!!!") and not (line in seenInCurrentFile)):
seenInCurrentFile[line] = True
if not (ignore(line)):
print line
foundSomething = True
currentFile.close()
return foundSomething
def getIgnores():
global simpleIgnoreList
ignoreFile = open("known_assertions.txt", "r")
for line in ignoreFile:
line = line.strip()
if ((len(line) > 0) and not line.startswith("#")):
mpi = line.find(", file ") # assertions use this format
if (mpi == -1):
mpi = line.find(": file ") # aborts use this format
if (mpi == -1):
simpleIgnoreList.append(line)
else:
twoPartIgnoreList.append((line[:mpi+7], localSlashes(line[mpi+7:])))
def localSlashes(s):
if platform.system() in ('Windows', 'Microsoft'):
return s.replace("\\", "/")
return s
def ignore(assertion):
global simpleIgnoreList
for ig in simpleIgnoreList:
if assertion.find(ig) != -1:
return True
for (part1, part2) in twoPartIgnoreList:
if assertion.find(part1) != -1 and assertion.find(part2) != -1:
return True
return False
simpleIgnoreList = []
twoPartIgnoreList = []
getIgnores()
#print "detect_assertions is ready (ignoring %d strings without filenames and %d strings with filenames)" % (len(simpleIgnoreList), len(twoPartIgnoreList))
|
Make known_assertions.txt cross-machine and hopefully also cross-platform.
|
Make known_assertions.txt cross-machine and hopefully also cross-platform.
|
Python
|
mpl-2.0
|
nth10sd/funfuzz,MozillaSecurity/funfuzz,MozillaSecurity/funfuzz,nth10sd/funfuzz,MozillaSecurity/funfuzz,nth10sd/funfuzz
|
python
|
## Code Before:
def amiss(logPrefix):
global ignoreList
foundSomething = False
currentFile = file(logPrefix + "-err", "r")
# map from (assertion message) to (true, if seen in the current file)
seenInCurrentFile = {}
for line in currentFile:
line = line.strip("\x07").rstrip("\n")
if (line.startswith("###!!!") and not (line in seenInCurrentFile)):
seenInCurrentFile[line] = True
if not (ignore(line)):
print line
foundSomething = True
currentFile.close()
return foundSomething
def getIgnores():
global ignoreList
ignoreFile = open("known_assertions.txt", "r")
for line in ignoreFile:
line = line.strip()
if ((len(line) > 0) and not line.startswith("#")):
ignoreList.append(line)
def ignore(assertion):
global ignoreList
for ig in ignoreList:
if (assertion.find(ig) != -1):
return True
return False
ignoreList = []
getIgnores()
# print "detect_assertions is ready (ignoring %d assertions)" % len(ignoreList)
## Instruction:
Make known_assertions.txt cross-machine and hopefully also cross-platform.
## Code After:
import platform
def amiss(logPrefix):
global ignoreList
foundSomething = False
currentFile = file(logPrefix + "-err", "r")
# map from (assertion message) to (true, if seen in the current file)
seenInCurrentFile = {}
for line in currentFile:
line = line.strip("\x07").rstrip("\n")
if (line.startswith("###!!!") and not (line in seenInCurrentFile)):
seenInCurrentFile[line] = True
if not (ignore(line)):
print line
foundSomething = True
currentFile.close()
return foundSomething
def getIgnores():
global simpleIgnoreList
ignoreFile = open("known_assertions.txt", "r")
for line in ignoreFile:
line = line.strip()
if ((len(line) > 0) and not line.startswith("#")):
mpi = line.find(", file ") # assertions use this format
if (mpi == -1):
mpi = line.find(": file ") # aborts use this format
if (mpi == -1):
simpleIgnoreList.append(line)
else:
twoPartIgnoreList.append((line[:mpi+7], localSlashes(line[mpi+7:])))
def localSlashes(s):
if platform.system() in ('Windows', 'Microsoft'):
return s.replace("\\", "/")
return s
def ignore(assertion):
global simpleIgnoreList
for ig in simpleIgnoreList:
if assertion.find(ig) != -1:
return True
for (part1, part2) in twoPartIgnoreList:
if assertion.find(part1) != -1 and assertion.find(part2) != -1:
return True
return False
simpleIgnoreList = []
twoPartIgnoreList = []
getIgnores()
#print "detect_assertions is ready (ignoring %d strings without filenames and %d strings with filenames)" % (len(simpleIgnoreList), len(twoPartIgnoreList))
|
# ... existing code ...
import platform
def amiss(logPrefix):
global ignoreList
# ... modified code ...
def getIgnores():
global simpleIgnoreList
ignoreFile = open("known_assertions.txt", "r")
for line in ignoreFile:
line = line.strip()
if ((len(line) > 0) and not line.startswith("#")):
mpi = line.find(", file ") # assertions use this format
if (mpi == -1):
mpi = line.find(": file ") # aborts use this format
if (mpi == -1):
simpleIgnoreList.append(line)
else:
twoPartIgnoreList.append((line[:mpi+7], localSlashes(line[mpi+7:])))
def localSlashes(s):
if platform.system() in ('Windows', 'Microsoft'):
return s.replace("\\", "/")
return s
def ignore(assertion):
global simpleIgnoreList
for ig in simpleIgnoreList:
if assertion.find(ig) != -1:
return True
for (part1, part2) in twoPartIgnoreList:
if assertion.find(part1) != -1 and assertion.find(part2) != -1:
return True
return False
simpleIgnoreList = []
twoPartIgnoreList = []
getIgnores()
#print "detect_assertions is ready (ignoring %d strings without filenames and %d strings with filenames)" % (len(simpleIgnoreList), len(twoPartIgnoreList))
# ... rest of the code ...
|
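To see what the two-part split buys, here is a self-contained sketch of matching one ignore entry against a log line; the assertion text and paths are invented, and only the ", file " splitting and slash normalisation mirror the record:

    import platform

    def local_slashes(s):
        # Normalise backslashes on Windows so entries written with '/' still match.
        if platform.system() in ('Windows', 'Microsoft'):
            return s.replace("\\", "/")
        return s

    ignore_entry = "ASSERTION: foo is bar, file content/base/src/nsFoo.cpp"
    mpi = ignore_entry.find(", file ")
    part1, part2 = ignore_entry[:mpi + 7], local_slashes(ignore_entry[mpi + 7:])

    line = "###!!! ASSERTION: foo is bar, file c:/builds/moz/content/base/src/nsFoo.cpp, line 42"
    print(line.find(part1) != -1 and line.find(part2) != -1)  # True

Splitting each known assertion into a message part and a filename part lets the same ignore entry match regardless of which machine-specific path prefix or slash style appears in the log.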
adb81eb0d2f7587a721d6ab95c4912e778b84bcc
|
javaee/src/test/java/com/ticketmanor/ui/jsf/LocalDateTimeJsfConverterTest.java
|
javaee/src/test/java/com/ticketmanor/ui/jsf/LocalDateTimeJsfConverterTest.java
|
package com.ticketmanor.ui.jsf;
import static org.junit.Assert.*;
import java.time.LocalDateTime;
import org.junit.Before;
import org.junit.Test;
public class LocalDateTimeJsfConverterTest {
LocalDateTimeJsfConverter target;
final String stringDate = "June 1, 2015 7:30 PM";
final LocalDateTime localDateTime = LocalDateTime.of(2015, 06, 01, 19, 30, 56, 0);
@Before
public void setUp() throws Exception {
target = new LocalDateTimeJsfConverter();
}
@Test
public void testGetAsObject() {
final Object asObject = target.getAsObject(null, null, stringDate);
System.out.println(asObject);
}
@Test
public void testGetAsString() {
final String asString = target.getAsString(null, null, localDateTime);
System.out.println(asString);
assertEquals(stringDate, asString);
}
}
|
package com.ticketmanor.ui.jsf;
import static org.junit.Assert.*;
import java.time.LocalDateTime;
import org.junit.Before;
import org.junit.Test;
public class LocalDateTimeJsfConverterTest {
LocalDateTimeJsfConverter target;
final String stringDate = "June 1, 2015 7:30 PM";
final LocalDateTime localDateTime = LocalDateTime.of(2015, 06, 01, 19, 30, 56, 0);
@Before
public void setUp() throws Exception {
target = new LocalDateTimeJsfConverter();
}
@Test
public void testGetAsObject() {
LocalDateTime asObject = (LocalDateTime) target.getAsObject(null, null, stringDate);
System.out.println("Converted to LocalDateTime: " + asObject);
assertEquals(2015, asObject.getYear());
assertEquals(19, asObject.getHour());
}
@Test
public void testGetAsString() {
final String asString = target.getAsString(null, null, localDateTime);
System.out.println("Converted to String" + asString);
assertEquals(stringDate, asString);
}
}
|
Add asserts to test method.
|
Add asserts to test method.
|
Java
|
mit
|
LearningTree/TicketManorJava,LearningTree/TicketManorJava,LearningTree/TicketManorJava,LearningTree/TicketManorJava
|
java
|
## Code Before:
package com.ticketmanor.ui.jsf;
import static org.junit.Assert.*;
import java.time.LocalDateTime;
import org.junit.Before;
import org.junit.Test;
public class LocalDateTimeJsfConverterTest {
LocalDateTimeJsfConverter target;
final String stringDate = "June 1, 2015 7:30 PM";
final LocalDateTime localDateTime = LocalDateTime.of(2015, 06, 01, 19, 30, 56, 0);
@Before
public void setUp() throws Exception {
target = new LocalDateTimeJsfConverter();
}
@Test
public void testGetAsObject() {
final Object asObject = target.getAsObject(null, null, stringDate);
System.out.println(asObject);
}
@Test
public void testGetAsString() {
final String asString = target.getAsString(null, null, localDateTime);
System.out.println(asString);
assertEquals(stringDate, asString);
}
}
## Instruction:
Add asserts to test method.
## Code After:
package com.ticketmanor.ui.jsf;
import static org.junit.Assert.*;
import java.time.LocalDateTime;
import org.junit.Before;
import org.junit.Test;
public class LocalDateTimeJsfConverterTest {
LocalDateTimeJsfConverter target;
final String stringDate = "June 1, 2015 7:30 PM";
final LocalDateTime localDateTime = LocalDateTime.of(2015, 06, 01, 19, 30, 56, 0);
@Before
public void setUp() throws Exception {
target = new LocalDateTimeJsfConverter();
}
@Test
public void testGetAsObject() {
LocalDateTime asObject = (LocalDateTime) target.getAsObject(null, null, stringDate);
System.out.println("Converted to LocalDateTime: " + asObject);
assertEquals(2015, asObject.getYear());
assertEquals(19, asObject.getHour());
}
@Test
public void testGetAsString() {
final String asString = target.getAsString(null, null, localDateTime);
System.out.println("Converted to String" + asString);
assertEquals(stringDate, asString);
}
}
|
# ... existing code ...
@Test
public void testGetAsObject() {
LocalDateTime asObject = (LocalDateTime) target.getAsObject(null, null, stringDate);
System.out.println("Converted to LocalDateTime: " + asObject);
assertEquals(2015, asObject.getYear());
assertEquals(19, asObject.getHour());
}
@Test
public void testGetAsString() {
final String asString = target.getAsString(null, null, localDateTime);
System.out.println("Converted to String" + asString);
assertEquals(stringDate, asString);
}
}
# ... rest of the code ...
|
122bac32131b90b46673c6895dada3f01018f52b
|
setup.py
|
setup.py
|
import os.path as op
import re
from setuptools import setup
def read(name, only_open=False):
f = open(op.join(op.dirname(__file__), name))
return f if only_open else f.read()
ext_version = None
with read('flask_json.py', only_open=True) as f:
for line in f:
if line.startswith('__version__'):
ext_version, = re.findall(r"__version__\W*=\W*'([^']+)'", line)
break
setup(
name='Flask-JSON',
version=ext_version,
url='https://github.com/skozlovf/flask-json',
license='BSD',
author='Sergey Kozlov',
author_email='[email protected]',
description='Better JSON support for Flask',
long_description=read('README.rst'),
py_modules=['flask_json'],
zip_safe=False,
include_package_data=True,
platforms='any',
install_requires=['Flask>=0.10'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
test_suite='nose.collector'
)
|
import os.path as op
import re
from setuptools import setup
def read(name, only_open=False):
f = open(op.join(op.dirname(__file__), name))
return f if only_open else f.read()
ext_version = None
with read('flask_json.py', only_open=True) as f:
for line in f:
if line.startswith('__version__'):
ext_version, = re.findall(r"__version__\W*=\W*'([^']+)'", line)
break
setup(
name='Flask-JSON',
version=ext_version,
url='https://github.com/skozlovf/flask-json',
license='BSD',
author='Sergey Kozlov',
author_email='[email protected]',
description='Better JSON support for Flask',
long_description=read('README.rst'),
py_modules=['flask_json'],
zip_safe=False,
include_package_data=True,
platforms='any',
install_requires=['Flask>=0.10'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
tests_require=['nose>=1.0'],
test_suite='nose.collector'
)
|
Add 'nose' to required packages for testing.
|
Add 'nose' to required packages for testing.
|
Python
|
bsd-3-clause
|
craig552uk/flask-json
|
python
|
## Code Before:
import os.path as op
import re
from setuptools import setup
def read(name, only_open=False):
f = open(op.join(op.dirname(__file__), name))
return f if only_open else f.read()
ext_version = None
with read('flask_json.py', only_open=True) as f:
for line in f:
if line.startswith('__version__'):
ext_version, = re.findall(r"__version__\W*=\W*'([^']+)'", line)
break
setup(
name='Flask-JSON',
version=ext_version,
url='https://github.com/skozlovf/flask-json',
license='BSD',
author='Sergey Kozlov',
author_email='[email protected]',
description='Better JSON support for Flask',
long_description=read('README.rst'),
py_modules=['flask_json'],
zip_safe=False,
include_package_data=True,
platforms='any',
install_requires=['Flask>=0.10'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
test_suite='nose.collector'
)
## Instruction:
Add 'nose' to required packages for testing.
## Code After:
import os.path as op
import re
from setuptools import setup
def read(name, only_open=False):
f = open(op.join(op.dirname(__file__), name))
return f if only_open else f.read()
ext_version = None
with read('flask_json.py', only_open=True) as f:
for line in f:
if line.startswith('__version__'):
ext_version, = re.findall(r"__version__\W*=\W*'([^']+)'", line)
break
setup(
name='Flask-JSON',
version=ext_version,
url='https://github.com/skozlovf/flask-json',
license='BSD',
author='Sergey Kozlov',
author_email='[email protected]',
description='Better JSON support for Flask',
long_description=read('README.rst'),
py_modules=['flask_json'],
zip_safe=False,
include_package_data=True,
platforms='any',
install_requires=['Flask>=0.10'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
tests_require=['nose>=1.0'],
test_suite='nose.collector'
)
|
...
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
tests_require=['nose>=1.0'],
test_suite='nose.collector'
)
...
|
46972788b2f4c3b3ac79e2d2fb9b8dd6a3834148
|
src/yunohost/utils/error.py
|
src/yunohost/utils/error.py
|
from moulinette.core import MoulinetteError
from moulinette import m18n
class YunohostError(MoulinetteError):
"""Yunohost base exception"""
def __init__(self, key, __raw_msg__=False, *args, **kwargs):
if __raw_msg__:
msg = key
else:
msg = m18n.n(key, *args, **kwargs)
super(YunohostError, self).__init__(msg, __raw_msg__=True)
|
from moulinette.core import MoulinetteError
from moulinette import m18n
class YunohostError(MoulinetteError):
"""
Yunohost base exception
The (only?) main difference with MoulinetteError being that keys
are translated via m18n.n (namespace) instead of m18n.g (global?)
"""
def __init__(self, key, __raw_msg__=False, *args, **kwargs):
if __raw_msg__:
msg = key
else:
msg = m18n.n(key, *args, **kwargs)
super(YunohostError, self).__init__(msg, __raw_msg__=True)
|
Add comment about the motivation behind YunohostError
|
Add comment about the motivation behind YunohostError
|
Python
|
agpl-3.0
|
YunoHost/yunohost,YunoHost/yunohost,YunoHost/moulinette-yunohost,YunoHost/moulinette-yunohost,YunoHost/yunohost,YunoHost/moulinette-yunohost,YunoHost/yunohost,YunoHost/moulinette-yunohost,YunoHost/moulinette-yunohost
|
python
|
## Code Before:
from moulinette.core import MoulinetteError
from moulinette import m18n
class YunohostError(MoulinetteError):
"""Yunohost base exception"""
def __init__(self, key, __raw_msg__=False, *args, **kwargs):
if __raw_msg__:
msg = key
else:
msg = m18n.n(key, *args, **kwargs)
super(YunohostError, self).__init__(msg, __raw_msg__=True)
## Instruction:
Add comment about the motivation behind YunohostError
## Code After:
from moulinette.core import MoulinetteError
from moulinette import m18n
class YunohostError(MoulinetteError):
"""
Yunohost base exception
The (only?) main difference with MoulinetteError being that keys
are translated via m18n.n (namespace) instead of m18n.g (global?)
"""
def __init__(self, key, __raw_msg__=False, *args, **kwargs):
if __raw_msg__:
msg = key
else:
msg = m18n.n(key, *args, **kwargs)
super(YunohostError, self).__init__(msg, __raw_msg__=True)
|
...
from moulinette import m18n
class YunohostError(MoulinetteError):
"""
Yunohost base exception
The (only?) main difference with MoulinetteError being that keys
are translated via m18n.n (namespace) instead of m18n.g (global?)
"""
def __init__(self, key, __raw_msg__=False, *args, **kwargs):
if __raw_msg__:
msg = key
...
|
f25c0074a013255141371b46cff0a506ad0b2ab5
|
axiom/__init__.py
|
axiom/__init__.py
|
from axiom._version import __version__
from epsilon import asTwistedVersion
version = asTwistedVersion("axiom", __version__)
|
from axiom._version import __version__
from twisted.python import versions
def asTwistedVersion(packageName, versionString):
return versions.Version(packageName, *map(int, versionString.split(".")))
version = asTwistedVersion("axiom", __version__)
|
Add a local asTwistedVersion implementation to Axiom, so as to not add another setup-time dependency on Epsilon
|
Add a local asTwistedVersion implementation to Axiom, so as to not add another setup-time dependency on Epsilon
|
Python
|
mit
|
hawkowl/axiom,twisted/axiom
|
python
|
## Code Before:
from axiom._version import __version__
from epsilon import asTwistedVersion
version = asTwistedVersion("axiom", __version__)
## Instruction:
Add a local asTwistedVersion implementation to Axiom, so as to not add another setup-time dependency on Epsilon
## Code After:
from axiom._version import __version__
from twisted.python import versions
def asTwistedVersion(packageName, versionString):
return versions.Version(packageName, *map(int, versionString.split(".")))
version = asTwistedVersion("axiom", __version__)
|
...
from axiom._version import __version__
from twisted.python import versions
def asTwistedVersion(packageName, versionString):
return versions.Version(packageName, *map(int, versionString.split(".")))
version = asTwistedVersion("axiom", __version__)
...
|
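A quick check of what the inlined helper produces; this assumes Twisted is installed and uses an example version string:

    from twisted.python import versions

    def as_twisted_version(package_name, version_string):
        # Same logic as the inlined helper: split "a.b.c" into ints.
        return versions.Version(package_name, *map(int, version_string.split(".")))

    # Prints something like: [axiom, version 0.7.5]
    print(as_twisted_version("axiom", "0.7.5"))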
df3e3d9524b7f21e36cd01ee9e8b2a0312bfa489
|
src/foam/core/ContextAgentRunnable.java
|
src/foam/core/ContextAgentRunnable.java
|
/**
* @license
* Copyright 2019 The FOAM Authors. All Rights Reserved.
* http://www.apache.org/licenses/LICENSE-2.0
*/
package foam.core;
public class ContextAgentRunnable
implements Runnable
{
final X x_;
final ContextAgent agent_;
final String description_;
public ContextAgentRunnable(X x, ContextAgent agent, String description) {
x_ = x;
agent_ = agent;
description_ = description;
}
public String toString() {
return description_;
}
public void run() {
agent_.execute(x_);
}
}
|
/**
* @license
* Copyright 2019 The FOAM Authors. All Rights Reserved.
* http://www.apache.org/licenses/LICENSE-2.0
*/
package foam.core;
public class ContextAgentRunnable
implements Runnable
{
final X x_;
final ContextAgent agent_;
final String description_;
public ContextAgentRunnable(X x, ContextAgent agent, String description) {
x_ = x;
agent_ = agent;
description_ = description;
}
public String toString() {
return description_;
}
public void run() {
XLocator.set(x_);
try {
agent_.execute(x_);
} finally {
XLocator.set(null);
}
}
}
|
Fix XLocator when run from AssemblyLine.
|
Fix XLocator when run from AssemblyLine.
|
Java
|
apache-2.0
|
foam-framework/foam2,foam-framework/foam2,jacksonic/vjlofvhjfgm,foam-framework/foam2,foam-framework/foam2,foam-framework/foam2,jacksonic/vjlofvhjfgm,jacksonic/vjlofvhjfgm
|
java
|
## Code Before:
/**
* @license
* Copyright 2019 The FOAM Authors. All Rights Reserved.
* http://www.apache.org/licenses/LICENSE-2.0
*/
package foam.core;
public class ContextAgentRunnable
implements Runnable
{
final X x_;
final ContextAgent agent_;
final String description_;
public ContextAgentRunnable(X x, ContextAgent agent, String description) {
x_ = x;
agent_ = agent;
description_ = description;
}
public String toString() {
return description_;
}
public void run() {
agent_.execute(x_);
}
}
## Instruction:
Fix XLocator when run from AssemblyLine.
## Code After:
/**
* @license
* Copyright 2019 The FOAM Authors. All Rights Reserved.
* http://www.apache.org/licenses/LICENSE-2.0
*/
package foam.core;
public class ContextAgentRunnable
implements Runnable
{
final X x_;
final ContextAgent agent_;
final String description_;
public ContextAgentRunnable(X x, ContextAgent agent, String description) {
x_ = x;
agent_ = agent;
description_ = description;
}
public String toString() {
return description_;
}
public void run() {
XLocator.set(x_);
try {
agent_.execute(x_);
} finally {
XLocator.set(null);
}
}
}
|
...
}
public void run() {
XLocator.set(x_);
try {
agent_.execute(x_);
} finally {
XLocator.set(null);
}
}
}
...
|
5723bf446c91e3b2892c5d2f72a8f59535da01ec
|
f-tep-api/src/main/java/com/cgi/eoss/ftep/model/projections/DetailedFtepService.java
|
f-tep-api/src/main/java/com/cgi/eoss/ftep/model/projections/DetailedFtepService.java
|
package com.cgi.eoss.ftep.model.projections;
import com.cgi.eoss.ftep.security.FtepAccess;
import com.cgi.eoss.ftep.model.FtepService;
import com.cgi.eoss.ftep.model.FtepServiceDescriptor;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.rest.core.config.Projection;
import org.springframework.hateoas.Identifiable;
/**
* <p>Comprehensive representation of an FtepService entity, including the full description of input and output fields, for embedding in REST
* responses.</p>
*/
@Projection(name = "detailedFtepService", types = FtepService.class)
public interface DetailedFtepService extends Identifiable<Long> {
String getName();
String getDescription();
ShortUser getOwner();
FtepService.Type getType();
String getDockerTag();
FtepService.Licence getLicence();
FtepService.Status getStatus();
FtepServiceDescriptor getServiceDescriptor();
@Value("#{@ftepSecurityService.getCurrentAccess(T(com.cgi.eoss.ftep.model.FtepService), target.id)}")
FtepAccess getAccess();
}
|
package com.cgi.eoss.ftep.model.projections;
import com.cgi.eoss.ftep.security.FtepAccess;
import com.cgi.eoss.ftep.model.FtepService;
import com.cgi.eoss.ftep.model.FtepServiceDescriptor;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.rest.core.config.Projection;
import org.springframework.hateoas.Identifiable;
/**
* <p>Comprehensive representation of an FtepService entity, including the full description of input and output fields, for embedding in REST
* responses.</p>
*/
@Projection(name = "detailedFtepService", types = FtepService.class)
public interface DetailedFtepService extends Identifiable<Long> {
String getName();
String getDescription();
ShortUser getOwner();
FtepService.Type getType();
String getDockerTag();
FtepService.Licence getLicence();
FtepService.Status getStatus();
String getApplicationPort();
FtepServiceDescriptor getServiceDescriptor();
@Value("#{@ftepSecurityService.getCurrentAccess(T(com.cgi.eoss.ftep.model.FtepService), target.id)}")
FtepAccess getAccess();
}
|
Add applicationPort to detailed FtepService projection
|
Add applicationPort to detailed FtepService projection
Change-Id: Ib4f954fef140540590cc49120d0100d2d1f46562
|
Java
|
agpl-3.0
|
cgi-eoss/ftep,cgi-eoss/ftep,cgi-eoss/ftep,cgi-eoss/ftep,cgi-eoss/ftep,cgi-eoss/ftep,cgi-eoss/ftep
|
java
|
## Code Before:
package com.cgi.eoss.ftep.model.projections;
import com.cgi.eoss.ftep.security.FtepAccess;
import com.cgi.eoss.ftep.model.FtepService;
import com.cgi.eoss.ftep.model.FtepServiceDescriptor;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.rest.core.config.Projection;
import org.springframework.hateoas.Identifiable;
/**
* <p>Comprehensive representation of an FtepService entity, including the full description of input and output fields, for embedding in REST
* responses.</p>
*/
@Projection(name = "detailedFtepService", types = FtepService.class)
public interface DetailedFtepService extends Identifiable<Long> {
String getName();
String getDescription();
ShortUser getOwner();
FtepService.Type getType();
String getDockerTag();
FtepService.Licence getLicence();
FtepService.Status getStatus();
FtepServiceDescriptor getServiceDescriptor();
@Value("#{@ftepSecurityService.getCurrentAccess(T(com.cgi.eoss.ftep.model.FtepService), target.id)}")
FtepAccess getAccess();
}
## Instruction:
Add applicationPort to detailed FtepService projection
Change-Id: Ib4f954fef140540590cc49120d0100d2d1f46562
## Code After:
package com.cgi.eoss.ftep.model.projections;
import com.cgi.eoss.ftep.security.FtepAccess;
import com.cgi.eoss.ftep.model.FtepService;
import com.cgi.eoss.ftep.model.FtepServiceDescriptor;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.rest.core.config.Projection;
import org.springframework.hateoas.Identifiable;
/**
* <p>Comprehensive representation of an FtepService entity, including the full description of input and output fields, for embedding in REST
* responses.</p>
*/
@Projection(name = "detailedFtepService", types = FtepService.class)
public interface DetailedFtepService extends Identifiable<Long> {
String getName();
String getDescription();
ShortUser getOwner();
FtepService.Type getType();
String getDockerTag();
FtepService.Licence getLicence();
FtepService.Status getStatus();
String getApplicationPort();
FtepServiceDescriptor getServiceDescriptor();
@Value("#{@ftepSecurityService.getCurrentAccess(T(com.cgi.eoss.ftep.model.FtepService), target.id)}")
FtepAccess getAccess();
}
|
# ... existing code ...
String getDockerTag();
FtepService.Licence getLicence();
FtepService.Status getStatus();
String getApplicationPort();
FtepServiceDescriptor getServiceDescriptor();
@Value("#{@ftepSecurityService.getCurrentAccess(T(com.cgi.eoss.ftep.model.FtepService), target.id)}")
FtepAccess getAccess();
# ... rest of the code ...
|
73af0eed3ce746154b957af5c05137f9e432c7a3
|
tests/test_pkgmanifest.py
|
tests/test_pkgmanifest.py
|
import requests
from platformio.util import get_api_result
def pytest_generate_tests(metafunc):
if "package_data" not in metafunc.fixturenames:
return
pkgs_manifest = get_api_result("/packages")
assert isinstance(pkgs_manifest, dict)
packages = []
for _, variants in pkgs_manifest.iteritems():
for item in variants:
packages.append(item)
metafunc.parametrize("package_data", packages)
def validate_response(req):
assert req.status_code == 200
assert int(req.headers['Content-Length']) > 0
def validate_package(url):
r = requests.head(url, allow_redirects=True)
validate_response(r)
assert r.headers['Content-Type'] == "application/x-gzip"
def test_package(package_data):
assert package_data['url'].endswith("%d.tar.gz" % package_data['version'])
validate_package(package_data['url'])
|
import requests
from platformio.util import get_api_result
def pytest_generate_tests(metafunc):
if "package_data" not in metafunc.fixturenames:
return
pkgs_manifest = get_api_result("/packages")
assert isinstance(pkgs_manifest, dict)
packages = []
for _, variants in pkgs_manifest.iteritems():
for item in variants:
packages.append(item)
metafunc.parametrize("package_data", packages)
def validate_response(req):
assert req.status_code == 200
assert int(req.headers['Content-Length']) > 0
def validate_package(url):
r = requests.head(url, allow_redirects=True)
validate_response(r)
assert r.headers['Content-Type'] in ("application/x-gzip",
"application/octet-stream")
def test_package(package_data):
assert package_data['url'].endswith("%d.tar.gz" % package_data['version'])
validate_package(package_data['url'])
|
Add "application/octet-stream" mime type for package
|
Add "application/octet-stream" mime type for package
|
Python
|
apache-2.0
|
bkudria/platformio,awong1900/platformio,bkudria/platformio,platformio/platformio,jrobeson/platformio,platformio/platformio-core,TimJay/platformio,jrobeson/platformio,eiginn/platformio,platformio/platformio-core,bkudria/platformio,TimJay/platformio,TimJay/platformio,awong1900/platformio,valeros/platformio,ZachMassia/platformio,TimJay/platformio,mseroczynski/platformio,awong1900/platformio,jrobeson/platformio,mcanthony/platformio,dkuku/platformio,mplewis/platformio,TimJay/platformio,bkudria/platformio,jrobeson/platformio,atyenoria/platformio
|
python
|
## Code Before:
import requests
from platformio.util import get_api_result
def pytest_generate_tests(metafunc):
if "package_data" not in metafunc.fixturenames:
return
pkgs_manifest = get_api_result("/packages")
assert isinstance(pkgs_manifest, dict)
packages = []
for _, variants in pkgs_manifest.iteritems():
for item in variants:
packages.append(item)
metafunc.parametrize("package_data", packages)
def validate_response(req):
assert req.status_code == 200
assert int(req.headers['Content-Length']) > 0
def validate_package(url):
r = requests.head(url, allow_redirects=True)
validate_response(r)
assert r.headers['Content-Type'] == "application/x-gzip"
def test_package(package_data):
assert package_data['url'].endswith("%d.tar.gz" % package_data['version'])
validate_package(package_data['url'])
## Instruction:
Add "application/octet-stream" mime type for package
## Code After:
import requests
from platformio.util import get_api_result
def pytest_generate_tests(metafunc):
if "package_data" not in metafunc.fixturenames:
return
pkgs_manifest = get_api_result("/packages")
assert isinstance(pkgs_manifest, dict)
packages = []
for _, variants in pkgs_manifest.iteritems():
for item in variants:
packages.append(item)
metafunc.parametrize("package_data", packages)
def validate_response(req):
assert req.status_code == 200
assert int(req.headers['Content-Length']) > 0
def validate_package(url):
r = requests.head(url, allow_redirects=True)
validate_response(r)
assert r.headers['Content-Type'] in ("application/x-gzip",
"application/octet-stream")
def test_package(package_data):
assert package_data['url'].endswith("%d.tar.gz" % package_data['version'])
validate_package(package_data['url'])
|
// ... existing code ...
def validate_package(url):
r = requests.head(url, allow_redirects=True)
validate_response(r)
assert r.headers['Content-Type'] in ("application/x-gzip",
"application/octet-stream")
def test_package(package_data):
// ... rest of the code ...
|
c4258c565dba86e07d53e897deefa4b085b6c820
|
sandbox/src/test/java/ru/stqa/pft/sandbox/EquationTests.java
|
sandbox/src/test/java/ru/stqa/pft/sandbox/EquationTests.java
|
package ru.stqa.pft.sandbox;
import org.testng.Assert;
import org.testng.annotations.Test;
public class EquationTests {
@Test
public void test0() {
Equation e = new Equation(1,1,1);
// Assert.assertEquals(e.rootNumber(), 0);
Assert.assertEquals(e.rootNumber(), 1);
}
@Test
public void test1() {
Equation e = new Equation(1,2,1);
Assert.assertEquals(e.rootNumber(), 1);
}
@Test
public void test3() {
Equation e = new Equation(1,5,6);
Assert.assertEquals(e.rootNumber(), 2);
}
@Test
public void testLinear() {
Equation e = new Equation(0,1,1);
Assert.assertEquals(e.rootNumber(), 1);
}
@Test
public void testConstant() {
Equation e = new Equation(0,0,1);
Assert.assertEquals(e.rootNumber(), 0);
}
@Test
public void testZero() {
Equation e = new Equation(0,0,0);
Assert.assertEquals(e.rootNumber(), -1);
}
}
|
package ru.stqa.pft.sandbox;
import org.testng.Assert;
import org.testng.annotations.Test;
public class EquationTests {
@Test
public void test0() {
Equation e = new Equation(1,1,1);
Assert.assertEquals(e.rootNumber(), 0);
}
@Test
public void test1() {
Equation e = new Equation(1,2,1);
Assert.assertEquals(e.rootNumber(), 1);
}
@Test
public void test3() {
Equation e = new Equation(1,5,6);
Assert.assertEquals(e.rootNumber(), 2);
}
@Test
public void testLinear() {
Equation e = new Equation(0,1,1);
Assert.assertEquals(e.rootNumber(), 1);
}
@Test
public void testConstant() {
Equation e = new Equation(0,0,1);
Assert.assertEquals(e.rootNumber(), 0);
}
@Test
public void testZero() {
Equation e = new Equation(0,0,0);
Assert.assertEquals(e.rootNumber(), -1);
}
}
|
Revert "Испорчен тест для проверки Jenkins"
|
Revert "Испорчен тест для проверки Jenkins"
This reverts commit f7ee8b4aa9bd893b6d00fff3f17922bd9673bc63.
|
Java
|
apache-2.0
|
thedsv/java_pft,thedsv/java_pft
|
java
|
## Code Before:
package ru.stqa.pft.sandbox;
import org.testng.Assert;
import org.testng.annotations.Test;
public class EquationTests {
@Test
public void test0() {
Equation e = new Equation(1,1,1);
// Assert.assertEquals(e.rootNumber(), 0);
Assert.assertEquals(e.rootNumber(), 1);
}
@Test
public void test1() {
Equation e = new Equation(1,2,1);
Assert.assertEquals(e.rootNumber(), 1);
}
@Test
public void test3() {
Equation e = new Equation(1,5,6);
Assert.assertEquals(e.rootNumber(), 2);
}
@Test
public void testLinear() {
Equation e = new Equation(0,1,1);
Assert.assertEquals(e.rootNumber(), 1);
}
@Test
public void testConstant() {
Equation e = new Equation(0,0,1);
Assert.assertEquals(e.rootNumber(), 0);
}
@Test
public void testZero() {
Equation e = new Equation(0,0,0);
Assert.assertEquals(e.rootNumber(), -1);
}
}
## Instruction:
Revert "Испорчен тест для проверки Jenkins"
This reverts commit f7ee8b4aa9bd893b6d00fff3f17922bd9673bc63.
## Code After:
package ru.stqa.pft.sandbox;
import org.testng.Assert;
import org.testng.annotations.Test;
public class EquationTests {
@Test
public void test0() {
Equation e = new Equation(1,1,1);
Assert.assertEquals(e.rootNumber(), 0);
}
@Test
public void test1() {
Equation e = new Equation(1,2,1);
Assert.assertEquals(e.rootNumber(), 1);
}
@Test
public void test3() {
Equation e = new Equation(1,5,6);
Assert.assertEquals(e.rootNumber(), 2);
}
@Test
public void testLinear() {
Equation e = new Equation(0,1,1);
Assert.assertEquals(e.rootNumber(), 1);
}
@Test
public void testConstant() {
Equation e = new Equation(0,0,1);
Assert.assertEquals(e.rootNumber(), 0);
}
@Test
public void testZero() {
Equation e = new Equation(0,0,0);
Assert.assertEquals(e.rootNumber(), -1);
}
}
|
# ... existing code ...
@Test
public void test0() {
Equation e = new Equation(1,1,1);
Assert.assertEquals(e.rootNumber(), 0);
}
@Test
# ... rest of the code ...
|
a1c570001e4214d1e2e2c4d34e2ee74721ecb2d5
|
xpserver_api/serializers.py
|
xpserver_api/serializers.py
|
from django.contrib.auth.models import User
from rest_framework import serializers, viewsets
class UserSerializer(serializers.HyperlinkedModelSerializer):
    class Meta:
        model = User
        fields = ('url', 'email')
    def create(self, validated_data):
        user = User.objects.create(**validated_data)
        user.username = validated_data['email']
        user.save()
        return user
class UserViewSet(viewsets.ModelViewSet):
    queryset = User.objects.all()
    serializer_class = UserSerializer
|
from django.contrib.auth.models import User
from rest_framework import serializers, viewsets
from xpserver_api.services import generate_activation_code, EmailSender
from xpserver_web.models import Profile
class UserSerializer(serializers.HyperlinkedModelSerializer):
    class Meta:
        model = User
        fields = ('url', 'email')
    def create(self, validated_data):
        user = User.objects.create(**validated_data)
        email = validated_data['email']
        user.username = email
        user.is_active = False
        user.save()
        profile = Profile.objects.create(user=user, activation_code=generate_activation_code())
        profile.save()
        email_sender = EmailSender()
        email_sender.send_activation_email_with(profile=profile)
        return user
class UserViewSet(viewsets.ModelViewSet):
    queryset = User.objects.all()
    serializer_class = UserSerializer
|
Add user profile when reg via api
|
Add user profile when reg via api
When user is registered via api it will create profile, activation link
and send it to given email just like a web registration flow.
|
Python
|
mit
|
xp2017-hackergarden/server,xp2017-hackergarden/server,xp2017-hackergarden/server,xp2017-hackergarden/server
|
python
|
## Code Before:
from django.contrib.auth.models import User
from rest_framework import serializers, viewsets
class UserSerializer(serializers.HyperlinkedModelSerializer):
    class Meta:
        model = User
        fields = ('url', 'email')
    def create(self, validated_data):
        user = User.objects.create(**validated_data)
        user.username = validated_data['email']
        user.save()
        return user
class UserViewSet(viewsets.ModelViewSet):
    queryset = User.objects.all()
    serializer_class = UserSerializer
## Instruction:
Add user profile when reg via api
When user is registered via api it will create profile, activation link
and send it to given email just like a web registration flow.
## Code After:
from django.contrib.auth.models import User
from rest_framework import serializers, viewsets
from xpserver_api.services import generate_activation_code, EmailSender
from xpserver_web.models import Profile
class UserSerializer(serializers.HyperlinkedModelSerializer):
    class Meta:
        model = User
        fields = ('url', 'email')
    def create(self, validated_data):
        user = User.objects.create(**validated_data)
        email = validated_data['email']
        user.username = email
        user.is_active = False
        user.save()
        profile = Profile.objects.create(user=user, activation_code=generate_activation_code())
        profile.save()
        email_sender = EmailSender()
        email_sender.send_activation_email_with(profile=profile)
        return user
class UserViewSet(viewsets.ModelViewSet):
    queryset = User.objects.all()
    serializer_class = UserSerializer
|
# ... existing code ...
from django.contrib.auth.models import User
from rest_framework import serializers, viewsets
from xpserver_api.services import generate_activation_code, EmailSender
from xpserver_web.models import Profile
class UserSerializer(serializers.HyperlinkedModelSerializer):
# ... modified code ...
    def create(self, validated_data):
        user = User.objects.create(**validated_data)
        email = validated_data['email']
        user.username = email
        user.is_active = False
        user.save()
        profile = Profile.objects.create(user=user, activation_code=generate_activation_code())
        profile.save()
        email_sender = EmailSender()
        email_sender.send_activation_email_with(profile=profile)
        return user
# ... rest of the code ...
|
1779970872afd457336334231bef3c8629dcd375
|
gem/tests/test_profiles.py
|
gem/tests/test_profiles.py
|
from molo.core.tests.base import MoloTestCaseMixin
from django.test import TestCase, Client
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
class GemRegistrationViewTest(TestCase, MoloTestCaseMixin):
    def setUp(self):
        self.client = Client()
        self.mk_main()
    def test_user_info_displaying_after_registration(self):
        self.user = User.objects.create_user(
            username='tester',
            email='[email protected]',
            password='tester')
        self.user.profile.gender = 'female'
        self.user.profile.alias = 'useralias'
        self.user.profile.save()
        self.client.login(username='tester', password='tester')
        response = self.client.get(reverse('edit_my_profile'))
        print response
        self.assertContains(response, 'useralias')
        self.assertNotContains(response, '<option value="f">female</option>')
|
from molo.core.tests.base import MoloTestCaseMixin
from django.test import TestCase, Client
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
class GemRegistrationViewTest(TestCase, MoloTestCaseMixin):
    def setUp(self):
        self.client = Client()
        self.mk_main()
    def test_user_info_displaying_after_registration(self):
        self.user = User.objects.create_user(
            username='tester',
            email='[email protected]',
            password='tester')
        self.client.login(username='tester', password='tester')
        response = self.client.get(reverse('edit_my_profile'))
        self.assertNotContains(response, 'useralias')
        self.assertContains(response, '<option value="f">female</option>')
        self.user.gem_profile.gender = 'f'
        self.user.profile.alias = 'useralias'
        self.user.gem_profile.save()
        self.user.profile.save()
        response = self.client.get(reverse('edit_my_profile'))
        self.assertContains(response, 'useralias')
        self.assertNotContains(response, '<option value="f">female</option>')
|
Update tests and fix the failing test
|
Update tests and fix the failing test
|
Python
|
bsd-2-clause
|
praekelt/molo-gem,praekelt/molo-gem,praekelt/molo-gem
|
python
|
## Code Before:
from molo.core.tests.base import MoloTestCaseMixin
from django.test import TestCase, Client
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
class GemRegistrationViewTest(TestCase, MoloTestCaseMixin):
    def setUp(self):
        self.client = Client()
        self.mk_main()
    def test_user_info_displaying_after_registration(self):
        self.user = User.objects.create_user(
            username='tester',
            email='[email protected]',
            password='tester')
        self.user.profile.gender = 'female'
        self.user.profile.alias = 'useralias'
        self.user.profile.save()
        self.client.login(username='tester', password='tester')
        response = self.client.get(reverse('edit_my_profile'))
        print response
        self.assertContains(response, 'useralias')
        self.assertNotContains(response, '<option value="f">female</option>')
## Instruction:
Update tests and fix the failing test
## Code After:
from molo.core.tests.base import MoloTestCaseMixin
from django.test import TestCase, Client
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
class GemRegistrationViewTest(TestCase, MoloTestCaseMixin):
    def setUp(self):
        self.client = Client()
        self.mk_main()
    def test_user_info_displaying_after_registration(self):
        self.user = User.objects.create_user(
            username='tester',
            email='[email protected]',
            password='tester')
        self.client.login(username='tester', password='tester')
        response = self.client.get(reverse('edit_my_profile'))
        self.assertNotContains(response, 'useralias')
        self.assertContains(response, '<option value="f">female</option>')
        self.user.gem_profile.gender = 'f'
        self.user.profile.alias = 'useralias'
        self.user.gem_profile.save()
        self.user.profile.save()
        response = self.client.get(reverse('edit_my_profile'))
        self.assertContains(response, 'useralias')
        self.assertNotContains(response, '<option value="f">female</option>')
|
# ... existing code ...
            username='tester',
            email='[email protected]',
            password='tester')
        self.client.login(username='tester', password='tester')
        response = self.client.get(reverse('edit_my_profile'))
        self.assertNotContains(response, 'useralias')
        self.assertContains(response, '<option value="f">female</option>')
        self.user.gem_profile.gender = 'f'
        self.user.profile.alias = 'useralias'
        self.user.gem_profile.save()
        self.user.profile.save()
        response = self.client.get(reverse('edit_my_profile'))
        self.assertContains(response, 'useralias')
        self.assertNotContains(response, '<option value="f">female</option>')
# ... rest of the code ...
|
20224e4fe8b93dee087dd7a455f9709b9795a026
|
app/models.py
|
app/models.py
|
from app import database
class Talk(database.Model):
    id = database.Column(database.Integer, primary_key=True, autoincrement=True)
    title = database.Column(database.String(128), nullable=False)
    description = database.Column(database.String(512))
    speaker_facebook_id = database.Column(database.BIGINT, database.ForeignKey('speaker.facebook_id'),
                                          nullable=False)
    liked_by = database.relationship('Liker_Talk', backref='talk', lazy='dynamic')
    def __repr__(self):
        return '<Talk %r>' % self.id
class Speaker(database.Model):
    facebook_id = database.Column(database.BIGINT, primary_key=True)
    name = database.Column(database.String(128), nullable=False)
    talks = database.relationship('Talk', backref='speaker', lazy='dynamic')
    def __repr__(self):
        return '<Speaker %r>' % self.facebook_id
class Liker_Talk(database.Model):
    liker_facebook_id = database.Column(database.BIGINT, primary_key=True)
    talk_id = database.Column(database.Integer, database.ForeignKey('talk.id'), primary_key=True)
    def __repr__(self):
        liker = repr(self.liker_facebook_id)
        talk = repr(self.talk_id)
        return '<Liker_Talk %r>' % ', '.join((liker, talk))
|
from app import database
class Talk(database.Model):
    id = database.Column(database.Integer, primary_key=True, autoincrement=True)
    title = database.Column(database.String(128), unique=True, nullable=False)
    description = database.Column(database.String(512))
    speaker_facebook_id = database.Column(database.BIGINT, database.ForeignKey('speaker.facebook_id'),
                                          nullable=False)
    liked_by = database.relationship('Liker_Talk', backref='talk', lazy='dynamic')
    def __repr__(self):
        return '<Talk %r>' % self.id
class Speaker(database.Model):
    facebook_id = database.Column(database.BIGINT, primary_key=True)
    name = database.Column(database.String(128), nullable=False)
    talks = database.relationship('Talk', backref='speaker', lazy='dynamic')
    def __repr__(self):
        return '<Speaker %r>' % self.facebook_id
class Liker_Talk(database.Model):
    liker_facebook_id = database.Column(database.BIGINT, primary_key=True)
    talk_id = database.Column(database.Integer, database.ForeignKey('talk.id'), primary_key=True)
    def __repr__(self):
        liker = repr(self.liker_facebook_id)
        talk = repr(self.talk_id)
        return '<Liker_Talk %r>' % ', '.join((liker, talk))
|
Make title unique Talk property
|
Make title unique Talk property
|
Python
|
mit
|
Stark-Mountain/meetup-facebook-bot,Stark-Mountain/meetup-facebook-bot
|
python
|
## Code Before:
from app import database
class Talk(database.Model):
    id = database.Column(database.Integer, primary_key=True, autoincrement=True)
    title = database.Column(database.String(128), nullable=False)
    description = database.Column(database.String(512))
    speaker_facebook_id = database.Column(database.BIGINT, database.ForeignKey('speaker.facebook_id'),
                                          nullable=False)
    liked_by = database.relationship('Liker_Talk', backref='talk', lazy='dynamic')
    def __repr__(self):
        return '<Talk %r>' % self.id
class Speaker(database.Model):
    facebook_id = database.Column(database.BIGINT, primary_key=True)
    name = database.Column(database.String(128), nullable=False)
    talks = database.relationship('Talk', backref='speaker', lazy='dynamic')
    def __repr__(self):
        return '<Speaker %r>' % self.facebook_id
class Liker_Talk(database.Model):
    liker_facebook_id = database.Column(database.BIGINT, primary_key=True)
    talk_id = database.Column(database.Integer, database.ForeignKey('talk.id'), primary_key=True)
    def __repr__(self):
        liker = repr(self.liker_facebook_id)
        talk = repr(self.talk_id)
        return '<Liker_Talk %r>' % ', '.join((liker, talk))
## Instruction:
Make title unique Talk property
## Code After:
from app import database
class Talk(database.Model):
    id = database.Column(database.Integer, primary_key=True, autoincrement=True)
    title = database.Column(database.String(128), unique=True, nullable=False)
    description = database.Column(database.String(512))
    speaker_facebook_id = database.Column(database.BIGINT, database.ForeignKey('speaker.facebook_id'),
                                          nullable=False)
    liked_by = database.relationship('Liker_Talk', backref='talk', lazy='dynamic')
    def __repr__(self):
        return '<Talk %r>' % self.id
class Speaker(database.Model):
    facebook_id = database.Column(database.BIGINT, primary_key=True)
    name = database.Column(database.String(128), nullable=False)
    talks = database.relationship('Talk', backref='speaker', lazy='dynamic')
    def __repr__(self):
        return '<Speaker %r>' % self.facebook_id
class Liker_Talk(database.Model):
    liker_facebook_id = database.Column(database.BIGINT, primary_key=True)
    talk_id = database.Column(database.Integer, database.ForeignKey('talk.id'), primary_key=True)
    def __repr__(self):
        liker = repr(self.liker_facebook_id)
        talk = repr(self.talk_id)
        return '<Liker_Talk %r>' % ', '.join((liker, talk))
|
# ... existing code ...
class Talk(database.Model):
    id = database.Column(database.Integer, primary_key=True, autoincrement=True)
    title = database.Column(database.String(128), unique=True, nullable=False)
    description = database.Column(database.String(512))
    speaker_facebook_id = database.Column(database.BIGINT, database.ForeignKey('speaker.facebook_id'),
                                          nullable=False)
# ... rest of the code ...
|