| column | dtype | range |
|---|---|---|
| commit | stringlengths | 40-40 |
| old_file | stringlengths | 4-106 |
| new_file | stringlengths | 4-106 |
| old_contents | stringlengths | 10-2.94k |
| new_contents | stringlengths | 21-2.95k |
| subject | stringlengths | 16-444 |
| message | stringlengths | 17-2.63k |
| lang | stringclasses | 1 value |
| license | stringclasses | 13 values |
| repos | stringlengths | 7-43k |
| ndiff | stringlengths | 52-3.31k |
| instruction | stringlengths | 16-444 |
| content | stringlengths | 133-4.32k |
| diff | stringlengths | 49-3.61k |
410c47921da205c1628cdff771f3385546edd503
|
src/engine/SCons/Platform/darwin.py
|
src/engine/SCons/Platform/darwin.py
|
#
# Copyright (c) 2001, 2002, 2003, 2004 Steven Knight
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import posix
import os
def generate(env):
posix.generate(env)
env['SHLIBSUFFIX'] = '.dylib'
|
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import posix
import os
def generate(env):
posix.generate(env)
env['SHLIBSUFFIX'] = '.dylib'
|
Fix __COPYRIGHT__ and __REVISION__ in new Darwin module.
|
Fix __COPYRIGHT__ and __REVISION__ in new Darwin module.
|
Python
|
mit
|
Distrotech/scons,Distrotech/scons,Distrotech/scons,Distrotech/scons,Distrotech/scons
|
#
- # Copyright (c) 2001, 2002, 2003, 2004 Steven Knight
+ # __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
+ __revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
+
import posix
import os
def generate(env):
posix.generate(env)
env['SHLIBSUFFIX'] = '.dylib'
|
Fix __COPYRIGHT__ and __REVISION__ in new Darwin module.
|
## Code Before:
#
# Copyright (c) 2001, 2002, 2003, 2004 Steven Knight
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import posix
import os
def generate(env):
posix.generate(env)
env['SHLIBSUFFIX'] = '.dylib'
## Instruction:
Fix __COPYRIGHT__ and __REVISION__ in new Darwin module.
## Code After:
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import posix
import os
def generate(env):
posix.generate(env)
env['SHLIBSUFFIX'] = '.dylib'
|
#
- # Copyright (c) 2001, 2002, 2003, 2004 Steven Knight
+ # __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
+ __revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
+
import posix
import os
def generate(env):
posix.generate(env)
env['SHLIBSUFFIX'] = '.dylib'
|
db0921e0242d478d29115179b9da2ffcd3fa35fb
|
micromanager/resources/__init__.py
|
micromanager/resources/__init__.py
|
from .base import ResourceBase # noqa F401
from .bucket import Bucket # noqa F401
from .sql import SQLInstance # noqa F401
from .bigquery import BQDataset # noqa F401
class Resource():
@staticmethod
def factory(resource_data):
resource_kind_map = {
'storage#bucket': Bucket,
'bigquery#dataset': BQDataset,
'sql#instance': SQLInstance
}
kind = resource_data.get('resource_kind')
if not kind:
assert 0, 'Unrecognized resource'
if kind not in resource_kind_map:
assert 0, 'Unrecognized resource'
cls = resource_kind_map.get(kind)
return cls(resource_data)
|
from .base import ResourceBase # noqa F401
from .bucket import Bucket # noqa F401
from .sql import SQLInstance # noqa F401
from .bigquery import BQDataset # noqa F401
class Resource():
@staticmethod
def factory(resource_data, **kargs):
resource_kind_map = {
'storage#bucket': Bucket,
'bigquery#dataset': BQDataset,
'sql#instance': SQLInstance
}
kind = resource_data.get('resource_kind')
if not kind:
assert 0, 'Unrecognized resource'
if kind not in resource_kind_map:
assert 0, 'Unrecognized resource'
cls = resource_kind_map.get(kind)
return cls(resource_data, **kargs)
|
Allow kargs in resource factory
|
Allow kargs in resource factory
|
Python
|
apache-2.0
|
forseti-security/resource-policy-evaluation-library
|
from .base import ResourceBase # noqa F401
from .bucket import Bucket # noqa F401
from .sql import SQLInstance # noqa F401
from .bigquery import BQDataset # noqa F401
class Resource():
@staticmethod
- def factory(resource_data):
+ def factory(resource_data, **kargs):
resource_kind_map = {
'storage#bucket': Bucket,
'bigquery#dataset': BQDataset,
'sql#instance': SQLInstance
}
kind = resource_data.get('resource_kind')
if not kind:
assert 0, 'Unrecognized resource'
if kind not in resource_kind_map:
assert 0, 'Unrecognized resource'
cls = resource_kind_map.get(kind)
- return cls(resource_data)
+ return cls(resource_data, **kargs)
|
Allow kargs in resource factory
|
## Code Before:
from .base import ResourceBase # noqa F401
from .bucket import Bucket # noqa F401
from .sql import SQLInstance # noqa F401
from .bigquery import BQDataset # noqa F401
class Resource():
@staticmethod
def factory(resource_data):
resource_kind_map = {
'storage#bucket': Bucket,
'bigquery#dataset': BQDataset,
'sql#instance': SQLInstance
}
kind = resource_data.get('resource_kind')
if not kind:
assert 0, 'Unrecognized resource'
if kind not in resource_kind_map:
assert 0, 'Unrecognized resource'
cls = resource_kind_map.get(kind)
return cls(resource_data)
## Instruction:
Allow kargs in resource factory
## Code After:
from .base import ResourceBase # noqa F401
from .bucket import Bucket # noqa F401
from .sql import SQLInstance # noqa F401
from .bigquery import BQDataset # noqa F401
class Resource():
@staticmethod
def factory(resource_data, **kargs):
resource_kind_map = {
'storage#bucket': Bucket,
'bigquery#dataset': BQDataset,
'sql#instance': SQLInstance
}
kind = resource_data.get('resource_kind')
if not kind:
assert 0, 'Unrecognized resource'
if kind not in resource_kind_map:
assert 0, 'Unrecognized resource'
cls = resource_kind_map.get(kind)
return cls(resource_data, **kargs)
|
from .base import ResourceBase # noqa F401
from .bucket import Bucket # noqa F401
from .sql import SQLInstance # noqa F401
from .bigquery import BQDataset # noqa F401
class Resource():
@staticmethod
- def factory(resource_data):
+ def factory(resource_data, **kargs):
? +++++++++
resource_kind_map = {
'storage#bucket': Bucket,
'bigquery#dataset': BQDataset,
'sql#instance': SQLInstance
}
kind = resource_data.get('resource_kind')
if not kind:
assert 0, 'Unrecognized resource'
if kind not in resource_kind_map:
assert 0, 'Unrecognized resource'
cls = resource_kind_map.get(kind)
- return cls(resource_data)
+ return cls(resource_data, **kargs)
? +++++++++
|
1c01b9e794445242c450534d1615a9dc755b89da
|
randcat.py
|
randcat.py
|
import random
random.seed()
while True:
print(chr(random.getrandbits(8)), end='')
|
import random
random.seed() # this initializes with the Date, which I think is a novel enough seed
while True: # if we're going with a mimicing of cat /dev/random, it'll pretty much just go until it's killed
print(chr(random.getrandbits(8)), end='')
|
Add some comments and a shebang on top.
|
Add some comments and a shebang on top.
|
Python
|
apache-2.0
|
Tombert/RandCat
|
import random
- random.seed()
+ random.seed() # this initializes with the Date, which I think is a novel enough seed
- while True:
+ while True: # if we're going with a mimicing of cat /dev/random, it'll pretty much just go until it's killed
- print(chr(random.getrandbits(8)), end='')
+ print(chr(random.getrandbits(8)), end='')
|
Add some comments and a shebang on top.
|
## Code Before:
import random
random.seed()
while True:
print(chr(random.getrandbits(8)), end='')
## Instruction:
Add some comments and a shebang on top.
## Code After:
import random
random.seed() # this initializes with the Date, which I think is a novel enough seed
while True: # if we're going with a mimicing of cat /dev/random, it'll pretty much just go until it's killed
print(chr(random.getrandbits(8)), end='')
|
import random
- random.seed()
+ random.seed() # this initializes with the Date, which I think is a novel enough seed
- while True:
+ while True: # if we're going with a mimicing of cat /dev/random, it'll pretty much just go until it's killed
- print(chr(random.getrandbits(8)), end='')
+ print(chr(random.getrandbits(8)), end='')
? +
|
d18d4f72ed24177143a9dbcf74fc1c001235ded5
|
batch_effect.py
|
batch_effect.py
|
import argparse
import csv
import shutil
import subprocess
import sys
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Chain together Inkscape extensions")
parser.add_argument('--id', type=str, action='append', dest='ids', default=[],
help="id attribute of object to manipulate")
parser.add_argument('--csvpath', type=str, required=True,
help="Path to .csv file containing command lines")
parser.add_argument('svgpath', type=str, nargs='?', default='',
help="Path to temporary SVG file to use for input to the first extension")
args = parser.parse_args()
with open(args.csvpath, 'rb') as f:
# Make an argument list of the ids
id_args = []
for id in args.ids:
id_args.extend(('--id', id))
# Take input for the first call from temporary file or stdin
if args.svgpath:
stream = open(args.svgpath)
else:
stream = sys.stdin
# Execute all the calls
for row in csv.reader(f):
# Insert the ids into the call
call = row[:1] + id_args + row[1:]
# Make the call
p = subprocess.Popen(call, stdin=stream, stdout=subprocess.PIPE)
# Close our handle to the input pipe because we no longer need it
stream.close()
# Grab the output pipe for input into the next call
stream = p.stdout
# Send output from last call on stdout
shutil.copyfileobj(stream, sys.stdout)
|
import argparse
import csv
import shutil
import subprocess
import sys
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Chain together Inkscape extensions")
parser.add_argument('--id', type=str, action='append', dest='ids', default=[],
help="ID attributes of objects to manipulate. Passed to all extensions.")
parser.add_argument('--csvpath', type=str, required=True,
help="Path to .csv file containing command lines")
parser.add_argument('svgpath', type=str, nargs='?', default='',
help="Path to temporary SVG file to use for input to the first extension")
args = parser.parse_args()
with open(args.csvpath, 'rb') as f:
# Make an argument list of the ids
id_args = []
for id in args.ids:
id_args.extend(('--id', id))
# Take input for the first call from temporary file or stdin
if args.svgpath:
stream = open(args.svgpath)
else:
stream = sys.stdin
# Execute all the calls
for row in csv.reader(f):
# Insert the ids into the call
call = row[:1] + id_args + row[1:]
# Make the call
p = subprocess.Popen(call, stdin=stream, stdout=subprocess.PIPE)
# Close our handle to the input pipe because we no longer need it
stream.close()
# Grab the output pipe for input into the next call
stream = p.stdout
# Send output from last call on stdout
shutil.copyfileobj(stream, sys.stdout)
|
Clarify --id parameter help text
|
Clarify --id parameter help text
|
Python
|
mit
|
jturner314/inkscape-batch-effect
|
import argparse
import csv
import shutil
import subprocess
import sys
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Chain together Inkscape extensions")
parser.add_argument('--id', type=str, action='append', dest='ids', default=[],
- help="id attribute of object to manipulate")
+ help="ID attributes of objects to manipulate. Passed to all extensions.")
parser.add_argument('--csvpath', type=str, required=True,
help="Path to .csv file containing command lines")
parser.add_argument('svgpath', type=str, nargs='?', default='',
help="Path to temporary SVG file to use for input to the first extension")
args = parser.parse_args()
with open(args.csvpath, 'rb') as f:
# Make an argument list of the ids
id_args = []
for id in args.ids:
id_args.extend(('--id', id))
# Take input for the first call from temporary file or stdin
if args.svgpath:
stream = open(args.svgpath)
else:
stream = sys.stdin
# Execute all the calls
for row in csv.reader(f):
# Insert the ids into the call
call = row[:1] + id_args + row[1:]
# Make the call
p = subprocess.Popen(call, stdin=stream, stdout=subprocess.PIPE)
# Close our handle to the input pipe because we no longer need it
stream.close()
# Grab the output pipe for input into the next call
stream = p.stdout
# Send output from last call on stdout
shutil.copyfileobj(stream, sys.stdout)
|
Clarify --id parameter help text
|
## Code Before:
import argparse
import csv
import shutil
import subprocess
import sys
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Chain together Inkscape extensions")
parser.add_argument('--id', type=str, action='append', dest='ids', default=[],
help="id attribute of object to manipulate")
parser.add_argument('--csvpath', type=str, required=True,
help="Path to .csv file containing command lines")
parser.add_argument('svgpath', type=str, nargs='?', default='',
help="Path to temporary SVG file to use for input to the first extension")
args = parser.parse_args()
with open(args.csvpath, 'rb') as f:
# Make an argument list of the ids
id_args = []
for id in args.ids:
id_args.extend(('--id', id))
# Take input for the first call from temporary file or stdin
if args.svgpath:
stream = open(args.svgpath)
else:
stream = sys.stdin
# Execute all the calls
for row in csv.reader(f):
# Insert the ids into the call
call = row[:1] + id_args + row[1:]
# Make the call
p = subprocess.Popen(call, stdin=stream, stdout=subprocess.PIPE)
# Close our handle to the input pipe because we no longer need it
stream.close()
# Grab the output pipe for input into the next call
stream = p.stdout
# Send output from last call on stdout
shutil.copyfileobj(stream, sys.stdout)
## Instruction:
Clarify --id parameter help text
## Code After:
import argparse
import csv
import shutil
import subprocess
import sys
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Chain together Inkscape extensions")
parser.add_argument('--id', type=str, action='append', dest='ids', default=[],
help="ID attributes of objects to manipulate. Passed to all extensions.")
parser.add_argument('--csvpath', type=str, required=True,
help="Path to .csv file containing command lines")
parser.add_argument('svgpath', type=str, nargs='?', default='',
help="Path to temporary SVG file to use for input to the first extension")
args = parser.parse_args()
with open(args.csvpath, 'rb') as f:
# Make an argument list of the ids
id_args = []
for id in args.ids:
id_args.extend(('--id', id))
# Take input for the first call from temporary file or stdin
if args.svgpath:
stream = open(args.svgpath)
else:
stream = sys.stdin
# Execute all the calls
for row in csv.reader(f):
# Insert the ids into the call
call = row[:1] + id_args + row[1:]
# Make the call
p = subprocess.Popen(call, stdin=stream, stdout=subprocess.PIPE)
# Close our handle to the input pipe because we no longer need it
stream.close()
# Grab the output pipe for input into the next call
stream = p.stdout
# Send output from last call on stdout
shutil.copyfileobj(stream, sys.stdout)
|
import argparse
import csv
import shutil
import subprocess
import sys
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Chain together Inkscape extensions")
parser.add_argument('--id', type=str, action='append', dest='ids', default=[],
- help="id attribute of object to manipulate")
? ^^
+ help="ID attributes of objects to manipulate. Passed to all extensions.")
? ^^ + + +++++++++++++++++++++++++++
parser.add_argument('--csvpath', type=str, required=True,
help="Path to .csv file containing command lines")
parser.add_argument('svgpath', type=str, nargs='?', default='',
help="Path to temporary SVG file to use for input to the first extension")
args = parser.parse_args()
with open(args.csvpath, 'rb') as f:
# Make an argument list of the ids
id_args = []
for id in args.ids:
id_args.extend(('--id', id))
# Take input for the first call from temporary file or stdin
if args.svgpath:
stream = open(args.svgpath)
else:
stream = sys.stdin
# Execute all the calls
for row in csv.reader(f):
# Insert the ids into the call
call = row[:1] + id_args + row[1:]
# Make the call
p = subprocess.Popen(call, stdin=stream, stdout=subprocess.PIPE)
# Close our handle to the input pipe because we no longer need it
stream.close()
# Grab the output pipe for input into the next call
stream = p.stdout
# Send output from last call on stdout
shutil.copyfileobj(stream, sys.stdout)
|
c790e1dd756495f0e34ff3c2cdadb02b4d6ee320
|
protocols/admin.py
|
protocols/admin.py
|
from django.contrib import admin
from .models import Protocol, Topic, Institution
class ProtocolAdmin(admin.ModelAdmin):
list_display = ['number', 'start_time', 'get_topics', 'information', 'majority', 'current_majority', 'get_absent']
list_display_links = ['number']
list_filter = ['topics']
search_fields =['number', 'information', 'topics__name', 'topics__description']
admin.site.register(Institution)
admin.site.register(Topic)
admin.site.register(Protocol, ProtocolAdmin)
|
from django.contrib import admin
from .models import Protocol, Topic, Institution
class ProtocolAdmin(admin.ModelAdmin):
list_display = ['number', 'start_time', 'get_topics', 'information', 'majority', 'current_majority', 'institution']
list_display_links = ['number']
list_filter = ['institution__name', 'topics']
search_fields =['number', 'institution__name', 'topics__name', 'information']
admin.site.register(Institution)
admin.site.register(Topic)
admin.site.register(Protocol, ProtocolAdmin)
|
Change search and list fields
|
Change search and list fields
|
Python
|
mit
|
Hackfmi/Diaphanum,Hackfmi/Diaphanum
|
from django.contrib import admin
from .models import Protocol, Topic, Institution
class ProtocolAdmin(admin.ModelAdmin):
- list_display = ['number', 'start_time', 'get_topics', 'information', 'majority', 'current_majority', 'get_absent']
+ list_display = ['number', 'start_time', 'get_topics', 'information', 'majority', 'current_majority', 'institution']
list_display_links = ['number']
- list_filter = ['topics']
+ list_filter = ['institution__name', 'topics']
- search_fields =['number', 'information', 'topics__name', 'topics__description']
+ search_fields =['number', 'institution__name', 'topics__name', 'information']
admin.site.register(Institution)
admin.site.register(Topic)
admin.site.register(Protocol, ProtocolAdmin)
|
Change search and list fields
|
## Code Before:
from django.contrib import admin
from .models import Protocol, Topic, Institution
class ProtocolAdmin(admin.ModelAdmin):
list_display = ['number', 'start_time', 'get_topics', 'information', 'majority', 'current_majority', 'get_absent']
list_display_links = ['number']
list_filter = ['topics']
search_fields =['number', 'information', 'topics__name', 'topics__description']
admin.site.register(Institution)
admin.site.register(Topic)
admin.site.register(Protocol, ProtocolAdmin)
## Instruction:
Change search and list fields
## Code After:
from django.contrib import admin
from .models import Protocol, Topic, Institution
class ProtocolAdmin(admin.ModelAdmin):
list_display = ['number', 'start_time', 'get_topics', 'information', 'majority', 'current_majority', 'institution']
list_display_links = ['number']
list_filter = ['institution__name', 'topics']
search_fields =['number', 'institution__name', 'topics__name', 'information']
admin.site.register(Institution)
admin.site.register(Topic)
admin.site.register(Protocol, ProtocolAdmin)
|
from django.contrib import admin
from .models import Protocol, Topic, Institution
class ProtocolAdmin(admin.ModelAdmin):
- list_display = ['number', 'start_time', 'get_topics', 'information', 'majority', 'current_majority', 'get_absent']
? ^^ ^^^^^ -
+ list_display = ['number', 'start_time', 'get_topics', 'information', 'majority', 'current_majority', 'institution']
? ^^^ ^^^^^^
list_display_links = ['number']
- list_filter = ['topics']
+ list_filter = ['institution__name', 'topics']
- search_fields =['number', 'information', 'topics__name', 'topics__description']
? ^^^^^ ^ ---------- ^^
+ search_fields =['number', 'institution__name', 'topics__name', 'information']
? ^^^^^ ++++++ ^^^ ^^
admin.site.register(Institution)
admin.site.register(Topic)
admin.site.register(Protocol, ProtocolAdmin)
|
33e40319b5d670c3fa1a1423bf7eed1865115d5c
|
sitetools/venv_hook/sitecustomize.py
|
sitetools/venv_hook/sitecustomize.py
|
import imp
import os
import sys
import warnings
try:
try:
import sitetools._startup
except ImportError:
# Pull in the sitetools that goes with this sitecustomize.
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Let this ImportError raise.
import sitetools._startup
except Exception as e:
warnings.warn("Error while importing sitetools._startup: %s" % e)
# Be a good citizen and find the next sitecustomize module.
my_path = os.path.dirname(os.path.abspath(__file__))
clean_path = [x for x in sys.path if os.path.abspath(x) != my_path]
try:
args = imp.find_module('sitecustomize', clean_path)
except ImportError:
pass
else:
imp.load_module('sitecustomize', *args)
|
import imp
import os
import sys
import warnings
try:
try:
import sitetools._startup
except ImportError:
# Pull in the sitetools that goes with this sitecustomize.
sys.path.append(os.path.abspath(os.path.join(
__file__,
'..', '..', '..'
)))
# Let this ImportError raise.
import sitetools._startup
except Exception as e:
warnings.warn("Error while importing sitetools._startup: %s" % e)
# Be a good citizen and find the next sitecustomize module.
my_path = os.path.dirname(os.path.abspath(__file__))
clean_path = [x for x in sys.path if os.path.abspath(x) != my_path]
try:
args = imp.find_module('sitecustomize', clean_path)
except ImportError:
pass
else:
imp.load_module('sitecustomize', *args)
|
Adjust venv_hook to work in new location
|
Adjust venv_hook to work in new location
|
Python
|
bsd-3-clause
|
westernx/sitetools,westernx/sitetools,mikeboers/sitetools
|
import imp
import os
import sys
import warnings
try:
try:
import sitetools._startup
except ImportError:
# Pull in the sitetools that goes with this sitecustomize.
- sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+ sys.path.append(os.path.abspath(os.path.join(
+ __file__,
+ '..', '..', '..'
+ )))
# Let this ImportError raise.
import sitetools._startup
except Exception as e:
warnings.warn("Error while importing sitetools._startup: %s" % e)
# Be a good citizen and find the next sitecustomize module.
my_path = os.path.dirname(os.path.abspath(__file__))
clean_path = [x for x in sys.path if os.path.abspath(x) != my_path]
try:
args = imp.find_module('sitecustomize', clean_path)
except ImportError:
pass
else:
imp.load_module('sitecustomize', *args)
|
Adjust venv_hook to work in new location
|
## Code Before:
import imp
import os
import sys
import warnings
try:
try:
import sitetools._startup
except ImportError:
# Pull in the sitetools that goes with this sitecustomize.
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Let this ImportError raise.
import sitetools._startup
except Exception as e:
warnings.warn("Error while importing sitetools._startup: %s" % e)
# Be a good citizen and find the next sitecustomize module.
my_path = os.path.dirname(os.path.abspath(__file__))
clean_path = [x for x in sys.path if os.path.abspath(x) != my_path]
try:
args = imp.find_module('sitecustomize', clean_path)
except ImportError:
pass
else:
imp.load_module('sitecustomize', *args)
## Instruction:
Adjust venv_hook to work in new location
## Code After:
import imp
import os
import sys
import warnings
try:
try:
import sitetools._startup
except ImportError:
# Pull in the sitetools that goes with this sitecustomize.
sys.path.append(os.path.abspath(os.path.join(
__file__,
'..', '..', '..'
)))
# Let this ImportError raise.
import sitetools._startup
except Exception as e:
warnings.warn("Error while importing sitetools._startup: %s" % e)
# Be a good citizen and find the next sitecustomize module.
my_path = os.path.dirname(os.path.abspath(__file__))
clean_path = [x for x in sys.path if os.path.abspath(x) != my_path]
try:
args = imp.find_module('sitecustomize', clean_path)
except ImportError:
pass
else:
imp.load_module('sitecustomize', *args)
|
import imp
import os
import sys
import warnings
try:
try:
import sitetools._startup
except ImportError:
# Pull in the sitetools that goes with this sitecustomize.
- sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+ sys.path.append(os.path.abspath(os.path.join(
+ __file__,
+ '..', '..', '..'
+ )))
# Let this ImportError raise.
import sitetools._startup
except Exception as e:
warnings.warn("Error while importing sitetools._startup: %s" % e)
# Be a good citizen and find the next sitecustomize module.
my_path = os.path.dirname(os.path.abspath(__file__))
clean_path = [x for x in sys.path if os.path.abspath(x) != my_path]
try:
args = imp.find_module('sitecustomize', clean_path)
except ImportError:
pass
else:
imp.load_module('sitecustomize', *args)
|
6fd5e51a797f3d85954f6a4c97eacc008b0e4d48
|
tohu/v5/namespace.py
|
tohu/v5/namespace.py
|
from bidict import bidict, ValueDuplicationError
def is_anonymous(name):
return name.startswith("ANONYMOUS_ANONYMOUS_ANONYMOUS_")
class TohuNamespaceError(Exception):
"""
Custom exception.
"""
class TohuNamespace:
def __init__(self):
self.generators = bidict()
def __len__(self):
return len(self.generators)
def __getitem__(self, key):
return self.generators[key]
def add_generator(self, g, name=None):
if name is None:
name = f"ANONYMOUS_ANONYMOUS_ANONYMOUS_{g.tohu_id}"
if name in self.generators and self.generators[name] is not g:
raise TohuNamespaceError("A different generator with the same name already exists.")
try:
self.generators[name] = g
except ValueDuplicationError:
existing_name = self.generators.inv[g]
if is_anonymous(existing_name) and not is_anonymous(name):
self.generators.inv[g] = name
def add_generator_with_dependencies(self, g, name=None):
self.add_generator(g, name=name)
for c in g._input_generators:
self.add_generator(c)
|
from mako.template import Template
import textwrap
from bidict import bidict, ValueDuplicationError
def is_anonymous(name):
return name.startswith("ANONYMOUS_ANONYMOUS_ANONYMOUS_")
class TohuNamespaceError(Exception):
"""
Custom exception.
"""
class TohuNamespace:
def __init__(self):
self.generators = bidict()
def __repr__(self):
s = Template(textwrap.dedent("""\
<TohuNameSpace:
%for name, g in items:
${name}: ${g}
%endfor
>
""")).render(items=self.generators.items())
return s
def __len__(self):
return len(self.generators)
def __getitem__(self, key):
return self.generators[key]
def add_generator(self, g, name=None):
if name is None:
name = f"ANONYMOUS_ANONYMOUS_ANONYMOUS_{g.tohu_id}"
if name in self.generators and self.generators[name] is not g:
raise TohuNamespaceError("A different generator with the same name already exists.")
try:
self.generators[name] = g
except ValueDuplicationError:
existing_name = self.generators.inv[g]
if is_anonymous(existing_name) and not is_anonymous(name):
self.generators.inv[g] = name
def add_generator_with_dependencies(self, g, name=None):
self.add_generator(g, name=name)
for c in g._input_generators:
self.add_generator(c)
|
Add repr method for TohuNamespace
|
Add repr method for TohuNamespace
|
Python
|
mit
|
maxalbert/tohu
|
+ from mako.template import Template
+ import textwrap
from bidict import bidict, ValueDuplicationError
def is_anonymous(name):
return name.startswith("ANONYMOUS_ANONYMOUS_ANONYMOUS_")
class TohuNamespaceError(Exception):
"""
Custom exception.
"""
class TohuNamespace:
def __init__(self):
self.generators = bidict()
+
+ def __repr__(self):
+ s = Template(textwrap.dedent("""\
+ <TohuNameSpace:
+ %for name, g in items:
+ ${name}: ${g}
+ %endfor
+ >
+ """)).render(items=self.generators.items())
+ return s
def __len__(self):
return len(self.generators)
def __getitem__(self, key):
return self.generators[key]
def add_generator(self, g, name=None):
if name is None:
name = f"ANONYMOUS_ANONYMOUS_ANONYMOUS_{g.tohu_id}"
if name in self.generators and self.generators[name] is not g:
raise TohuNamespaceError("A different generator with the same name already exists.")
try:
self.generators[name] = g
except ValueDuplicationError:
existing_name = self.generators.inv[g]
if is_anonymous(existing_name) and not is_anonymous(name):
self.generators.inv[g] = name
def add_generator_with_dependencies(self, g, name=None):
self.add_generator(g, name=name)
for c in g._input_generators:
self.add_generator(c)
|
Add repr method for TohuNamespace
|
## Code Before:
from bidict import bidict, ValueDuplicationError
def is_anonymous(name):
return name.startswith("ANONYMOUS_ANONYMOUS_ANONYMOUS_")
class TohuNamespaceError(Exception):
"""
Custom exception.
"""
class TohuNamespace:
def __init__(self):
self.generators = bidict()
def __len__(self):
return len(self.generators)
def __getitem__(self, key):
return self.generators[key]
def add_generator(self, g, name=None):
if name is None:
name = f"ANONYMOUS_ANONYMOUS_ANONYMOUS_{g.tohu_id}"
if name in self.generators and self.generators[name] is not g:
raise TohuNamespaceError("A different generator with the same name already exists.")
try:
self.generators[name] = g
except ValueDuplicationError:
existing_name = self.generators.inv[g]
if is_anonymous(existing_name) and not is_anonymous(name):
self.generators.inv[g] = name
def add_generator_with_dependencies(self, g, name=None):
self.add_generator(g, name=name)
for c in g._input_generators:
self.add_generator(c)
## Instruction:
Add repr method for TohuNamespace
## Code After:
from mako.template import Template
import textwrap
from bidict import bidict, ValueDuplicationError
def is_anonymous(name):
return name.startswith("ANONYMOUS_ANONYMOUS_ANONYMOUS_")
class TohuNamespaceError(Exception):
"""
Custom exception.
"""
class TohuNamespace:
def __init__(self):
self.generators = bidict()
def __repr__(self):
s = Template(textwrap.dedent("""\
<TohuNameSpace:
%for name, g in items:
${name}: ${g}
%endfor
>
""")).render(items=self.generators.items())
return s
def __len__(self):
return len(self.generators)
def __getitem__(self, key):
return self.generators[key]
def add_generator(self, g, name=None):
if name is None:
name = f"ANONYMOUS_ANONYMOUS_ANONYMOUS_{g.tohu_id}"
if name in self.generators and self.generators[name] is not g:
raise TohuNamespaceError("A different generator with the same name already exists.")
try:
self.generators[name] = g
except ValueDuplicationError:
existing_name = self.generators.inv[g]
if is_anonymous(existing_name) and not is_anonymous(name):
self.generators.inv[g] = name
def add_generator_with_dependencies(self, g, name=None):
self.add_generator(g, name=name)
for c in g._input_generators:
self.add_generator(c)
|
+ from mako.template import Template
+ import textwrap
from bidict import bidict, ValueDuplicationError
def is_anonymous(name):
return name.startswith("ANONYMOUS_ANONYMOUS_ANONYMOUS_")
class TohuNamespaceError(Exception):
"""
Custom exception.
"""
class TohuNamespace:
def __init__(self):
self.generators = bidict()
+
+ def __repr__(self):
+ s = Template(textwrap.dedent("""\
+ <TohuNameSpace:
+ %for name, g in items:
+ ${name}: ${g}
+ %endfor
+ >
+ """)).render(items=self.generators.items())
+ return s
def __len__(self):
return len(self.generators)
def __getitem__(self, key):
return self.generators[key]
def add_generator(self, g, name=None):
if name is None:
name = f"ANONYMOUS_ANONYMOUS_ANONYMOUS_{g.tohu_id}"
if name in self.generators and self.generators[name] is not g:
raise TohuNamespaceError("A different generator with the same name already exists.")
try:
self.generators[name] = g
except ValueDuplicationError:
existing_name = self.generators.inv[g]
if is_anonymous(existing_name) and not is_anonymous(name):
self.generators.inv[g] = name
def add_generator_with_dependencies(self, g, name=None):
self.add_generator(g, name=name)
for c in g._input_generators:
self.add_generator(c)
|
cd8407831091d169677d278d3ad9b5b92600b30a
|
generator/generator.py
|
generator/generator.py
|
from helper import Helper
from renderer import Renderer
class Generator(object):
@classmethod
def generate_statements(cls, class_list_def):
"""
:type class_list_def: str
"""
class_def_list = Helper.parse_definition_string(class_list_def)
member_def_statement = Renderer.gen_all_members(class_def_list)
constructor_statement = Renderer.gen_constructor_statement(class_def_list)
result = member_def_statement + constructor_statement
cls.copy_to_clipboard(result)
return result
@classmethod
def copy_to_clipboard(cls, result):
"""
If the dependent clipboard support is available, copy the result
to the system clipboard.
:param result:
:return:
"""
try:
from pyperclip.pyperclip import copy
copy(result)
except ImportError, Exception:
pass
|
from helper import Helper
from renderer import Renderer
class Generator(object):
@classmethod
def generate_statements(cls, class_list_def):
"""
:type class_list_def: str
"""
class_def_list = Helper.parse_definition_string(class_list_def)
member_def_statement = Renderer.gen_all_members(class_def_list)
constructor_statement = Renderer.gen_constructor_statement(class_def_list)
result = member_def_statement + constructor_statement
cls.copy_to_clipboard(result)
return result
@classmethod
def copy_to_clipboard(cls, result):
"""
If the dependent clipboard support is available, copy the result
to the system clipboard.
:param result:
:return:
"""
try:
from pyperclip.pyperclip import copy
copy(result)
except Exception:
pass
|
Fix exception handling syntax error
|
Fix exception handling syntax error
|
Python
|
apache-2.0
|
HappyRay/php-di-generator
|
from helper import Helper
from renderer import Renderer
class Generator(object):
@classmethod
def generate_statements(cls, class_list_def):
"""
:type class_list_def: str
"""
class_def_list = Helper.parse_definition_string(class_list_def)
member_def_statement = Renderer.gen_all_members(class_def_list)
constructor_statement = Renderer.gen_constructor_statement(class_def_list)
result = member_def_statement + constructor_statement
cls.copy_to_clipboard(result)
return result
@classmethod
def copy_to_clipboard(cls, result):
"""
If the dependent clipboard support is available, copy the result
to the system clipboard.
:param result:
:return:
"""
try:
from pyperclip.pyperclip import copy
copy(result)
- except ImportError, Exception:
+ except Exception:
pass
|
Fix exception handling syntax error
|
## Code Before:
from helper import Helper
from renderer import Renderer
class Generator(object):
@classmethod
def generate_statements(cls, class_list_def):
"""
:type class_list_def: str
"""
class_def_list = Helper.parse_definition_string(class_list_def)
member_def_statement = Renderer.gen_all_members(class_def_list)
constructor_statement = Renderer.gen_constructor_statement(class_def_list)
result = member_def_statement + constructor_statement
cls.copy_to_clipboard(result)
return result
@classmethod
def copy_to_clipboard(cls, result):
"""
If the dependent clipboard support is available, copy the result
to the system clipboard.
:param result:
:return:
"""
try:
from pyperclip.pyperclip import copy
copy(result)
except ImportError, Exception:
pass
## Instruction:
Fix exception handling syntax error
## Code After:
from helper import Helper
from renderer import Renderer
class Generator(object):
@classmethod
def generate_statements(cls, class_list_def):
"""
:type class_list_def: str
"""
class_def_list = Helper.parse_definition_string(class_list_def)
member_def_statement = Renderer.gen_all_members(class_def_list)
constructor_statement = Renderer.gen_constructor_statement(class_def_list)
result = member_def_statement + constructor_statement
cls.copy_to_clipboard(result)
return result
@classmethod
def copy_to_clipboard(cls, result):
"""
If the dependent clipboard support is available, copy the result
to the system clipboard.
:param result:
:return:
"""
try:
from pyperclip.pyperclip import copy
copy(result)
except Exception:
pass
|
from helper import Helper
from renderer import Renderer
class Generator(object):
@classmethod
def generate_statements(cls, class_list_def):
"""
:type class_list_def: str
"""
class_def_list = Helper.parse_definition_string(class_list_def)
member_def_statement = Renderer.gen_all_members(class_def_list)
constructor_statement = Renderer.gen_constructor_statement(class_def_list)
result = member_def_statement + constructor_statement
cls.copy_to_clipboard(result)
return result
@classmethod
def copy_to_clipboard(cls, result):
"""
If the dependent clipboard support is available, copy the result
to the system clipboard.
:param result:
:return:
"""
try:
from pyperclip.pyperclip import copy
copy(result)
- except ImportError, Exception:
? -------------
+ except Exception:
pass
|
f3978f2bee9fdbef4e2d415e4a6e584e451f4da4
|
nbtutor/__init__.py
|
nbtutor/__init__.py
|
import os
try:
from nbconvert.preprocessors.base import Preprocessor
except ImportError:
from IPython.nbconvert.preprocessors.base import Preprocessor
from traitlets import Unicode
class ClearExercisePreprocessor(Preprocessor):
solutions_dir = Unicode("_solutions").tag(config=True)
def __init__(self, **kw):
if not os.path.exists(self.solutions_dir):
os.makedirs(self.solutions_dir)
self.solution_count = 1
super(Preprocessor, self).__init__(**kw)
def preprocess_cell(self, cell, resources, index):
if 'clear_cell' in cell.metadata and cell.metadata.clear_cell:
fname = os.path.join(
self.solutions_dir, resources['metadata']['name'] + str(self.solution_count) + '.py')
with open(fname, 'w') as f:
f.write(cell['source'])
cell['source'] = ["# %load {0}".format(fname)]
cell['outputs'] = []
# cell['source'] = []
self.solution_count += 1
return cell, resources
|
import os
try:
from nbconvert.preprocessors.base import Preprocessor
except ImportError:
from IPython.nbconvert.preprocessors.base import Preprocessor
from traitlets import Unicode
class ClearExercisePreprocessor(Preprocessor):
solutions_dir = Unicode("_solutions").tag(config=True)
def __init__(self, **kw):
if not os.path.exists(self.solutions_dir):
os.makedirs(self.solutions_dir)
self.solution_count = 1
super(Preprocessor, self).__init__(**kw)
def preprocess_cell(self, cell, resources, index):
if 'tags' in cell.metadata and 'nbtutor-solution' in cell.metadata.tags:
fname = os.path.join(
self.solutions_dir, resources['metadata']['name'] + str(self.solution_count) + '.py')
with open(fname, 'w') as f:
f.write(cell['source'])
cell['source'] = ["# %load {0}".format(fname)]
cell['outputs'] = []
# cell['source'] = []
self.solution_count += 1
return cell, resources
|
Update to use tags instead of custom metadata
|
Update to use tags instead of custom metadata
|
Python
|
bsd-2-clause
|
jorisvandenbossche/nbtutor,jorisvandenbossche/nbtutor
|
import os
try:
from nbconvert.preprocessors.base import Preprocessor
except ImportError:
from IPython.nbconvert.preprocessors.base import Preprocessor
from traitlets import Unicode
class ClearExercisePreprocessor(Preprocessor):
solutions_dir = Unicode("_solutions").tag(config=True)
def __init__(self, **kw):
if not os.path.exists(self.solutions_dir):
os.makedirs(self.solutions_dir)
self.solution_count = 1
super(Preprocessor, self).__init__(**kw)
def preprocess_cell(self, cell, resources, index):
- if 'clear_cell' in cell.metadata and cell.metadata.clear_cell:
+ if 'tags' in cell.metadata and 'nbtutor-solution' in cell.metadata.tags:
fname = os.path.join(
self.solutions_dir, resources['metadata']['name'] + str(self.solution_count) + '.py')
with open(fname, 'w') as f:
f.write(cell['source'])
cell['source'] = ["# %load {0}".format(fname)]
cell['outputs'] = []
# cell['source'] = []
self.solution_count += 1
return cell, resources
|
Update to use tags instead of custom metadata
|
## Code Before:
import os
try:
from nbconvert.preprocessors.base import Preprocessor
except ImportError:
from IPython.nbconvert.preprocessors.base import Preprocessor
from traitlets import Unicode
class ClearExercisePreprocessor(Preprocessor):
solutions_dir = Unicode("_solutions").tag(config=True)
def __init__(self, **kw):
if not os.path.exists(self.solutions_dir):
os.makedirs(self.solutions_dir)
self.solution_count = 1
super(Preprocessor, self).__init__(**kw)
def preprocess_cell(self, cell, resources, index):
if 'clear_cell' in cell.metadata and cell.metadata.clear_cell:
fname = os.path.join(
self.solutions_dir, resources['metadata']['name'] + str(self.solution_count) + '.py')
with open(fname, 'w') as f:
f.write(cell['source'])
cell['source'] = ["# %load {0}".format(fname)]
cell['outputs'] = []
# cell['source'] = []
self.solution_count += 1
return cell, resources
## Instruction:
Update to use tags instead of custom metadata
## Code After:
import os
try:
from nbconvert.preprocessors.base import Preprocessor
except ImportError:
from IPython.nbconvert.preprocessors.base import Preprocessor
from traitlets import Unicode
class ClearExercisePreprocessor(Preprocessor):
solutions_dir = Unicode("_solutions").tag(config=True)
def __init__(self, **kw):
if not os.path.exists(self.solutions_dir):
os.makedirs(self.solutions_dir)
self.solution_count = 1
super(Preprocessor, self).__init__(**kw)
def preprocess_cell(self, cell, resources, index):
if 'tags' in cell.metadata and 'nbtutor-solution' in cell.metadata.tags:
fname = os.path.join(
self.solutions_dir, resources['metadata']['name'] + str(self.solution_count) + '.py')
with open(fname, 'w') as f:
f.write(cell['source'])
cell['source'] = ["# %load {0}".format(fname)]
cell['outputs'] = []
# cell['source'] = []
self.solution_count += 1
return cell, resources
|
import os
try:
from nbconvert.preprocessors.base import Preprocessor
except ImportError:
from IPython.nbconvert.preprocessors.base import Preprocessor
from traitlets import Unicode
class ClearExercisePreprocessor(Preprocessor):
solutions_dir = Unicode("_solutions").tag(config=True)
def __init__(self, **kw):
if not os.path.exists(self.solutions_dir):
os.makedirs(self.solutions_dir)
self.solution_count = 1
super(Preprocessor, self).__init__(**kw)
def preprocess_cell(self, cell, resources, index):
- if 'clear_cell' in cell.metadata and cell.metadata.clear_cell:
+ if 'tags' in cell.metadata and 'nbtutor-solution' in cell.metadata.tags:
fname = os.path.join(
self.solutions_dir, resources['metadata']['name'] + str(self.solution_count) + '.py')
with open(fname, 'w') as f:
f.write(cell['source'])
cell['source'] = ["# %load {0}".format(fname)]
cell['outputs'] = []
# cell['source'] = []
self.solution_count += 1
return cell, resources
|
c92d9c6da02dacdd91a21c3c5675940154c0e21a
|
cla_backend/apps/reports/db/backend/base.py
|
cla_backend/apps/reports/db/backend/base.py
|
from django.db.backends.postgresql_psycopg2.base import * # noqa
class DynamicTimezoneDatabaseWrapper(DatabaseWrapper):
'''
This exists to allow report generation SQL to set the time zone of the
connection without interference from Django, which normally tries to
ensure that all connections are UTC if `USE_TZ` is `True`.
'''
def create_cursor(self):
cursor = self.connection.cursor()
cursor.tzinfo_factory = None
return cursor
DatabaseWrapper = DynamicTimezoneDatabaseWrapper
|
from django.db.backends.postgresql_psycopg2.base import * # noqa
import pytz
def local_tzinfo_factory(offset):
'''
Create a tzinfo object using the offset of the db connection. This ensures
that the datetimes returned are timezone aware and will be printed in the
reports with timezone information.
'''
return pytz.FixedOffset(offset)
class DynamicTimezoneDatabaseWrapper(DatabaseWrapper):
'''
This exists to allow report generation SQL to set the time zone of the
connection without interference from Django, which normally tries to
ensure that all connections are UTC if `USE_TZ` is `True`.
'''
def create_cursor(self):
cursor = self.connection.cursor()
cursor.tzinfo_factory = local_tzinfo_factory
return cursor
DatabaseWrapper = DynamicTimezoneDatabaseWrapper
|
Add a tzinfo factory method to replica connection to create local tzinfos
|
Add a tzinfo factory method to replica connection to create local tzinfos
This is to ensure that the datetimes returned for report generation
are timezone aware and will thus be printed in the reports with
timezone information.
|
Python
|
mit
|
ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend
|
from django.db.backends.postgresql_psycopg2.base import * # noqa
+ import pytz
+
+
+ def local_tzinfo_factory(offset):
+ '''
+ Create a tzinfo object using the offset of the db connection. This ensures
+ that the datetimes returned are timezone aware and will be printed in the
+ reports with timezone information.
+ '''
+ return pytz.FixedOffset(offset)
class DynamicTimezoneDatabaseWrapper(DatabaseWrapper):
'''
This exists to allow report generation SQL to set the time zone of the
connection without interference from Django, which normally tries to
ensure that all connections are UTC if `USE_TZ` is `True`.
'''
def create_cursor(self):
cursor = self.connection.cursor()
- cursor.tzinfo_factory = None
+ cursor.tzinfo_factory = local_tzinfo_factory
return cursor
DatabaseWrapper = DynamicTimezoneDatabaseWrapper
|
Add a tzinfo factory method to replica connection to create local tzinfos
|
## Code Before:
from django.db.backends.postgresql_psycopg2.base import * # noqa
class DynamicTimezoneDatabaseWrapper(DatabaseWrapper):
'''
This exists to allow report generation SQL to set the time zone of the
connection without interference from Django, which normally tries to
ensure that all connections are UTC if `USE_TZ` is `True`.
'''
def create_cursor(self):
cursor = self.connection.cursor()
cursor.tzinfo_factory = None
return cursor
DatabaseWrapper = DynamicTimezoneDatabaseWrapper
## Instruction:
Add a tzinfo factory method to replica connection to create local tzinfos
## Code After:
from django.db.backends.postgresql_psycopg2.base import * # noqa
import pytz
def local_tzinfo_factory(offset):
'''
Create a tzinfo object using the offset of the db connection. This ensures
that the datetimes returned are timezone aware and will be printed in the
reports with timezone information.
'''
return pytz.FixedOffset(offset)
class DynamicTimezoneDatabaseWrapper(DatabaseWrapper):
'''
This exists to allow report generation SQL to set the time zone of the
connection without interference from Django, which normally tries to
ensure that all connections are UTC if `USE_TZ` is `True`.
'''
def create_cursor(self):
cursor = self.connection.cursor()
cursor.tzinfo_factory = local_tzinfo_factory
return cursor
DatabaseWrapper = DynamicTimezoneDatabaseWrapper
|
from django.db.backends.postgresql_psycopg2.base import * # noqa
+ import pytz
+
+
+ def local_tzinfo_factory(offset):
+ '''
+ Create a tzinfo object using the offset of the db connection. This ensures
+ that the datetimes returned are timezone aware and will be printed in the
+ reports with timezone information.
+ '''
+ return pytz.FixedOffset(offset)
class DynamicTimezoneDatabaseWrapper(DatabaseWrapper):
'''
This exists to allow report generation SQL to set the time zone of the
connection without interference from Django, which normally tries to
ensure that all connections are UTC if `USE_TZ` is `True`.
'''
def create_cursor(self):
cursor = self.connection.cursor()
- cursor.tzinfo_factory = None
? ^ ^
+ cursor.tzinfo_factory = local_tzinfo_factory
? ^ +++++++ ^^^^^^^^^^
return cursor
DatabaseWrapper = DynamicTimezoneDatabaseWrapper
|
8866de1785cc6961d2111f1e0f55b781a7de660d
|
_markerlib/__init__.py
|
_markerlib/__init__.py
|
"""Used by pkg_resources to interpret PEP 345 environment markers."""
from _markerlib.markers import default_environment, compile, interpret, as_function
|
"""Used by pkg_resources to interpret PEP 345 environment markers."""
from _markerlib.markers import default_environment, compile, interpret
|
Remove missing import (since b62968cd2666)
|
Remove missing import (since b62968cd2666)
--HG--
branch : distribute
extra : rebase_source : d1190f895d794dfcb838f7eb40a60ab07b8b309e
|
Python
|
mit
|
pypa/setuptools,pypa/setuptools,pypa/setuptools
|
"""Used by pkg_resources to interpret PEP 345 environment markers."""
- from _markerlib.markers import default_environment, compile, interpret, as_function
+ from _markerlib.markers import default_environment, compile, interpret
|
Remove missing import (since b62968cd2666)
|
## Code Before:
"""Used by pkg_resources to interpret PEP 345 environment markers."""
from _markerlib.markers import default_environment, compile, interpret, as_function
## Instruction:
Remove missing import (since b62968cd2666)
## Code After:
"""Used by pkg_resources to interpret PEP 345 environment markers."""
from _markerlib.markers import default_environment, compile, interpret
|
"""Used by pkg_resources to interpret PEP 345 environment markers."""
- from _markerlib.markers import default_environment, compile, interpret, as_function
? -------------
+ from _markerlib.markers import default_environment, compile, interpret
|
0c6930f1930dbba66ba928dab4ed195e6b6bf2cc
|
addons/crm/__terp__.py
|
addons/crm/__terp__.py
|
{
"name" : "Customer & Supplier Relationship Management",
"version" : "1.0",
"author" : "Tiny",
"website" : "http://tinyerp.com/module_crm.html",
"category" : "Generic Modules/CRM & SRM",
"description": """The Tiny ERP case and request tracker enables a group of
people to intelligently and efficiently manage tasks, issues, and requests.
It manages key tasks such as communication, identification, prioritization,
assignment, resolution and notification.
Tiny ERP ensures that all cases are successfly tracked by users, customers and
suppliers. It can automatically send reminders, escalate the request, trigger
specific methods and lots of others actions based on your enterprise own rules.
The greatest thing about this system is that users don't need to do anything
special. They can just send email to the request tracker. Tiny ERP will take
care of thanking them for their message, automatically routing it to the
appropriate staff, and making sure all future correspondence gets to the right
place.
The CRM module has a email gateway for the synchronisation interface
between mails and Tiny ERP.""",
"depends" : ["base", "account"],
"init_xml" : ["crm_data.xml"],
"demo_xml" : ["crm_demo.xml"],
"update_xml" : ["crm_view.xml", "crm_report.xml", "crm_wizard.xml"],
"active": False,
"installable": True
}
|
{
"name" : "Customer & Supplier Relationship Management",
"version" : "1.0",
"author" : "Tiny",
"website" : "http://tinyerp.com/module_crm.html",
"category" : "Generic Modules/CRM & SRM",
"description": """The Tiny ERP case and request tracker enables a group of
people to intelligently and efficiently manage tasks, issues, and requests.
It manages key tasks such as communication, identification, prioritization,
assignment, resolution and notification.
Tiny ERP ensures that all cases are successfly tracked by users, customers and
suppliers. It can automatically send reminders, escalate the request, trigger
specific methods and lots of others actions based on your enterprise own rules.
The greatest thing about this system is that users don't need to do anything
special. They can just send email to the request tracker. Tiny ERP will take
care of thanking them for their message, automatically routing it to the
appropriate staff, and making sure all future correspondence gets to the right
place.
The CRM module has a email gateway for the synchronisation interface
between mails and Tiny ERP.""",
"depends" : ["base", "account"],
"init_xml" : ["crm_data.xml"],
"demo_xml" : ["crm_demo.xml"],
"update_xml" : ["crm_view.xml", "crm_report.xml", "crm_wizard.xml","crm_security.xml"],
"active": False,
"installable": True
}
|
Add crm_security.xml file entry in update_xml section
|
Add crm_security.xml file entry in update_xml section
bzr revid: [email protected]
|
Python
|
agpl-3.0
|
rahuldhote/odoo,CopeX/odoo,CopeX/odoo,JonathanStein/odoo,colinnewell/odoo,Codefans-fan/odoo,mustafat/odoo-1,shivam1111/odoo,CubicERP/odoo,andreparames/odoo,gdgellatly/OCB1,PongPi/isl-odoo,Nick-OpusVL/odoo,damdam-s/OpenUpgrade,ecosoft-odoo/odoo,andreparames/odoo,grap/OCB,NL66278/OCB,Daniel-CA/odoo,OpusVL/odoo,luistorresm/odoo,diagramsoftware/odoo,klunwebale/odoo,apanju/odoo,colinnewell/odoo,srimai/odoo,cpyou/odoo,apocalypsebg/odoo,Nowheresly/odoo,rowemoore/odoo,makinacorpus/odoo,hip-odoo/odoo,rahuldhote/odoo,alexcuellar/odoo,ramadhane/odoo,dfang/odoo,Adel-Magebinary/odoo,Elico-Corp/odoo_OCB,jolevq/odoopub,Daniel-CA/odoo,draugiskisprendimai/odoo,shivam1111/odoo,hassoon3/odoo,alexcuellar/odoo,brijeshkesariya/odoo,gsmartway/odoo,tvtsoft/odoo8,rgeleta/odoo,waytai/odoo,BT-astauder/odoo,arthru/OpenUpgrade,dllsf/odootest,SerpentCS/odoo,abdellatifkarroum/odoo,tvibliani/odoo,gorjuce/odoo,fjbatresv/odoo,apanju/odoo,dsfsdgsbngfggb/odoo,ujjwalwahi/odoo,hoatle/odoo,damdam-s/OpenUpgrade,fjbatresv/odoo,optima-ict/odoo,hbrunn/OpenUpgrade,fuhongliang/odoo,BT-rmartin/odoo,hip-odoo/odoo,bealdav/OpenUpgrade,erkrishna9/odoo,AuyaJackie/odoo,0k/odoo,rowemoore/odoo,takis/odoo,Noviat/odoo,BT-fgarbely/odoo,Grirrane/odoo,Ernesto99/odoo,pedrobaeza/odoo,florian-dacosta/OpenUpgrade,tarzan0820/odoo,Gitlab11/odoo,ShineFan/odoo,gvb/odoo,jaxkodex/odoo,Elico-Corp/odoo_OCB,joshuajan/odoo,cloud9UG/odoo,tvtsoft/odoo8,camptocamp/ngo-addons-backport,joariasl/odoo,CatsAndDogsbvba/odoo,0k/OpenUpgrade,massot/odoo,nuuuboo/odoo,kittiu/odoo,ApuliaSoftware/odoo,ApuliaSoftware/odoo,VielSoft/odoo,numerigraphe/odoo,gorjuce/odoo,avoinsystems/odoo,KontorConsulting/odoo,elmerdpadilla/iv,sinbazhou/odoo,aviciimaxwell/odoo,joariasl/odoo,joshuajan/odoo,rubencabrera/odoo,papouso/odoo,abenzbiria/clients_odoo,Gitlab11/odoo,bkirui/odoo,spadae22/odoo,ShineFan/odoo,OpenPymeMx/OCB,hbrunn/OpenUpgrade,minhtuancn/odoo,ChanduERP/odoo,0k/odoo,dalegregory/odoo,stephen144/odoo,fuhongliang/odoo,guerrerocarlos/odoo,srimai/odoo,stephen144/odoo,shaufi/odoo,n0m4dz/odoo,erkrishna9/odoo,simongoffin/website_version,juanalfonsopr/odoo,rdeheele/odoo,colinnewell/odoo,mszewczy/odoo,leoliujie/odoo,codekaki/odoo,dezynetechnologies/odoo,ThinkOpen-Solutions/odoo,lgscofield/odoo,ramadhane/odoo,mmbtba/odoo,ShineFan/odoo,fevxie/odoo,osvalr/odoo,bobisme/odoo,stonegithubs/odoo,BT-ojossen/odoo,provaleks/o8,jesramirez/odoo,xujb/odoo,virgree/odoo,tvibliani/odoo,Grirrane/odoo,abenzbiria/clients_odoo,mmbtba/odoo,Drooids/odoo,credativUK/OCB,fuselock/odoo,synconics/odoo,arthru/OpenUpgrade,bguillot/OpenUpgrade,QianBIG/odoo,jesramirez/odoo,ecosoft-odoo/odoo,abdellatifkarroum/odoo,idncom/odoo,nuncjo/odoo,bwrsandman/OpenUpgrade,lombritz/odoo,dkubiak789/odoo,ingadhoc/odoo,funkring/fdoo,CatsAndDogsbvba/odoo,jiachenning/odoo,waytai/odoo,inspyration/odoo,stonegithubs/odoo,oihane/odoo,goliveirab/odoo,podemos-info/odoo,BT-rmartin/odoo,salaria/odoo,chiragjogi/odoo,bkirui/odoo,jiachenning/odoo,datenbetrieb/odoo,provaleks/o8,nuuuboo/odoo,collex100/odoo,goliveirab/odoo,collex100/odoo,Noviat/odoo,pedrobaeza/OpenUpgrade,ubic135/odoo-design,papouso/odoo,fevxie/odoo,omprakasha/odoo,ujjwalwahi/odoo,florentx/OpenUpgrade,xujb/odoo,dariemp/odoo,oliverhr/odoo,BT-ojossen/odoo,FlorianLudwig/odoo,BT-ojossen/odoo,ingadhoc/odoo,AuyaJackie/odoo,shivam1111/odoo,fuhongliang/odoo,srsman/odoo,ramitalat/odoo,hmen89/odoo,0k/odoo,hubsaysnuaa/odoo,salaria/odoo,OpenUpgrade-dev/OpenUpgrade,highco-groupe/odoo,lsinfo/odoo,ShineFan/odoo,ThinkOpen-Solutions/odoo,storm-computers/odoo,bakhtout/odoo-educ,jolevq/odoopub,rschnapka/odoo
,acshan/odoo,jfpla/odoo,Adel-Magebinary/odoo,dezynetechnologies/odoo,cedk/odoo,dllsf/odootest,prospwro/odoo,hbrunn/OpenUpgrade,frouty/odoo_oph,hanicker/odoo,BT-fgarbely/odoo,factorlibre/OCB,grap/OCB,elmerdpadilla/iv,janocat/odoo,n0m4dz/odoo,virgree/odoo,shingonoide/odoo,juanalfonsopr/odoo,Nowheresly/odoo,collex100/odoo,jiangzhixiao/odoo,factorlibre/OCB,feroda/odoo,damdam-s/OpenUpgrade,jusdng/odoo,rowemoore/odoo,nuncjo/odoo,savoirfairelinux/odoo,gsmartway/odoo,inspyration/odoo,Antiun/odoo,Danisan/odoo-1,TRESCLOUD/odoopub,0k/OpenUpgrade,janocat/odoo,microcom/odoo,dfang/odoo,rschnapka/odoo,bakhtout/odoo-educ,NL66278/OCB,guerrerocarlos/odoo,abdellatifkarroum/odoo,gdgellatly/OCB1,frouty/odoogoeen,odoousers2014/odoo,ramadhane/odoo,salaria/odoo,Maspear/odoo,Kilhog/odoo,guerrerocarlos/odoo,osvalr/odoo,PongPi/isl-odoo,sadleader/odoo,patmcb/odoo,xujb/odoo,hubsaysnuaa/odoo,luistorresm/odoo,ChanduERP/odoo,rgeleta/odoo,numerigraphe/odoo,chiragjogi/odoo,xzYue/odoo,Drooids/odoo,dfang/odoo,glovebx/odoo,dfang/odoo,ihsanudin/odoo,shaufi/odoo,BT-ojossen/odoo,provaleks/o8,Grirrane/odoo,optima-ict/odoo,cpyou/odoo,dariemp/odoo,nhomar/odoo-mirror,pedrobaeza/OpenUpgrade,abdellatifkarroum/odoo,tinkhaven-organization/odoo,luistorresm/odoo,xujb/odoo,nuuuboo/odoo,Ernesto99/odoo,demon-ru/iml-crm,cedk/odoo,BT-rmartin/odoo,nexiles/odoo,Eric-Zhong/odoo,n0m4dz/odoo,havt/odoo,RafaelTorrealba/odoo,jpshort/odoo,wangjun/odoo,cysnake4713/odoo,tinkhaven-organization/odoo,javierTerry/odoo,shingonoide/odoo,havt/odoo,tvibliani/odoo,alexcuellar/odoo,ccomb/OpenUpgrade,hubsaysnuaa/odoo,elmerdpadilla/iv,bobisme/odoo,rgeleta/odoo,diagramsoftware/odoo,jfpla/odoo,Danisan/odoo-1,vrenaville/ngo-addons-backport,nexiles/odoo,wangjun/odoo,hopeall/odoo,waytai/odoo,oliverhr/odoo,cysnake4713/odoo,GauravSahu/odoo,bakhtout/odoo-educ,wangjun/odoo,bplancher/odoo,nitinitprof/odoo,pedrobaeza/OpenUpgrade,nuuuboo/odoo,papouso/odoo,OpenPymeMx/OCB,bguillot/OpenUpgrade,pplatek/odoo,kifcaliph/odoo,addition-it-solutions/project-all,nexiles/odoo,CubicERP/odoo,stephen144/odoo,nuncjo/odoo,oihane/odoo,gavin-feng/odoo,addition-it-solutions/project-all,avoinsystems/odoo,Ernesto99/odoo,oasiswork/odoo,hanicker/odoo,osvalr/odoo,sergio-incaser/odoo,optima-ict/odoo,nuncjo/odoo,Adel-Magebinary/odoo,oihane/odoo,mszewczy/odoo,grap/OpenUpgrade,funkring/fdoo,JonathanStein/odoo,tinkerthaler/odoo,dezynetechnologies/odoo,colinnewell/odoo,slevenhagen/odoo,charbeljc/OCB,rubencabrera/odoo,slevenhagen/odoo,OSSESAC/odoopubarquiluz,ehirt/odoo,JonathanStein/odoo,abdellatifkarroum/odoo,cdrooom/odoo,jiachenning/odoo,Nick-OpusVL/odoo,cysnake4713/odoo,lombritz/odoo,rdeheele/odoo,hassoon3/odoo,glovebx/odoo,RafaelTorrealba/odoo,Maspear/odoo,JonathanStein/odoo,Maspear/odoo,apocalypsebg/odoo,camptocamp/ngo-addons-backport,stonegithubs/odoo,dariemp/odoo,mlaitinen/odoo,leorochael/odoo,fevxie/odoo,bobisme/odoo,FlorianLudwig/odoo,FlorianLudwig/odoo,demon-ru/iml-crm,bobisme/odoo,luistorresm/odoo,credativUK/OCB,Kilhog/odoo,nagyistoce/odoo-dev-odoo,wangjun/odoo,ubic135/odoo-design,bkirui/odoo,bwrsandman/OpenUpgrade,ehirt/odoo,ApuliaSoftware/odoo,optima-ict/odoo,x111ong/odoo,GauravSahu/odoo,avoinsystems/odoo,grap/OpenUpgrade,codekaki/odoo,xujb/odoo,luiseduardohdbackup/odoo,joariasl/odoo,Adel-Magebinary/odoo,mszewczy/odoo,alqfahad/odoo,pplatek/odoo,massot/odoo,GauravSahu/odoo,shingonoide/odoo,SAM-IT-SA/odoo,Bachaco-ve/odoo,Maspear/odoo,frouty/odoogoeen,sysadminmatmoz/OCB,savoirfairelinux/OpenUpgrade,arthru/OpenUpgrade,VitalPet/odoo,dllsf/odootest,Bachaco-ve/odoo,jaxkodex/odoo,omprakasha/odoo,lsinfo/od
oo,leorochael/odoo,leorochael/odoo,incaser/odoo-odoo,leoliujie/odoo,oliverhr/odoo,nuncjo/odoo,grap/OCB,slevenhagen/odoo-npg,microcom/odoo,hip-odoo/odoo,spadae22/odoo,fdvarela/odoo8,ramitalat/odoo,dgzurita/odoo,takis/odoo,vnsofthe/odoo,ovnicraft/odoo,microcom/odoo,mkieszek/odoo,mustafat/odoo-1,fuhongliang/odoo,fossoult/odoo,bealdav/OpenUpgrade,sebalix/OpenUpgrade,cpyou/odoo,windedge/odoo,tvtsoft/odoo8,ChanduERP/odoo,goliveirab/odoo,kirca/OpenUpgrade,hifly/OpenUpgrade,acshan/odoo,hoatle/odoo,OpenUpgrade/OpenUpgrade,rschnapka/odoo,Danisan/odoo-1,jusdng/odoo,rowemoore/odoo,jfpla/odoo,rschnapka/odoo,SAM-IT-SA/odoo,srsman/odoo,jpshort/odoo,lgscofield/odoo,rschnapka/odoo,sysadminmatmoz/OCB,factorlibre/OCB,vrenaville/ngo-addons-backport,ubic135/odoo-design,omprakasha/odoo,steedos/odoo,acshan/odoo,savoirfairelinux/OpenUpgrade,jesramirez/odoo,simongoffin/website_version,Antiun/odoo,sysadminmatmoz/OCB,jiachenning/odoo,Endika/OpenUpgrade,florentx/OpenUpgrade,havt/odoo,nuncjo/odoo,avoinsystems/odoo,bealdav/OpenUpgrade,zchking/odoo,steedos/odoo,tarzan0820/odoo,sve-odoo/odoo,hopeall/odoo,tinkerthaler/odoo,cysnake4713/odoo,sve-odoo/odoo,odooindia/odoo,apanju/GMIO_Odoo,NeovaHealth/odoo,mlaitinen/odoo,srsman/odoo,TRESCLOUD/odoopub,fossoult/odoo,jiangzhixiao/odoo,makinacorpus/odoo,apanju/GMIO_Odoo,Codefans-fan/odoo,luiseduardohdbackup/odoo,jeasoft/odoo,blaggacao/OpenUpgrade,csrocha/OpenUpgrade,nhomar/odoo-mirror,naousse/odoo,ygol/odoo,Noviat/odoo,PongPi/isl-odoo,papouso/odoo,pplatek/odoo,bkirui/odoo,n0m4dz/odoo,xzYue/odoo,poljeff/odoo,numerigraphe/odoo,odoousers2014/odoo,gsmartway/odoo,odooindia/odoo,javierTerry/odoo,codekaki/odoo,vnsofthe/odoo,laslabs/odoo,ClearCorp-dev/odoo,ramadhane/odoo,hassoon3/odoo,luiseduardohdbackup/odoo,Antiun/odoo,csrocha/OpenUpgrade,pplatek/odoo,makinacorpus/odoo,shingonoide/odoo,lightcn/odoo,kifcaliph/odoo,fossoult/odoo,brijeshkesariya/odoo,oasiswork/odoo,cloud9UG/odoo,OSSESAC/odoopubarquiluz,BT-fgarbely/odoo,kirca/OpenUpgrade,Danisan/odoo-1,lombritz/odoo,zchking/odoo,bguillot/OpenUpgrade,jiangzhixiao/odoo,frouty/odoogoeen,optima-ict/odoo,diagramsoftware/odoo,hbrunn/OpenUpgrade,rahuldhote/odoo,alhashash/odoo,kirca/OpenUpgrade,NeovaHealth/odoo,bakhtout/odoo-educ,nhomar/odoo-mirror,guerrerocarlos/odoo,grap/OCB,spadae22/odoo,cdrooom/odoo,fgesora/odoo,draugiskisprendimai/odoo,gvb/odoo,jolevq/odoopub,leoliujie/odoo,synconics/odoo,gdgellatly/OCB1,lgscofield/odoo,aviciimaxwell/odoo,glovebx/odoo,mmbtba/odoo,Nick-OpusVL/odoo,Bachaco-ve/odoo,realsaiko/odoo,matrixise/odoo,odooindia/odoo,MarcosCommunity/odoo,storm-computers/odoo,nhomar/odoo,doomsterinc/odoo,ClearCorp-dev/odoo,srimai/odoo,Grirrane/odoo,alhashash/odoo,frouty/odoo_oph,syci/OCB,chiragjogi/odoo,Ernesto99/odoo,windedge/odoo,dkubiak789/odoo,dalegregory/odoo,Ernesto99/odoo,tarzan0820/odoo,dsfsdgsbngfggb/odoo,gorjuce/odoo,gsmartway/odoo,bguillot/OpenUpgrade,fossoult/odoo,odootr/odoo,codekaki/odoo,patmcb/odoo,blaggacao/OpenUpgrade,guewen/OpenUpgrade,funkring/fdoo,odoousers2014/odoo,bkirui/odoo,florian-dacosta/OpenUpgrade,zchking/odoo,codekaki/odoo,camptocamp/ngo-addons-backport,glovebx/odoo,fossoult/odoo,Drooids/odoo,lgscofield/odoo,rgeleta/odoo,slevenhagen/odoo,dkubiak789/odoo,nuncjo/odoo,frouty/odoogoeen,grap/OpenUpgrade,poljeff/odoo,waytai/odoo,hassoon3/odoo,Eric-Zhong/odoo,makinacorpus/odoo,sebalix/OpenUpgrade,collex100/odoo,jaxkodex/odoo,odoo-turkiye/odoo,0k/odoo,dfang/odoo,fgesora/odoo,0k/odoo,Kilhog/odoo,ujjwalwahi/odoo,alexteodor/odoo,Daniel-CA/odoo,rahuldhote/odoo,oliverhr/odoo,VitalPet/odoo,minhtuancn/odoo,gvb/odoo,alqfahad/o
doo,sysadminmatmoz/OCB,doomsterinc/odoo,nhomar/odoo,kybriainfotech/iSocioCRM,andreparames/odoo,fjbatresv/odoo,diagramsoftware/odoo,OpusVL/odoo,VitalPet/odoo,BT-fgarbely/odoo,draugiskisprendimai/odoo,abdellatifkarroum/odoo,abstract-open-solutions/OCB,xzYue/odoo,feroda/odoo,dalegregory/odoo,BT-ojossen/odoo,TRESCLOUD/odoopub,ccomb/OpenUpgrade,stephen144/odoo,Endika/odoo,takis/odoo,Codefans-fan/odoo,bplancher/odoo,SerpentCS/odoo,chiragjogi/odoo,damdam-s/OpenUpgrade,odootr/odoo,eino-makitalo/odoo,podemos-info/odoo,eino-makitalo/odoo,kittiu/odoo,fevxie/odoo,doomsterinc/odoo,NL66278/OCB,Elico-Corp/odoo_OCB,alhashash/odoo,hopeall/odoo,gorjuce/odoo,janocat/odoo,odoo-turkiye/odoo,csrocha/OpenUpgrade,alqfahad/odoo,ihsanudin/odoo,vnsofthe/odoo,odoo-turkiye/odoo,Danisan/odoo-1,shaufi/odoo,juanalfonsopr/odoo,abdellatifkarroum/odoo,hifly/OpenUpgrade,AuyaJackie/odoo,luiseduardohdbackup/odoo,odooindia/odoo,Codefans-fan/odoo,guewen/OpenUpgrade,sv-dev1/odoo,poljeff/odoo,oihane/odoo,nagyistoce/odoo-dev-odoo,PongPi/isl-odoo,CubicERP/odoo,shaufi/odoo,jeasoft/odoo,andreparames/odoo,grap/OCB,laslabs/odoo,GauravSahu/odoo,laslabs/odoo,salaria/odoo,diagramsoftware/odoo,pedrobaeza/OpenUpgrade,kifcaliph/odoo,fevxie/odoo,tvtsoft/odoo8,Nick-OpusVL/odoo,lightcn/odoo,shivam1111/odoo,laslabs/odoo,alexcuellar/odoo,abstract-open-solutions/OCB,abstract-open-solutions/OCB,feroda/odoo,ubic135/odoo-design,MarcosCommunity/odoo,tarzan0820/odoo,thanhacun/odoo,gavin-feng/odoo,dariemp/odoo,rahuldhote/odoo,addition-it-solutions/project-all,damdam-s/OpenUpgrade,sv-dev1/odoo,jpshort/odoo,ovnicraft/odoo,ApuliaSoftware/odoo,draugiskisprendimai/odoo,savoirfairelinux/OpenUpgrade,sv-dev1/odoo,hoatle/odoo,poljeff/odoo,papouso/odoo,windedge/odoo,jpshort/odoo,guewen/OpenUpgrade,camptocamp/ngo-addons-backport,jfpla/odoo,ehirt/odoo,florian-dacosta/OpenUpgrade,CubicERP/odoo,dezynetechnologies/odoo,mkieszek/odoo,christophlsa/odoo,markeTIC/OCB,Gitlab11/odoo,rowemoore/odoo,ramitalat/odoo,avoinsystems/odoo,bguillot/OpenUpgrade,shaufi/odoo,hubsaysnuaa/odoo,OpusVL/odoo,Nick-OpusVL/odoo,Eric-Zhong/odoo,nagyistoce/odoo-dev-odoo,poljeff/odoo,mszewczy/odoo,acshan/odoo,slevenhagen/odoo-npg,CopeX/odoo,CatsAndDogsbvba/odoo,massot/odoo,csrocha/OpenUpgrade,cdrooom/odoo,cedk/odoo,cloud9UG/odoo,klunwebale/odoo,bakhtout/odoo-educ,camptocamp/ngo-addons-backport,hip-odoo/odoo,luiseduardohdbackup/odoo,bakhtout/odoo-educ,addition-it-solutions/project-all,tvtsoft/odoo8,QianBIG/odoo,juanalfonsopr/odoo,fossoult/odoo,dsfsdgsbngfggb/odoo,dariemp/odoo,Ichag/odoo,fuhongliang/odoo,rgeleta/odoo,minhtuancn/odoo,nuuuboo/odoo,OSSESAC/odoopubarquiluz,addition-it-solutions/project-all,RafaelTorrealba/odoo,nagyistoce/odoo-dev-odoo,kittiu/odoo,ingadhoc/odoo,doomsterinc/odoo,aviciimaxwell/odoo,apanju/GMIO_Odoo,demon-ru/iml-crm,ApuliaSoftware/odoo,bwrsandman/OpenUpgrade,CatsAndDogsbvba/odoo,osvalr/odoo,blaggacao/OpenUpgrade,synconics/odoo,frouty/odoo_oph,klunwebale/odoo,ecosoft-odoo/odoo,bwrsandman/OpenUpgrade,Endika/odoo,lsinfo/odoo,n0m4dz/odoo,alexcuellar/odoo,collex100/odoo,podemos-info/odoo,numerigraphe/odoo,BT-ojossen/odoo,pedrobaeza/odoo,dkubiak789/odoo,klunwebale/odoo,markeTIC/OCB,brijeshkesariya/odoo,guerrerocarlos/odoo,simongoffin/website_version,cedk/odoo,mlaitinen/odoo,Endika/odoo,ojengwa/odoo,ramadhane/odoo,camptocamp/ngo-addons-backport,sinbazhou/odoo,mvaled/OpenUpgrade,florian-dacosta/OpenUpgrade,bguillot/OpenUpgrade,Eric-Zhong/odoo,GauravSahu/odoo,charbeljc/OCB,SerpentCS/odoo,brijeshkesariya/odoo,tarzan0820/odoo,CubicERP/odoo,MarcosCommunity/odoo,Noviat/odoo,BT-rmartin/odo
o,slevenhagen/odoo-npg,hip-odoo/odoo,Noviat/odoo,fgesora/odoo,wangjun/odoo,ChanduERP/odoo,alqfahad/odoo,Endika/OpenUpgrade,camptocamp/ngo-addons-backport,ramitalat/odoo,x111ong/odoo,shaufi10/odoo,ihsanudin/odoo,sve-odoo/odoo,aviciimaxwell/odoo,OpenUpgrade/OpenUpgrade,osvalr/odoo,datenbetrieb/odoo,zchking/odoo,VielSoft/odoo,mmbtba/odoo,windedge/odoo,SAM-IT-SA/odoo,Ichag/odoo,dsfsdgsbngfggb/odoo,grap/OpenUpgrade,nexiles/odoo,virgree/odoo,odoo-turkiye/odoo,alhashash/odoo,Endika/OpenUpgrade,Endika/odoo,nhomar/odoo,xujb/odoo,lombritz/odoo,naousse/odoo,fuselock/odoo,VielSoft/odoo,BT-fgarbely/odoo,highco-groupe/odoo,simongoffin/website_version,ccomb/OpenUpgrade,hopeall/odoo,odootr/odoo,thanhacun/odoo,glovebx/odoo,Codefans-fan/odoo,hanicker/odoo,srimai/odoo,OpenUpgrade-dev/OpenUpgrade,papouso/odoo,Nowheresly/odoo,dgzurita/odoo,slevenhagen/odoo,ojengwa/odoo,tvtsoft/odoo8,ojengwa/odoo,matrixise/odoo,virgree/odoo,alqfahad/odoo,rahuldhote/odoo,jfpla/odoo,vrenaville/ngo-addons-backport,javierTerry/odoo,credativUK/OCB,BT-rmartin/odoo,minhtuancn/odoo,Elico-Corp/odoo_OCB,prospwro/odoo,doomsterinc/odoo,mustafat/odoo-1,highco-groupe/odoo,alexteodor/odoo,mszewczy/odoo,blaggacao/OpenUpgrade,jaxkodex/odoo,stonegithubs/odoo,JCA-Developpement/Odoo,luistorresm/odoo,kittiu/odoo,Nowheresly/odoo,apocalypsebg/odoo,factorlibre/OCB,ojengwa/odoo,papouso/odoo,fuselock/odoo,christophlsa/odoo,csrocha/OpenUpgrade,realsaiko/odoo,havt/odoo,jeasoft/odoo,vrenaville/ngo-addons-backport,Gitlab11/odoo,mmbtba/odoo,tangyiyong/odoo,spadae22/odoo,cpyou/odoo,codekaki/odoo,joshuajan/odoo,agrista/odoo-saas,mvaled/OpenUpgrade,srsman/odoo,n0m4dz/odoo,synconics/odoo,QianBIG/odoo,takis/odoo,abenzbiria/clients_odoo,shivam1111/odoo,ChanduERP/odoo,blaggacao/OpenUpgrade,mvaled/OpenUpgrade,charbeljc/OCB,QianBIG/odoo,dalegregory/odoo,numerigraphe/odoo,shaufi10/odoo,ShineFan/odoo,fjbatresv/odoo,ingadhoc/odoo,VielSoft/odoo,ramitalat/odoo,lombritz/odoo,sergio-incaser/odoo,windedge/odoo,Endika/odoo,brijeshkesariya/odoo,ClearCorp-dev/odoo,RafaelTorrealba/odoo,virgree/odoo,charbeljc/OCB,srimai/odoo,sv-dev1/odoo,factorlibre/OCB,savoirfairelinux/OpenUpgrade,agrista/odoo-saas,nhomar/odoo-mirror,acshan/odoo,FlorianLudwig/odoo,deKupini/erp,mlaitinen/odoo,ThinkOpen-Solutions/odoo,shaufi/odoo,bwrsandman/OpenUpgrade,idncom/odoo,cloud9UG/odoo,Ichag/odoo,BT-astauder/odoo,nitinitprof/odoo,nuuuboo/odoo,nitinitprof/odoo,wangjun/odoo,massot/odoo,kirca/OpenUpgrade,gavin-feng/odoo,Endika/OpenUpgrade,KontorConsulting/odoo,damdam-s/OpenUpgrade,andreparames/odoo,windedge/odoo,dariemp/odoo,funkring/fdoo,tangyiyong/odoo,sebalix/OpenUpgrade,sinbazhou/odoo,OpenPymeMx/OCB,steedos/odoo,GauravSahu/odoo,omprakasha/odoo,OpenUpgrade-dev/OpenUpgrade,odootr/odoo,alexcuellar/odoo,tinkhaven-organization/odoo,JonathanStein/odoo,hifly/OpenUpgrade,BT-rmartin/odoo,syci/OCB,rdeheele/odoo,ingadhoc/odoo,savoirfairelinux/OpenUpgrade,agrista/odoo-saas,Noviat/odoo,tinkerthaler/odoo,bealdav/OpenUpgrade,pplatek/odoo,podemos-info/odoo,srimai/odoo,NeovaHealth/odoo,FlorianLudwig/odoo,joariasl/odoo,fevxie/odoo,Endika/OpenUpgrade,sadleader/odoo,aviciimaxwell/odoo,laslabs/odoo,Nick-OpusVL/odoo,dgzurita/odoo,OpenUpgrade-dev/OpenUpgrade,mkieszek/odoo,KontorConsulting/odoo,Endika/OpenUpgrade,hoatle/odoo,thanhacun/odoo,draugiskisprendimai/odoo,nhomar/odoo,gavin-feng/odoo,Codefans-fan/odoo,pedrobaeza/odoo,florian-dacosta/OpenUpgrade,slevenhagen/odoo,GauravSahu/odoo,charbeljc/OCB,dezynetechnologies/odoo,datenbetrieb/odoo,ihsanudin/odoo,cloud9UG/odoo,CubicERP/odoo,Nowheresly/odoo,provaleks/o8,Ernesto99/odoo,lig
htcn/odoo,zchking/odoo,mszewczy/odoo,tangyiyong/odoo,sve-odoo/odoo,abenzbiria/clients_odoo,hifly/OpenUpgrade,fuselock/odoo,storm-computers/odoo,collex100/odoo,mlaitinen/odoo,hmen89/odoo,JCA-Developpement/Odoo,ChanduERP/odoo,inspyration/odoo,mkieszek/odoo,nagyistoce/odoo-dev-odoo,incaser/odoo-odoo,bkirui/odoo,shaufi10/odoo,goliveirab/odoo,ShineFan/odoo,kybriainfotech/iSocioCRM,pedrobaeza/OpenUpgrade,bealdav/OpenUpgrade,ThinkOpen-Solutions/odoo,AuyaJackie/odoo,FlorianLudwig/odoo,christophlsa/odoo,shingonoide/odoo,dgzurita/odoo,cedk/odoo,QianBIG/odoo,stonegithubs/odoo,tangyiyong/odoo,BT-ojossen/odoo,idncom/odoo,agrista/odoo-saas,rowemoore/odoo,rschnapka/odoo,shaufi10/odoo,lombritz/odoo,factorlibre/OCB,luiseduardohdbackup/odoo,rubencabrera/odoo,cysnake4713/odoo,florentx/OpenUpgrade,dgzurita/odoo,Daniel-CA/odoo,patmcb/odoo,andreparames/odoo,ehirt/odoo,takis/odoo,hanicker/odoo,microcom/odoo,0k/OpenUpgrade,mvaled/OpenUpgrade,OpenPymeMx/OCB,Maspear/odoo,dkubiak789/odoo,kifcaliph/odoo,zchking/odoo,andreparames/odoo,srimai/odoo,colinnewell/odoo,odoo-turkiye/odoo,colinnewell/odoo,0k/OpenUpgrade,funkring/fdoo,christophlsa/odoo,leoliujie/odoo,guewen/OpenUpgrade,janocat/odoo,naousse/odoo,javierTerry/odoo,Eric-Zhong/odoo,incaser/odoo-odoo,matrixise/odoo,tinkhaven-organization/odoo,Ichag/odoo,ojengwa/odoo,microcom/odoo,sinbazhou/odoo,realsaiko/odoo,patmcb/odoo,apocalypsebg/odoo,savoirfairelinux/odoo,erkrishna9/odoo,grap/OpenUpgrade,stephen144/odoo,sv-dev1/odoo,OSSESAC/odoopubarquiluz,luistorresm/odoo,csrocha/OpenUpgrade,MarcosCommunity/odoo,tarzan0820/odoo,ramitalat/odoo,sergio-incaser/odoo,zchking/odoo,alexteodor/odoo,diagramsoftware/odoo,optima-ict/odoo,aviciimaxwell/odoo,hassoon3/odoo,doomsterinc/odoo,bplancher/odoo,thanhacun/odoo,sebalix/OpenUpgrade,AuyaJackie/odoo,OpenPymeMx/OCB,apanju/odoo,gorjuce/odoo,kittiu/odoo,ygol/odoo,Nowheresly/odoo,arthru/OpenUpgrade,apanju/GMIO_Odoo,Kilhog/odoo,odoo-turkiye/odoo,agrista/odoo-saas,dalegregory/odoo,alqfahad/odoo,bealdav/OpenUpgrade,RafaelTorrealba/odoo,Daniel-CA/odoo,Antiun/odoo,syci/OCB,virgree/odoo,dezynetechnologies/odoo,Antiun/odoo,PongPi/isl-odoo,OSSESAC/odoopubarquiluz,rdeheele/odoo,osvalr/odoo,rschnapka/odoo,florentx/OpenUpgrade,Danisan/odoo-1,erkrishna9/odoo,PongPi/isl-odoo,provaleks/o8,OpenPymeMx/OCB,christophlsa/odoo,n0m4dz/odoo,leorochael/odoo,SAM-IT-SA/odoo,xzYue/odoo,odootr/odoo,oasiswork/odoo,pedrobaeza/odoo,apanju/GMIO_Odoo,cpyou/odoo,ujjwalwahi/odoo,storm-computers/odoo,makinacorpus/odoo,acshan/odoo,ThinkOpen-Solutions/odoo,colinnewell/odoo,kybriainfotech/iSocioCRM,hoatle/odoo,thanhacun/odoo,provaleks/o8,shaufi10/odoo,jiangzhixiao/odoo,hopeall/odoo,jiangzhixiao/odoo,ClearCorp-dev/odoo,Adel-Magebinary/odoo,waytai/odoo,janocat/odoo,JGarcia-Panach/odoo,bkirui/odoo,takis/odoo,minhtuancn/odoo,simongoffin/website_version,rahuldhote/odoo,markeTIC/OCB,codekaki/odoo,apanju/odoo,nhomar/odoo,vrenaville/ngo-addons-backport,brijeshkesariya/odoo,Gitlab11/odoo,odootr/odoo,incaser/odoo-odoo,tangyiyong/odoo,grap/OCB,grap/OpenUpgrade,fjbatresv/odoo,tarzan0820/odoo,SerpentCS/odoo,ecosoft-odoo/odoo,kifcaliph/odoo,xzYue/odoo,jaxkodex/odoo,ubic135/odoo-design,mustafat/odoo-1,vnsofthe/odoo,patmcb/odoo,dariemp/odoo,odootr/odoo,joshuajan/odoo,VitalPet/odoo,dllsf/odootest,minhtuancn/odoo,odooindia/odoo,florentx/OpenUpgrade,SerpentCS/odoo,jesramirez/odoo,arthru/OpenUpgrade,sebalix/OpenUpgrade,Grirrane/odoo,vrenaville/ngo-addons-backport,tinkhaven-organization/odoo,bplancher/odoo,MarcosCommunity/odoo,vnsofthe/odoo,hubsaysnuaa/odoo,pedrobaeza/OpenUpgrade,dalegregory/odoo,h
ubsaysnuaa/odoo,JCA-Developpement/Odoo,savoirfairelinux/odoo,dkubiak789/odoo,slevenhagen/odoo-npg,Drooids/odoo,charbeljc/OCB,joariasl/odoo,JonathanStein/odoo,matrixise/odoo,Adel-Magebinary/odoo,MarcosCommunity/odoo,jiachenning/odoo,incaser/odoo-odoo,hmen89/odoo,sadleader/odoo,apanju/GMIO_Odoo,Maspear/odoo,Bachaco-ve/odoo,BT-astauder/odoo,goliveirab/odoo,realsaiko/odoo,ehirt/odoo,diagramsoftware/odoo,pedrobaeza/odoo,hbrunn/OpenUpgrade,sysadminmatmoz/OCB,x111ong/odoo,Endika/odoo,arthru/OpenUpgrade,VitalPet/odoo,provaleks/o8,Elico-Corp/odoo_OCB,takis/odoo,cloud9UG/odoo,nexiles/odoo,SerpentCS/odoo,frouty/odoogoeen,hoatle/odoo,CatsAndDogsbvba/odoo,Antiun/odoo,joshuajan/odoo,sadleader/odoo,OpenUpgrade/OpenUpgrade,mszewczy/odoo,fgesora/odoo,ecosoft-odoo/odoo,demon-ru/iml-crm,christophlsa/odoo,apocalypsebg/odoo,avoinsystems/odoo,tinkhaven-organization/odoo,stonegithubs/odoo,synconics/odoo,fgesora/odoo,gavin-feng/odoo,vrenaville/ngo-addons-backport,incaser/odoo-odoo,SAM-IT-SA/odoo,srsman/odoo,VitalPet/odoo,dsfsdgsbngfggb/odoo,OpenPymeMx/OCB,feroda/odoo,alexteodor/odoo,hanicker/odoo,PongPi/isl-odoo,ovnicraft/odoo,naousse/odoo,highco-groupe/odoo,bakhtout/odoo-educ,BT-astauder/odoo,Bachaco-ve/odoo,xzYue/odoo,x111ong/odoo,rubencabrera/odoo,thanhacun/odoo,lgscofield/odoo,odoousers2014/odoo,VielSoft/odoo,ccomb/OpenUpgrade,Danisan/odoo-1,spadae22/odoo,stonegithubs/odoo,OpenUpgrade/OpenUpgrade,draugiskisprendimai/odoo,bobisme/odoo,spadae22/odoo,jolevq/odoopub,synconics/odoo,sadleader/odoo,lightcn/odoo,microcom/odoo,storm-computers/odoo,gavin-feng/odoo,sv-dev1/odoo,OpenUpgrade/OpenUpgrade,alhashash/odoo,hmen89/odoo,tangyiyong/odoo,waytai/odoo,oasiswork/odoo,abenzbiria/clients_odoo,mmbtba/odoo,odoo-turkiye/odoo,eino-makitalo/odoo,mlaitinen/odoo,Ichag/odoo,osvalr/odoo,syci/OCB,x111ong/odoo,gsmartway/odoo,jeasoft/odoo,windedge/odoo,eino-makitalo/odoo,apocalypsebg/odoo,Adel-Magebinary/odoo,hoatle/odoo,chiragjogi/odoo,RafaelTorrealba/odoo,srsman/odoo,jfpla/odoo,shingonoide/odoo,x111ong/odoo,mmbtba/odoo,KontorConsulting/odoo,CopeX/odoo,cedk/odoo,juanalfonsopr/odoo,ApuliaSoftware/odoo,nitinitprof/odoo,gdgellatly/OCB1,salaria/odoo,slevenhagen/odoo-npg,ujjwalwahi/odoo,jusdng/odoo,OpenUpgrade/OpenUpgrade,virgree/odoo,Daniel-CA/odoo,syci/OCB,nhomar/odoo,dgzurita/odoo,nagyistoce/odoo-dev-odoo,deKupini/erp,fevxie/odoo,omprakasha/odoo,savoirfairelinux/odoo,Elico-Corp/odoo_OCB,csrocha/OpenUpgrade,Ichag/odoo,Drooids/odoo,bobisme/odoo,prospwro/odoo,gavin-feng/odoo,gvb/odoo,bwrsandman/OpenUpgrade,jfpla/odoo,steedos/odoo,gsmartway/odoo,ojengwa/odoo,eino-makitalo/odoo,havt/odoo,incaser/odoo-odoo,Gitlab11/odoo,mlaitinen/odoo,hopeall/odoo,datenbetrieb/odoo,FlorianLudwig/odoo,ThinkOpen-Solutions/odoo,credativUK/OCB,kittiu/odoo,aviciimaxwell/odoo,salaria/odoo,xujb/odoo,eino-makitalo/odoo,ujjwalwahi/odoo,alqfahad/odoo,kybriainfotech/iSocioCRM,jiachenning/odoo,ecosoft-odoo/odoo,oihane/odoo,jesramirez/odoo,abstract-open-solutions/OCB,factorlibre/OCB,chiragjogi/odoo,oihane/odoo,shaufi10/odoo,Daniel-CA/odoo,deKupini/erp,sergio-incaser/odoo,ovnicraft/odoo,klunwebale/odoo,jpshort/odoo,frouty/odoogoeen,wangjun/odoo,jiangzhixiao/odoo,tinkerthaler/odoo,savoirfairelinux/odoo,odoousers2014/odoo,fjbatresv/odoo,pplatek/odoo,funkring/fdoo,prospwro/odoo,naousse/odoo,NL66278/OCB,CatsAndDogsbvba/odoo,hifly/OpenUpgrade,collex100/odoo,OpenUpgrade-dev/OpenUpgrade,ygol/odoo,idncom/odoo,cdrooom/odoo,joshuajan/odoo,ujjwalwahi/odoo,sinbazhou/odoo,codekaki/odoo,naousse/odoo,0k/OpenUpgrade,deKupini/erp,bwrsandman/OpenUpgrade,steedos/odoo,podemos-info/odoo,kl
unwebale/odoo,rubencabrera/odoo,xzYue/odoo,odoousers2014/odoo,markeTIC/OCB,fgesora/odoo,grap/OpenUpgrade,erkrishna9/odoo,JGarcia-Panach/odoo,tvibliani/odoo,credativUK/OCB,leorochael/odoo,lombritz/odoo,prospwro/odoo,fuselock/odoo,nitinitprof/odoo,BT-astauder/odoo,Bachaco-ve/odoo,dkubiak789/odoo,javierTerry/odoo,datenbetrieb/odoo,fuselock/odoo,savoirfairelinux/OpenUpgrade,VielSoft/odoo,BT-fgarbely/odoo,gsmartway/odoo,mvaled/OpenUpgrade,fuhongliang/odoo,SAM-IT-SA/odoo,Antiun/odoo,frouty/odoo_oph,alexteodor/odoo,oasiswork/odoo,OpenPymeMx/OCB,dgzurita/odoo,hifly/OpenUpgrade,JGarcia-Panach/odoo,ehirt/odoo,fdvarela/odoo8,Endika/OpenUpgrade,jusdng/odoo,luistorresm/odoo,sinbazhou/odoo,fossoult/odoo,numerigraphe/odoo,Kilhog/odoo,hip-odoo/odoo,elmerdpadilla/iv,JGarcia-Panach/odoo,OSSESAC/odoopubarquiluz,stephen144/odoo,sv-dev1/odoo,VitalPet/odoo,ygol/odoo,inspyration/odoo,datenbetrieb/odoo,markeTIC/OCB,janocat/odoo,NeovaHealth/odoo,shaufi/odoo,oliverhr/odoo,Ernesto99/odoo,ccomb/OpenUpgrade,hassoon3/odoo,ygol/odoo,feroda/odoo,klunwebale/odoo,oihane/odoo,lsinfo/odoo,nuuuboo/odoo,javierTerry/odoo,kirca/OpenUpgrade,camptocamp/ngo-addons-backport,janocat/odoo,QianBIG/odoo,poljeff/odoo,massot/odoo,havt/odoo,juanalfonsopr/odoo,sebalix/OpenUpgrade,sergio-incaser/odoo,javierTerry/odoo,salaria/odoo,gorjuce/odoo,vrenaville/ngo-addons-backport,damdam-s/OpenUpgrade,pplatek/odoo,ygol/odoo,apanju/odoo,kittiu/odoo,hanicker/odoo,hifly/OpenUpgrade,ApuliaSoftware/odoo,srsman/odoo,ccomb/OpenUpgrade,hubsaysnuaa/odoo,mvaled/OpenUpgrade,leorochael/odoo,RafaelTorrealba/odoo,OpenUpgrade/OpenUpgrade,prospwro/odoo,synconics/odoo,jiangzhixiao/odoo,jeasoft/odoo,Codefans-fan/odoo,naousse/odoo,rowemoore/odoo,sysadminmatmoz/OCB,ecosoft-odoo/odoo,SerpentCS/odoo,JCA-Developpement/Odoo,Drooids/odoo,tinkerthaler/odoo,dezynetechnologies/odoo,jusdng/odoo,podemos-info/odoo,jeasoft/odoo,BT-rmartin/odoo,abstract-open-solutions/OCB,jpshort/odoo,rubencabrera/odoo,jaxkodex/odoo,joariasl/odoo,BT-fgarbely/odoo,slevenhagen/odoo,CubicERP/odoo,rschnapka/odoo,CopeX/odoo,ihsanudin/odoo,TRESCLOUD/odoopub,JGarcia-Panach/odoo,glovebx/odoo,glovebx/odoo,apocalypsebg/odoo,Bachaco-ve/odoo,vnsofthe/odoo,OpusVL/odoo,pedrobaeza/odoo,gdgellatly/OCB1,ThinkOpen-Solutions/odoo,x111ong/odoo,sergio-incaser/odoo,nitinitprof/odoo,sve-odoo/odoo,fjbatresv/odoo,blaggacao/OpenUpgrade,sinbazhou/odoo,jpshort/odoo,leoliujie/odoo,gvb/odoo,credativUK/OCB,patmcb/odoo,rubencabrera/odoo,OpenUpgrade-dev/OpenUpgrade,markeTIC/OCB,oasiswork/odoo,mkieszek/odoo,mustafat/odoo-1,brijeshkesariya/odoo,syci/OCB,elmerdpadilla/iv,Eric-Zhong/odoo,bplancher/odoo,florian-dacosta/OpenUpgrade,cloud9UG/odoo,datenbetrieb/odoo,apanju/odoo,dsfsdgsbngfggb/odoo,ClearCorp-dev/odoo,nexiles/odoo,abstract-open-solutions/OCB,dfang/odoo,frouty/odoogoeen,NeovaHealth/odoo,demon-ru/iml-crm,christophlsa/odoo,CatsAndDogsbvba/odoo,draugiskisprendimai/odoo,fdvarela/odoo8,guewen/OpenUpgrade,Kilhog/odoo,mvaled/OpenUpgrade,grap/OCB,oasiswork/odoo,KontorConsulting/odoo,spadae22/odoo,feroda/odoo,Noviat/odoo,KontorConsulting/odoo,shaufi10/odoo,ihsanudin/odoo,lightcn/odoo,thanhacun/odoo,kybriainfotech/iSocioCRM,ovnicraft/odoo,ramadhane/odoo,makinacorpus/odoo,makinacorpus/odoo,tvibliani/odoo,guewen/OpenUpgrade,TRESCLOUD/odoopub,laslabs/odoo,apanju/odoo,waytai/odoo,Nowheresly/odoo,JonathanStein/odoo,MarcosCommunity/odoo,luiseduardohdbackup/odoo,dalegregory/odoo,blaggacao/OpenUpgrade,lightcn/odoo,alhashash/odoo,ojengwa/odoo,Maspear/odoo,havt/odoo,omprakasha/odoo,SAM-IT-SA/odoo,VielSoft/odoo,leorochael/odoo,rgeleta/odoo,ramadha
ne/odoo,mustafat/odoo-1,omprakasha/odoo,nitinitprof/odoo,florentx/OpenUpgrade,patmcb/odoo,sebalix/OpenUpgrade,ShineFan/odoo,kybriainfotech/iSocioCRM,bplancher/odoo,prospwro/odoo,alexcuellar/odoo,jeasoft/odoo,kirca/OpenUpgrade,shivam1111/odoo,ovnicraft/odoo,AuyaJackie/odoo,gorjuce/odoo,ovnicraft/odoo,funkring/fdoo,grap/OCB,Endika/odoo,ChanduERP/odoo,dsfsdgsbngfggb/odoo,CopeX/odoo,NeovaHealth/odoo,oliverhr/odoo,guerrerocarlos/odoo,goliveirab/odoo,kybriainfotech/iSocioCRM,lightcn/odoo,matrixise/odoo,frouty/odoogoeen,ygol/odoo,Grirrane/odoo,hmen89/odoo,lsinfo/odoo,podemos-info/odoo,storm-computers/odoo,Nick-OpusVL/odoo,slevenhagen/odoo-npg,ingadhoc/odoo,jaxkodex/odoo,jolevq/odoopub,Gitlab11/odoo,fgesora/odoo,eino-makitalo/odoo,tvibliani/odoo,ihsanudin/odoo,addition-it-solutions/project-all,slevenhagen/odoo-npg,idncom/odoo,leoliujie/odoo,lsinfo/odoo,nexiles/odoo,realsaiko/odoo,shivam1111/odoo,oliverhr/odoo,guerrerocarlos/odoo,JCA-Developpement/Odoo,doomsterinc/odoo,apanju/GMIO_Odoo,AuyaJackie/odoo,hbrunn/OpenUpgrade,credativUK/OCB,joariasl/odoo,bguillot/OpenUpgrade,gdgellatly/OCB1,rdeheele/odoo,Ichag/odoo,idncom/odoo,cedk/odoo,tinkerthaler/odoo,nagyistoce/odoo-dev-odoo,fdvarela/odoo8,tangyiyong/odoo,kirca/OpenUpgrade,ingadhoc/odoo,goliveirab/odoo,Eric-Zhong/odoo,avoinsystems/odoo,abstract-open-solutions/OCB,lgscofield/odoo,steedos/odoo,jusdng/odoo,fdvarela/odoo8,hopeall/odoo,ccomb/OpenUpgrade,numerigraphe/odoo,hanicker/odoo,credativUK/OCB,mkieszek/odoo,jeasoft/odoo,leoliujie/odoo,gvb/odoo,nhomar/odoo-mirror,shingonoide/odoo,Drooids/odoo,tvibliani/odoo,acshan/odoo,VitalPet/odoo,highco-groupe/odoo,gdgellatly/OCB1,JGarcia-Panach/odoo,MarcosCommunity/odoo,lgscofield/odoo,tinkerthaler/odoo,CopeX/odoo,gdgellatly/OCB1,mustafat/odoo-1,guewen/OpenUpgrade,poljeff/odoo,feroda/odoo,frouty/odoo_oph,dllsf/odootest,rgeleta/odoo,pedrobaeza/OpenUpgrade,KontorConsulting/odoo,slevenhagen/odoo,juanalfonsopr/odoo,deKupini/erp,ehirt/odoo,chiragjogi/odoo,JGarcia-Panach/odoo,gvb/odoo,lsinfo/odoo,charbeljc/OCB,minhtuancn/odoo,jusdng/odoo,tinkhaven-organization/odoo,markeTIC/OCB,NL66278/OCB,steedos/odoo,0k/OpenUpgrade,fuselock/odoo,vnsofthe/odoo,NeovaHealth/odoo,idncom/odoo,bobisme/odoo,frouty/odoo_oph,sysadminmatmoz/OCB,fuhongliang/odoo,savoirfairelinux/odoo,Kilhog/odoo
|
{
"name" : "Customer & Supplier Relationship Management",
"version" : "1.0",
"author" : "Tiny",
"website" : "http://tinyerp.com/module_crm.html",
"category" : "Generic Modules/CRM & SRM",
"description": """The Tiny ERP case and request tracker enables a group of
people to intelligently and efficiently manage tasks, issues, and requests.
It manages key tasks such as communication, identification, prioritization,
assignment, resolution and notification.
Tiny ERP ensures that all cases are successfly tracked by users, customers and
suppliers. It can automatically send reminders, escalate the request, trigger
specific methods and lots of others actions based on your enterprise own rules.
The greatest thing about this system is that users don't need to do anything
special. They can just send email to the request tracker. Tiny ERP will take
care of thanking them for their message, automatically routing it to the
appropriate staff, and making sure all future correspondence gets to the right
place.
The CRM module has a email gateway for the synchronisation interface
between mails and Tiny ERP.""",
"depends" : ["base", "account"],
"init_xml" : ["crm_data.xml"],
"demo_xml" : ["crm_demo.xml"],
- "update_xml" : ["crm_view.xml", "crm_report.xml", "crm_wizard.xml"],
+ "update_xml" : ["crm_view.xml", "crm_report.xml", "crm_wizard.xml","crm_security.xml"],
"active": False,
"installable": True
}
|
Add crm_security.xml file entry in update_xml section
|
## Code Before:
{
"name" : "Customer & Supplier Relationship Management",
"version" : "1.0",
"author" : "Tiny",
"website" : "http://tinyerp.com/module_crm.html",
"category" : "Generic Modules/CRM & SRM",
"description": """The Tiny ERP case and request tracker enables a group of
people to intelligently and efficiently manage tasks, issues, and requests.
It manages key tasks such as communication, identification, prioritization,
assignment, resolution and notification.
Tiny ERP ensures that all cases are successfly tracked by users, customers and
suppliers. It can automatically send reminders, escalate the request, trigger
specific methods and lots of others actions based on your enterprise own rules.
The greatest thing about this system is that users don't need to do anything
special. They can just send email to the request tracker. Tiny ERP will take
care of thanking them for their message, automatically routing it to the
appropriate staff, and making sure all future correspondence gets to the right
place.
The CRM module has a email gateway for the synchronisation interface
between mails and Tiny ERP.""",
"depends" : ["base", "account"],
"init_xml" : ["crm_data.xml"],
"demo_xml" : ["crm_demo.xml"],
"update_xml" : ["crm_view.xml", "crm_report.xml", "crm_wizard.xml"],
"active": False,
"installable": True
}
## Instruction:
Add crm_security.xml file entry in update_xml section
## Code After:
{
"name" : "Customer & Supplier Relationship Management",
"version" : "1.0",
"author" : "Tiny",
"website" : "http://tinyerp.com/module_crm.html",
"category" : "Generic Modules/CRM & SRM",
"description": """The Tiny ERP case and request tracker enables a group of
people to intelligently and efficiently manage tasks, issues, and requests.
It manages key tasks such as communication, identification, prioritization,
assignment, resolution and notification.
Tiny ERP ensures that all cases are successfly tracked by users, customers and
suppliers. It can automatically send reminders, escalate the request, trigger
specific methods and lots of others actions based on your enterprise own rules.
The greatest thing about this system is that users don't need to do anything
special. They can just send email to the request tracker. Tiny ERP will take
care of thanking them for their message, automatically routing it to the
appropriate staff, and making sure all future correspondence gets to the right
place.
The CRM module has a email gateway for the synchronisation interface
between mails and Tiny ERP.""",
"depends" : ["base", "account"],
"init_xml" : ["crm_data.xml"],
"demo_xml" : ["crm_demo.xml"],
"update_xml" : ["crm_view.xml", "crm_report.xml", "crm_wizard.xml","crm_security.xml"],
"active": False,
"installable": True
}
|
{
"name" : "Customer & Supplier Relationship Management",
"version" : "1.0",
"author" : "Tiny",
"website" : "http://tinyerp.com/module_crm.html",
"category" : "Generic Modules/CRM & SRM",
"description": """The Tiny ERP case and request tracker enables a group of
people to intelligently and efficiently manage tasks, issues, and requests.
It manages key tasks such as communication, identification, prioritization,
assignment, resolution and notification.
Tiny ERP ensures that all cases are successfly tracked by users, customers and
suppliers. It can automatically send reminders, escalate the request, trigger
specific methods and lots of others actions based on your enterprise own rules.
The greatest thing about this system is that users don't need to do anything
special. They can just send email to the request tracker. Tiny ERP will take
care of thanking them for their message, automatically routing it to the
appropriate staff, and making sure all future correspondence gets to the right
place.
The CRM module has a email gateway for the synchronisation interface
between mails and Tiny ERP.""",
"depends" : ["base", "account"],
"init_xml" : ["crm_data.xml"],
"demo_xml" : ["crm_demo.xml"],
- "update_xml" : ["crm_view.xml", "crm_report.xml", "crm_wizard.xml"],
+ "update_xml" : ["crm_view.xml", "crm_report.xml", "crm_wizard.xml","crm_security.xml"],
? +++++++++++++++++++
"active": False,
"installable": True
}
|
e45b3d3a2428d3703260c25b4275359bf6786a37
|
launcher.py
|
launcher.py
|
from pract2d.game import gamemanager
if __name__ == '__main__':
game = gamemanager.GameManager()
game.run()
|
from pract2d.game import gamemanager
from pract2d.core import files
from platform import system
import os
if __name__ == '__main__':
try:
if system() == 'Windows' or not os.environ["PYSDL2_DLL_PATH"]:
os.environ["PYSDL2_DLL_PATH"] = files.get_path()
except KeyError:
pass
game = gamemanager.GameManager()
game.run()
|
Set the default sdl2 library locations.
|
Set the default sdl2 library locations.
|
Python
|
bsd-2-clause
|
mdsitton/pract2d
|
from pract2d.game import gamemanager
+ from pract2d.core import files
+ from platform import system
+ import os
if __name__ == '__main__':
+ try:
+ if system() == 'Windows' or not os.environ["PYSDL2_DLL_PATH"]:
+ os.environ["PYSDL2_DLL_PATH"] = files.get_path()
+ except KeyError:
+ pass
game = gamemanager.GameManager()
game.run()
|
Set the default sdl2 library locations.
|
## Code Before:
from pract2d.game import gamemanager
if __name__ == '__main__':
game = gamemanager.GameManager()
game.run()
## Instruction:
Set the default sdl2 library locations.
## Code After:
from pract2d.game import gamemanager
from pract2d.core import files
from platform import system
import os
if __name__ == '__main__':
try:
if system() == 'Windows' or not os.environ["PYSDL2_DLL_PATH"]:
os.environ["PYSDL2_DLL_PATH"] = files.get_path()
except KeyError:
pass
game = gamemanager.GameManager()
game.run()
|
from pract2d.game import gamemanager
+ from pract2d.core import files
+ from platform import system
+ import os
if __name__ == '__main__':
+ try:
+ if system() == 'Windows' or not os.environ["PYSDL2_DLL_PATH"]:
+ os.environ["PYSDL2_DLL_PATH"] = files.get_path()
+ except KeyError:
+ pass
game = gamemanager.GameManager()
game.run()
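The try/except KeyError above only forces the variable on Windows, or when it is already set but empty; on other platforms an unset variable raises KeyError on the lookup and is left alone. A small equivalent sketch without the exception, with a hypothetical directory standing in for files.get_path():

import os
from platform import system

def ensure_sdl2_path(default_path):
    # Mirrors the launcher's logic: Windows always gets the default,
    # other platforms only when the variable is present but empty.
    current = os.environ.get("PYSDL2_DLL_PATH")   # None when unset
    if system() == "Windows" or current == "":
        os.environ["PYSDL2_DLL_PATH"] = default_path

ensure_sdl2_path("/opt/sdl2/lib")   # hypothetical location of the SDL2 shared library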
|
c246f0e9add0a5b6d7fce9b9e2107671440b5f90
|
mica/starcheck/tests/make_database.py
|
mica/starcheck/tests/make_database.py
|
import os
import tempfile
from Chandra.Time import DateTime
from Ska.Shell import bash
import mica.common
# Override MICA_ARCHIVE with a temporary directory
TESTDIR = tempfile.mkdtemp()
mica.common.MICA_ARCHIVE = TESTDIR
# import mica.starcheck.starcheck after setting MICA_ARCHIVE
import mica.starcheck.process
# Just ingest files from the last couple of weeks or so
# This still uses the silly find files newer than this other file method, so
# set the time stamp on that reference file
if not os.path.exists(os.path.join(TESTDIR, 'starcheck')):
os.makedirs(os.path.join(TESTDIR, 'starcheck'))
bash("touch -d {} {}".format(DateTime(-15).iso, mica.starcheck.process.FILES['touch_file']))
# And just check that the update script didn't raise any exceptions
mica.starcheck.process.update()
# Cleanup manually
bash("rm -r {}".format(TESTDIR))
|
import os
import tempfile
from Chandra.Time import DateTime
from Ska.Shell import bash
import mica.common
# Override MICA_ARCHIVE with a temporary directory
TESTDIR = tempfile.mkdtemp()
mica.common.MICA_ARCHIVE = TESTDIR
# import mica.starcheck.starcheck after setting MICA_ARCHIVE
import mica.starcheck.process
# Just ingest files from the last couple of weeks or so
# And just check (implicitly) that the update script didn't raise any exceptions
config = mica.starcheck.process.DEFAULT_CONFIG
config['start'] = DateTime() - 30
mica.starcheck.process.update(config)
# Cleanup manually
bash("rm -r {}".format(TESTDIR))
|
Update test script to use a provided start time
|
Update test script to use a provided start time
|
Python
|
bsd-3-clause
|
sot/mica,sot/mica
|
import os
import tempfile
from Chandra.Time import DateTime
from Ska.Shell import bash
import mica.common
# Override MICA_ARCHIVE with a temporary directory
TESTDIR = tempfile.mkdtemp()
mica.common.MICA_ARCHIVE = TESTDIR
# import mica.starcheck.starcheck after setting MICA_ARCHIVE
import mica.starcheck.process
# Just ingest files from the last couple of weeks or so
- # This still uses the silly find files newer than this other file method, so
- # set the time stamp on that reference file
- if not os.path.exists(os.path.join(TESTDIR, 'starcheck')):
- os.makedirs(os.path.join(TESTDIR, 'starcheck'))
- bash("touch -d {} {}".format(DateTime(-15).iso, mica.starcheck.process.FILES['touch_file']))
- # And just check that the update script didn't raise any exceptions
+ # And just check (implicitly) that the update script didn't raise any exceptions
+ config = mica.starcheck.process.DEFAULT_CONFIG
+ config['start'] = DateTime() - 30
- mica.starcheck.process.update()
+ mica.starcheck.process.update(config)
# Cleanup manually
bash("rm -r {}".format(TESTDIR))
|
Update test script to use a provided start time
|
## Code Before:
import os
import tempfile
from Chandra.Time import DateTime
from Ska.Shell import bash
import mica.common
# Override MICA_ARCHIVE with a temporary directory
TESTDIR = tempfile.mkdtemp()
mica.common.MICA_ARCHIVE = TESTDIR
# import mica.starcheck.starcheck after setting MICA_ARCHIVE
import mica.starcheck.process
# Just ingest files from the last couple of weeks or so
# This still uses the silly find files newer than this other file method, so
# set the time stamp on that reference file
if not os.path.exists(os.path.join(TESTDIR, 'starcheck')):
os.makedirs(os.path.join(TESTDIR, 'starcheck'))
bash("touch -d {} {}".format(DateTime(-15).iso, mica.starcheck.process.FILES['touch_file']))
# And just check that the update script didn't raise any exceptions
mica.starcheck.process.update()
# Cleanup manually
bash("rm -r {}".format(TESTDIR))
## Instruction:
Update test script to use a provided start time
## Code After:
import os
import tempfile
from Chandra.Time import DateTime
from Ska.Shell import bash
import mica.common
# Override MICA_ARCHIVE with a temporary directory
TESTDIR = tempfile.mkdtemp()
mica.common.MICA_ARCHIVE = TESTDIR
# import mica.starcheck.starcheck after setting MICA_ARCHIVE
import mica.starcheck.process
# Just ingest files from the last couple of weeks or so
# And just check (implicitly) that the update script didn't raise any exceptions
config = mica.starcheck.process.DEFAULT_CONFIG
config['start'] = DateTime() - 30
mica.starcheck.process.update(config)
# Cleanup manually
bash("rm -r {}".format(TESTDIR))
|
import os
import tempfile
from Chandra.Time import DateTime
from Ska.Shell import bash
import mica.common
# Override MICA_ARCHIVE with a temporary directory
TESTDIR = tempfile.mkdtemp()
mica.common.MICA_ARCHIVE = TESTDIR
# import mica.starcheck.starcheck after setting MICA_ARCHIVE
import mica.starcheck.process
# Just ingest files from the last couple of weeks or so
- # This still uses the silly find files newer than this other file method, so
- # set the time stamp on that reference file
- if not os.path.exists(os.path.join(TESTDIR, 'starcheck')):
- os.makedirs(os.path.join(TESTDIR, 'starcheck'))
- bash("touch -d {} {}".format(DateTime(-15).iso, mica.starcheck.process.FILES['touch_file']))
- # And just check that the update script didn't raise any exceptions
+ # And just check (implicitly) that the update script didn't raise any exceptions
? +++++++++++++
+ config = mica.starcheck.process.DEFAULT_CONFIG
+ config['start'] = DateTime() - 30
- mica.starcheck.process.update()
+ mica.starcheck.process.update(config)
? ++++++
# Cleanup manually
bash("rm -r {}".format(TESTDIR))
|
dbe7c01ed649abb1cbd8efe07a6633951cb1943e
|
tests/integration/states/test_handle_error.py
|
tests/integration/states/test_handle_error.py
|
'''
tests for host state
'''
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing libs
from tests.support.case import ModuleCase
class HandleErrorTest(ModuleCase):
'''
Validate that ordering works correctly
'''
def test_handle_error(self):
'''
Test how an error can be recovered
'''
# without sync_states, the custom state may not be installed
# (resulting in :
# State salttest.hello found in sls issue-... is unavailable
ret = self.run_function('state.sls', ['issue-9983-handleerror'])
self.assertTrue(
'An exception occurred in this state: Traceback'
in ret[[a for a in ret][0]]['comment'])
|
'''
tests for host state
'''
# Import Python libs
from __future__ import absolute_import, unicode_literals
# Import Salt Testing libs
from tests.support.case import ModuleCase
class HandleErrorTest(ModuleCase):
'''
Validate that ordering works correctly
'''
def test_function_do_not_return_dictionary_type(self):
'''
Handling a case when function returns anything but a dictionary type
'''
ret = self.run_function('state.sls', ['issue-9983-handleerror'])
self.assertTrue('Data must be a dictionary type' in ret[[a for a in ret][0]]['comment'])
self.assertTrue(not ret[[a for a in ret][0]]['result'])
self.assertTrue(ret[[a for a in ret][0]]['changes'] == {})
|
Update integration test: docs, add more checks, rename
|
Update integration test: docs, add more checks, rename
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
'''
tests for host state
'''
# Import Python libs
- from __future__ import absolute_import
+ from __future__ import absolute_import, unicode_literals
# Import Salt Testing libs
from tests.support.case import ModuleCase
class HandleErrorTest(ModuleCase):
'''
Validate that ordering works correctly
'''
- def test_handle_error(self):
+ def test_function_do_not_return_dictionary_type(self):
'''
- Test how an error can be recovered
+ Handling a case when function returns anything but a dictionary type
'''
- # without sync_states, the custom state may not be installed
- # (resulting in :
- # State salttest.hello found in sls issue-... is unavailable
ret = self.run_function('state.sls', ['issue-9983-handleerror'])
- self.assertTrue(
- 'An exception occurred in this state: Traceback'
- in ret[[a for a in ret][0]]['comment'])
+ self.assertTrue('Data must be a dictionary type' in ret[[a for a in ret][0]]['comment'])
+ self.assertTrue(not ret[[a for a in ret][0]]['result'])
+ self.assertTrue(ret[[a for a in ret][0]]['changes'] == {})
|
Update integration test: docs, add more checks, rename
|
## Code Before:
'''
tests for host state
'''
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing libs
from tests.support.case import ModuleCase
class HandleErrorTest(ModuleCase):
'''
Validate that ordering works correctly
'''
def test_handle_error(self):
'''
Test how an error can be recovered
'''
# without sync_states, the custom state may not be installed
# (resulting in :
# State salttest.hello found in sls issue-... is unavailable
ret = self.run_function('state.sls', ['issue-9983-handleerror'])
self.assertTrue(
'An exception occurred in this state: Traceback'
in ret[[a for a in ret][0]]['comment'])
## Instruction:
Update integration test: docs, add more checks, rename
## Code After:
'''
tests for host state
'''
# Import Python libs
from __future__ import absolute_import, unicode_literals
# Import Salt Testing libs
from tests.support.case import ModuleCase
class HandleErrorTest(ModuleCase):
'''
Validate that ordering works correctly
'''
def test_function_do_not_return_dictionary_type(self):
'''
Handling a case when function returns anything but a dictionary type
'''
ret = self.run_function('state.sls', ['issue-9983-handleerror'])
self.assertTrue('Data must be a dictionary type' in ret[[a for a in ret][0]]['comment'])
self.assertTrue(not ret[[a for a in ret][0]]['result'])
self.assertTrue(ret[[a for a in ret][0]]['changes'] == {})
|
'''
tests for host state
'''
# Import Python libs
- from __future__ import absolute_import
+ from __future__ import absolute_import, unicode_literals
? ++++++++++++++++++
# Import Salt Testing libs
from tests.support.case import ModuleCase
class HandleErrorTest(ModuleCase):
'''
Validate that ordering works correctly
'''
- def test_handle_error(self):
+ def test_function_do_not_return_dictionary_type(self):
'''
- Test how an error can be recovered
+ Handling a case when function returns anything but a dictionary type
'''
- # without sync_states, the custom state may not be installed
- # (resulting in :
- # State salttest.hello found in sls issue-... is unavailable
ret = self.run_function('state.sls', ['issue-9983-handleerror'])
- self.assertTrue(
- 'An exception occurred in this state: Traceback'
- in ret[[a for a in ret][0]]['comment'])
+ self.assertTrue('Data must be a dictionary type' in ret[[a for a in ret][0]]['comment'])
+ self.assertTrue(not ret[[a for a in ret][0]]['result'])
+ self.assertTrue(ret[[a for a in ret][0]]['changes'] == {})
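ret in these assertions is a dict keyed by state IDs, and ret[[a for a in ret][0]] simply reads the value under the first key; a self-contained sketch of the same lookup on a hand-built return value (the key and field values are invented for illustration):

# Real state returns use keys shaped like 'module_|-id_|-name_|-function'.
ret = {
    'salttest_|-hello_|-hello_|-hello': {
        'comment': 'Data must be a dictionary type',
        'result': False,
        'changes': {},
    },
}

first = ret[next(iter(ret))]        # equivalent to ret[[a for a in ret][0]]
assert 'Data must be a dictionary type' in first['comment']
assert not first['result']
assert first['changes'] == {}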
|
51533420b6422515ea10fb323cb318c104a99650
|
pypi/models.py
|
pypi/models.py
|
from django.db import models
class Package(models.Model):
name = models.CharField(max_length=100)
version = models.CharField(max_length=100)
released_at = models.DateTimeField()
class Meta:
get_latest_by = 'released_at'
unique_together = ('name', 'version')
def __unicode__(self):
return "%s %s" % (self.name, self.version)
|
from django.db import models
class Package(models.Model):
name = models.CharField(max_length=100)
version = models.CharField(max_length=100)
released_at = models.DateTimeField()
class Meta:
get_latest_by = 'released_at'
ordering = ('-version',)
unique_together = ('name', 'version')
def __unicode__(self):
return "%s %s" % (self.name, self.version)
|
Order by version instead, it should mostly be what we want.
|
Order by version instead, it should mostly be what we want.
|
Python
|
mit
|
kitsunde/django-pypi
|
from django.db import models
class Package(models.Model):
name = models.CharField(max_length=100)
version = models.CharField(max_length=100)
released_at = models.DateTimeField()
class Meta:
get_latest_by = 'released_at'
+ ordering = ('-version',)
unique_together = ('name', 'version')
def __unicode__(self):
return "%s %s" % (self.name, self.version)
|
Order by version instead, it should mostly be what we want.
|
## Code Before:
from django.db import models
class Package(models.Model):
name = models.CharField(max_length=100)
version = models.CharField(max_length=100)
released_at = models.DateTimeField()
class Meta:
get_latest_by = 'released_at'
unique_together = ('name', 'version')
def __unicode__(self):
return "%s %s" % (self.name, self.version)
## Instruction:
Order by version instead, it should mostly be what we want.
## Code After:
from django.db import models
class Package(models.Model):
name = models.CharField(max_length=100)
version = models.CharField(max_length=100)
released_at = models.DateTimeField()
class Meta:
get_latest_by = 'released_at'
ordering = ('-version',)
unique_together = ('name', 'version')
def __unicode__(self):
return "%s %s" % (self.name, self.version)
|
from django.db import models
class Package(models.Model):
name = models.CharField(max_length=100)
version = models.CharField(max_length=100)
released_at = models.DateTimeField()
class Meta:
get_latest_by = 'released_at'
+ ordering = ('-version',)
unique_together = ('name', 'version')
def __unicode__(self):
return "%s %s" % (self.name, self.version)
|
d6c4a38e172894a2240a658fe73ea9816e89cd03
|
deduplicated/web/__init__.py
|
deduplicated/web/__init__.py
|
from flask import Flask, render_template
import jinja2
from .. import Directory, directory_list, str_size
# Init app
jinja2.filters.FILTERS['str_size'] = str_size
app = Flask(__name__)
# Pages
@app.route('/')
def dirlist():
directories = [(d[0], Directory(d[1])) for d in directory_list()]
directories.sort(key=lambda d: str(d[1]).lower())
return render_template('dirlist.html', directories=directories)
# Run
def main():
import sys
from gunicorn.app.wsgiapp import run
sys.argv = ['gunicorn',
'--access-logfile=-',
'--error-logfile=-',
'-b', '127.0.0.1:5050',
'deduplicated.web:app']
run()
|
from flask import Flask, redirect, render_template, request
import jinja2
from .. import Directory, directory_list, str_size
# Init app
jinja2.filters.FILTERS['str_size'] = str_size
app = Flask(__name__)
# Pages
@app.route('/')
def dirlist():
directories = [(d[0], Directory(d[1])) for d in directory_list()]
directories.sort(key=lambda d: str(d[1]).lower())
return render_template('dirlist.html', directories=directories)
@app.route('/dir/add', methods=['post'])
def diradd():
dirname = request.form.get('directory', '')
if dirname:
Directory(dirname)
return redirect('/')
# Run
def main():
import sys
from gunicorn.app.wsgiapp import run
sys.argv = ['gunicorn',
'--access-logfile=-',
'--error-logfile=-',
'-b', '127.0.0.1:5050',
'deduplicated.web:app']
run()
|
Add web function for add new directory
|
Add web function for add new directory
|
Python
|
mit
|
eduardoklosowski/deduplicated,eduardoklosowski/deduplicated
|
- from flask import Flask, render_template
+ from flask import Flask, redirect, render_template, request
import jinja2
from .. import Directory, directory_list, str_size
# Init app
jinja2.filters.FILTERS['str_size'] = str_size
app = Flask(__name__)
# Pages
@app.route('/')
def dirlist():
directories = [(d[0], Directory(d[1])) for d in directory_list()]
directories.sort(key=lambda d: str(d[1]).lower())
return render_template('dirlist.html', directories=directories)
+ @app.route('/dir/add', methods=['post'])
+ def diradd():
+ dirname = request.form.get('directory', '')
+ if dirname:
+ Directory(dirname)
+ return redirect('/')
+
+
# Run
def main():
import sys
from gunicorn.app.wsgiapp import run
sys.argv = ['gunicorn',
'--access-logfile=-',
'--error-logfile=-',
'-b', '127.0.0.1:5050',
'deduplicated.web:app']
run()
|
Add web function for add new directory
|
## Code Before:
from flask import Flask, render_template
import jinja2
from .. import Directory, directory_list, str_size
# Init app
jinja2.filters.FILTERS['str_size'] = str_size
app = Flask(__name__)
# Pages
@app.route('/')
def dirlist():
directories = [(d[0], Directory(d[1])) for d in directory_list()]
directories.sort(key=lambda d: str(d[1]).lower())
return render_template('dirlist.html', directories=directories)
# Run
def main():
import sys
from gunicorn.app.wsgiapp import run
sys.argv = ['gunicorn',
'--access-logfile=-',
'--error-logfile=-',
'-b', '127.0.0.1:5050',
'deduplicated.web:app']
run()
## Instruction:
Add web function for add new directory
## Code After:
from flask import Flask, redirect, render_template, request
import jinja2
from .. import Directory, directory_list, str_size
# Init app
jinja2.filters.FILTERS['str_size'] = str_size
app = Flask(__name__)
# Pages
@app.route('/')
def dirlist():
directories = [(d[0], Directory(d[1])) for d in directory_list()]
directories.sort(key=lambda d: str(d[1]).lower())
return render_template('dirlist.html', directories=directories)
@app.route('/dir/add', methods=['post'])
def diradd():
dirname = request.form.get('directory', '')
if dirname:
Directory(dirname)
return redirect('/')
# Run
def main():
import sys
from gunicorn.app.wsgiapp import run
sys.argv = ['gunicorn',
'--access-logfile=-',
'--error-logfile=-',
'-b', '127.0.0.1:5050',
'deduplicated.web:app']
run()
|
- from flask import Flask, render_template
+ from flask import Flask, redirect, render_template, request
? ++++++++++ +++++++++
import jinja2
from .. import Directory, directory_list, str_size
# Init app
jinja2.filters.FILTERS['str_size'] = str_size
app = Flask(__name__)
# Pages
@app.route('/')
def dirlist():
directories = [(d[0], Directory(d[1])) for d in directory_list()]
directories.sort(key=lambda d: str(d[1]).lower())
return render_template('dirlist.html', directories=directories)
+ @app.route('/dir/add', methods=['post'])
+ def diradd():
+ dirname = request.form.get('directory', '')
+ if dirname:
+ Directory(dirname)
+ return redirect('/')
+
+
# Run
def main():
import sys
from gunicorn.app.wsgiapp import run
sys.argv = ['gunicorn',
'--access-logfile=-',
'--error-logfile=-',
'-b', '127.0.0.1:5050',
'deduplicated.web:app']
run()
|
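Editor's note: a minimal way to exercise the new /dir/add route is Flask's test client. This sketch is not part of the original commit; the import path deduplicated.web is inferred from the gunicorn argument above, and Directory() may touch the filesystem or an index, so the directory name here is purely illustrative.
from deduplicated.web import app  # assumed import path (gunicorn loads deduplicated.web:app)
client = app.test_client()
# Posting a directory name runs Directory(dirname) and redirects back to the index (302).
print(client.post('/dir/add', data={'directory': '/tmp/photos'}).status_code)
# An empty field skips Directory() entirely but still redirects.
print(client.post('/dir/add', data={}).status_code)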
1b58fed32fe583863812613604383eb9d8821ee1
|
tools/sci.py
|
tools/sci.py
|
from __future__ import division, print_function
import numpy as np
from scipy.integrate import ode
def zodeint(func, y0, t):
"""Simple wraper around scipy.integrate.ode for complex valued problems.
:param func: Right hand side of the equation dy/dt = f(t, y)
:param y0: Initial value at t = t[0]
:param t: Sequence of time points for whihc to solve for y
:returns: y[len(t), len(y0)]
"""
integrator = ode(func) \
.set_integrator('zvode', with_jacobian=False) \
.set_initial_value(y0)
y = np.empty((len(t), len(y0)), dtype=complex)
y[0] = y0
for i in xrange(1, len(t)):
integrator.integrate(t[i])
if not integrator.successful():
print('WARNING: Integrator failed')
break
y[i] = integrator.y
return t[:i+1], y[:i+1]
|
from __future__ import division, print_function
import numpy as np
from scipy.integrate import ode
def zodeint(func, y0, t, **kwargs):
"""Simple wraper around scipy.integrate.ode for complex valued problems.
:param func: Right hand side of the equation dy/dt = f(t, y)
:param y0: Initial value at t = t[0]
:param t: Sequence of time points for whihc to solve for y
:returns: y[len(t), len(y0)]
"""
y0 = np.array([y0]) if np.isscalar(y0) else y0
integrator = ode(func) \
.set_integrator('zvode', with_jacobian=False, **kwargs) \
.set_initial_value(y0)
y = np.empty((len(t), len(y0)), dtype=complex)
y[0] = y0
for i in xrange(1, len(t)):
integrator.integrate(t[i])
if not integrator.successful():
print('WARNING: Integrator failed')
break
y[i] = integrator.y
return t[:i+1], y[:i+1]
|
Correct complex integrator for scalar equations
|
Correct complex integrator for scalar equations
|
Python
|
unlicense
|
dseuss/pythonlibs
|
from __future__ import division, print_function
import numpy as np
from scipy.integrate import ode
- def zodeint(func, y0, t):
+ def zodeint(func, y0, t, **kwargs):
"""Simple wraper around scipy.integrate.ode for complex valued problems.
:param func: Right hand side of the equation dy/dt = f(t, y)
:param y0: Initial value at t = t[0]
:param t: Sequence of time points for whihc to solve for y
:returns: y[len(t), len(y0)]
"""
+ y0 = np.array([y0]) if np.isscalar(y0) else y0
integrator = ode(func) \
- .set_integrator('zvode', with_jacobian=False) \
+ .set_integrator('zvode', with_jacobian=False, **kwargs) \
.set_initial_value(y0)
y = np.empty((len(t), len(y0)), dtype=complex)
y[0] = y0
for i in xrange(1, len(t)):
integrator.integrate(t[i])
if not integrator.successful():
print('WARNING: Integrator failed')
break
y[i] = integrator.y
return t[:i+1], y[:i+1]
|
Correct complex integrator for scalar equations
|
## Code Before:
from __future__ import division, print_function
import numpy as np
from scipy.integrate import ode
def zodeint(func, y0, t):
"""Simple wraper around scipy.integrate.ode for complex valued problems.
:param func: Right hand side of the equation dy/dt = f(t, y)
:param y0: Initial value at t = t[0]
:param t: Sequence of time points for whihc to solve for y
:returns: y[len(t), len(y0)]
"""
integrator = ode(func) \
.set_integrator('zvode', with_jacobian=False) \
.set_initial_value(y0)
y = np.empty((len(t), len(y0)), dtype=complex)
y[0] = y0
for i in xrange(1, len(t)):
integrator.integrate(t[i])
if not integrator.successful():
print('WARNING: Integrator failed')
break
y[i] = integrator.y
return t[:i+1], y[:i+1]
## Instruction:
Correct complex integrator for scalar equations
## Code After:
from __future__ import division, print_function
import numpy as np
from scipy.integrate import ode
def zodeint(func, y0, t, **kwargs):
"""Simple wraper around scipy.integrate.ode for complex valued problems.
:param func: Right hand side of the equation dy/dt = f(t, y)
:param y0: Initial value at t = t[0]
:param t: Sequence of time points for whihc to solve for y
:returns: y[len(t), len(y0)]
"""
y0 = np.array([y0]) if np.isscalar(y0) else y0
integrator = ode(func) \
.set_integrator('zvode', with_jacobian=False, **kwargs) \
.set_initial_value(y0)
y = np.empty((len(t), len(y0)), dtype=complex)
y[0] = y0
for i in xrange(1, len(t)):
integrator.integrate(t[i])
if not integrator.successful():
print('WARNING: Integrator failed')
break
y[i] = integrator.y
return t[:i+1], y[:i+1]
|
from __future__ import division, print_function
import numpy as np
from scipy.integrate import ode
- def zodeint(func, y0, t):
+ def zodeint(func, y0, t, **kwargs):
? ++++++++++
"""Simple wraper around scipy.integrate.ode for complex valued problems.
:param func: Right hand side of the equation dy/dt = f(t, y)
:param y0: Initial value at t = t[0]
:param t: Sequence of time points for whihc to solve for y
:returns: y[len(t), len(y0)]
"""
+ y0 = np.array([y0]) if np.isscalar(y0) else y0
integrator = ode(func) \
- .set_integrator('zvode', with_jacobian=False) \
+ .set_integrator('zvode', with_jacobian=False, **kwargs) \
? ++++++++++
.set_initial_value(y0)
y = np.empty((len(t), len(y0)), dtype=complex)
y[0] = y0
for i in xrange(1, len(t)):
integrator.integrate(t[i])
if not integrator.successful():
print('WARNING: Integrator failed')
break
y[i] = integrator.y
return t[:i+1], y[:i+1]
|
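Editor's note: a small usage sketch of the corrected zodeint, not part of the original commit. The import path is hypothetical (the file lives at tools/sci.py), and the module's use of xrange implies Python 2.
import numpy as np
from sci import zodeint  # hypothetical import path for tools/sci.py
# A plain complex scalar initial value now works: dy/dt = -1j*y, y(0) = 1.
t = np.linspace(0, 1, 11)
t_out, y_out = zodeint(lambda t, y: -1j * y, 1.0 + 0.0j, t, atol=1e-10)  # kwargs reach set_integrator
print(y_out[-1])  # expected to be close to exp(-1j * t_out[-1])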
3d5fd3233ecaf2bae5fbd5a1ae349c55d2f4cdc7
|
scistack/scistack.py
|
scistack/scistack.py
|
import os
import flask
app = flask.Flask(__name__)
@app.route("/")
def hello():
return "Choose a domain!"
if __name__ == "__main__":
app.run()
|
import os
import flask
import inspect
app = flask.Flask(__name__, static_url_path='')
# Home of any pre-build docker files
docker_file_path = os.path.join(os.path.dirname(os.path.abspath(
inspect.getfile(inspect.currentframe()))), "..", "dockerfiles")
@app.route("/")
def hello():
return "Choose a domain!"
@app.route("/dfview/<path:fname>")
def send_dockerfile(fname):
with open(os.path.join(docker_file_path, fname)) as dfile:
return_value = dfile.read()
return return_value
if __name__ == "__main__":
app.run()
|
Return pre-built dockerfile to user
|
Return pre-built dockerfile to user
|
Python
|
mit
|
callaghanmt/research-stacks,callaghanmt/research-stacks,callaghanmt/research-stacks
|
import os
import flask
+ import inspect
- app = flask.Flask(__name__)
+ app = flask.Flask(__name__, static_url_path='')
+
+ # Home of any pre-build docker files
+ docker_file_path = os.path.join(os.path.dirname(os.path.abspath(
+ inspect.getfile(inspect.currentframe()))), "..", "dockerfiles")
@app.route("/")
def hello():
return "Choose a domain!"
+ @app.route("/dfview/<path:fname>")
+ def send_dockerfile(fname):
+ with open(os.path.join(docker_file_path, fname)) as dfile:
+ return_value = dfile.read()
+ return return_value
+
if __name__ == "__main__":
app.run()
|
Return pre-built dockerfile to user
|
## Code Before:
import os
import flask
app = flask.Flask(__name__)
@app.route("/")
def hello():
return "Choose a domain!"
if __name__ == "__main__":
app.run()
## Instruction:
Return pre-built dockerfile to user
## Code After:
import os
import flask
import inspect
app = flask.Flask(__name__, static_url_path='')
# Home of any pre-build docker files
docker_file_path = os.path.join(os.path.dirname(os.path.abspath(
inspect.getfile(inspect.currentframe()))), "..", "dockerfiles")
@app.route("/")
def hello():
return "Choose a domain!"
@app.route("/dfview/<path:fname>")
def send_dockerfile(fname):
with open(os.path.join(docker_file_path, fname)) as dfile:
return_value = dfile.read()
return return_value
if __name__ == "__main__":
app.run()
|
import os
import flask
+ import inspect
- app = flask.Flask(__name__)
+ app = flask.Flask(__name__, static_url_path='')
+
+ # Home of any pre-build docker files
+ docker_file_path = os.path.join(os.path.dirname(os.path.abspath(
+ inspect.getfile(inspect.currentframe()))), "..", "dockerfiles")
@app.route("/")
def hello():
return "Choose a domain!"
+ @app.route("/dfview/<path:fname>")
+ def send_dockerfile(fname):
+ with open(os.path.join(docker_file_path, fname)) as dfile:
+ return_value = dfile.read()
+ return return_value
+
if __name__ == "__main__":
app.run()
|
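Editor's note: an illustrative check of the new /dfview route using Flask's test client, not part of the original commit. The file name below is hypothetical, and the handler does not (and this sketch does not) guard against '..' in the requested path.
import os
from scistack.scistack import app, docker_file_path  # assumed import path
client = app.test_client()
fname = 'base/Dockerfile'  # hypothetical file under ../dockerfiles
if os.path.exists(os.path.join(docker_file_path, fname)):
    body = client.get('/dfview/' + fname).get_data(as_text=True)
    print(body.splitlines()[0])  # e.g. the FROM line of the Dockerfile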
c908c943f66468f91cb8abb450bca36ead731885
|
test_app.py
|
test_app.py
|
import unittest
from unittest import TestCase
from user import User
from bucketlist import BucketList
from flask import url_for
from app import app
class BucketListTest(TestCase):
def setUp(self):
# creates a test client
self.client = app.test_client()
self.client.testing = True
def test_success(self):
# sends HTTP GET request to the application
# on the specified path
result = self.client.get('/login')
self.assertEqual(result.status_code, 200)
def test_failure(self):
# sends HTTP GET request to the application
# on the specified path
result = self.client.get('/nonexistant.html')
self.assertEqual(result.status_code, 404)
def test_login_page_loads(self):
# assert login page loads correctly
result = self.client.get('/login')
self.assertTrue(b'The best way to keep track of your dreams and goals' in result.data)
'''
def test_signup(self):
# register a new account
response = self.client.post(url_for('/signup'), data={
'username': 'hermano',
'email': '[email protected]',
'password': 'hard',
'confirm_password': 'hard'
})
self.assertTrue(response.status_code == 302)
'''
if __name__ == '__main__':
unittest.main()
|
import unittest
from unittest import TestCase
from user import User
from bucketlist import BucketList
from flask import url_for, session
from app import app
class BucketListTest(TestCase):
def setUp(self):
app.config['SECRET_KEY'] = 'seasasaskrit!'
# creates a test client
self.client = app.test_client()
self.client.testing = True
def test_success(self):
# sends HTTP GET request to the application
# on the specified path
result = self.client.get('/login')
self.assertEqual(result.status_code, 200)
def test_failure(self):
# sends HTTP GET request to the application
# on the specified path
result = self.client.get('/nonexistant.html')
self.assertEqual(result.status_code, 404)
def test_login_page_loads(self):
# assert login page loads correctly
result = self.client.get('/login')
self.assertTrue(b'The best way to keep track of your dreams and goals' in result.data)
def test_sign_page_posts_and_redirects(self):
result = self.client.post('signup', data={
'username': 'hermano',
'email': '[email protected]',
'password': 'hard',
'confirm_password': 'hard'
})
self.assertTrue(result.status_code == 302)
if __name__ == '__main__':
unittest.main()
|
Add test for signup success
|
Add test for signup success
|
Python
|
mit
|
mkiterian/bucket-list-app,mkiterian/bucket-list-app,mkiterian/bucket-list-app
|
import unittest
from unittest import TestCase
from user import User
from bucketlist import BucketList
- from flask import url_for
+ from flask import url_for, session
from app import app
class BucketListTest(TestCase):
- def setUp(self):
+ def setUp(self):
+ app.config['SECRET_KEY'] = 'seasasaskrit!'
# creates a test client
self.client = app.test_client()
self.client.testing = True
def test_success(self):
# sends HTTP GET request to the application
# on the specified path
result = self.client.get('/login')
self.assertEqual(result.status_code, 200)
def test_failure(self):
# sends HTTP GET request to the application
# on the specified path
result = self.client.get('/nonexistant.html')
self.assertEqual(result.status_code, 404)
def test_login_page_loads(self):
# assert login page loads correctly
result = self.client.get('/login')
self.assertTrue(b'The best way to keep track of your dreams and goals' in result.data)
- '''
- def test_signup(self):
- # register a new account
-
+ def test_sign_page_posts_and_redirects(self):
- response = self.client.post(url_for('/signup'), data={
+ result = self.client.post('signup', data={
'username': 'hermano',
'email': '[email protected]',
'password': 'hard',
'confirm_password': 'hard'
- })
+ })
- self.assertTrue(response.status_code == 302)
+ self.assertTrue(result.status_code == 302)
+
- '''
-
if __name__ == '__main__':
unittest.main()
|
Add test for signup success
|
## Code Before:
import unittest
from unittest import TestCase
from user import User
from bucketlist import BucketList
from flask import url_for
from app import app
class BucketListTest(TestCase):
def setUp(self):
# creates a test client
self.client = app.test_client()
self.client.testing = True
def test_success(self):
# sends HTTP GET request to the application
# on the specified path
result = self.client.get('/login')
self.assertEqual(result.status_code, 200)
def test_failure(self):
# sends HTTP GET request to the application
# on the specified path
result = self.client.get('/nonexistant.html')
self.assertEqual(result.status_code, 404)
def test_login_page_loads(self):
# assert login page loads correctly
result = self.client.get('/login')
self.assertTrue(b'The best way to keep track of your dreams and goals' in result.data)
'''
def test_signup(self):
# register a new account
response = self.client.post(url_for('/signup'), data={
'username': 'hermano',
'email': '[email protected]',
'password': 'hard',
'confirm_password': 'hard'
})
self.assertTrue(response.status_code == 302)
'''
if __name__ == '__main__':
unittest.main()
## Instruction:
Add test for signup success
## Code After:
import unittest
from unittest import TestCase
from user import User
from bucketlist import BucketList
from flask import url_for, session
from app import app
class BucketListTest(TestCase):
def setUp(self):
app.config['SECRET_KEY'] = 'seasasaskrit!'
# creates a test client
self.client = app.test_client()
self.client.testing = True
def test_success(self):
# sends HTTP GET request to the application
# on the specified path
result = self.client.get('/login')
self.assertEqual(result.status_code, 200)
def test_failure(self):
# sends HTTP GET request to the application
# on the specified path
result = self.client.get('/nonexistant.html')
self.assertEqual(result.status_code, 404)
def test_login_page_loads(self):
# assert login page loads correctly
result = self.client.get('/login')
self.assertTrue(b'The best way to keep track of your dreams and goals' in result.data)
def test_sign_page_posts_and_redirects(self):
result = self.client.post('signup', data={
'username': 'hermano',
'email': '[email protected]',
'password': 'hard',
'confirm_password': 'hard'
})
self.assertTrue(result.status_code == 302)
if __name__ == '__main__':
unittest.main()
|
import unittest
from unittest import TestCase
from user import User
from bucketlist import BucketList
- from flask import url_for
+ from flask import url_for, session
? +++++++++
from app import app
class BucketListTest(TestCase):
- def setUp(self):
+ def setUp(self):
? ++++++++
+ app.config['SECRET_KEY'] = 'seasasaskrit!'
# creates a test client
self.client = app.test_client()
self.client.testing = True
def test_success(self):
# sends HTTP GET request to the application
# on the specified path
result = self.client.get('/login')
self.assertEqual(result.status_code, 200)
def test_failure(self):
# sends HTTP GET request to the application
# on the specified path
result = self.client.get('/nonexistant.html')
self.assertEqual(result.status_code, 404)
def test_login_page_loads(self):
# assert login page loads correctly
result = self.client.get('/login')
self.assertTrue(b'The best way to keep track of your dreams and goals' in result.data)
- '''
- def test_signup(self):
- # register a new account
-
+ def test_sign_page_posts_and_redirects(self):
- response = self.client.post(url_for('/signup'), data={
? ---- ^^^^^ -------- - -
+ result = self.client.post('signup', data={
? ^^^
'username': 'hermano',
'email': '[email protected]',
'password': 'hard',
'confirm_password': 'hard'
- })
? ----
+ })
- self.assertTrue(response.status_code == 302)
? ---- ^^^^^
+ self.assertTrue(result.status_code == 302)
? ^^^
+
- '''
-
if __name__ == '__main__':
unittest.main()
|
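Editor's note on why the commented-out version failed: url_for expects an endpoint name (usually the view function's name), not a URL path, and it only works inside an application or request context. The endpoint name below is an assumption about app.py, not part of the original commit.
from flask import url_for
from app import app
with app.test_request_context():
    print(url_for('signup'))  # -> '/signup' if the view function is named signup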
8025b6cad403ace01eb71af05d284d5fa0fa4ff7
|
pandoc-figref.py
|
pandoc-figref.py
|
from pandocfilters import toJSONFilter, Str
import re
REF_PAT = re.compile('(.*)\{#([a-z]):(\w*)\}(.*)')
known_labels = {}
def figref(key, val, fmt, meta):
if key == 'Str' and REF_PAT.match(val):
start, kind, label, end = REF_PAT.match(val).groups()
if kind in known_labels:
if label not in known_labels[kind]:
known_labels[kind][label] = str(len(known_labels[kind])\
+ 1)
else:
known_labels[kind] = {}
known_labels[kind][label] = "1"
return [Str(start)] + [Str(known_labels[kind][label])] + \
[Str(end)]
if __name__ == '__main__':
toJSONFilter(figref)
|
from pandocfilters import toJSONFilter, Str
import re
REF_PAT = re.compile('(.*)\{#([a-z]):(\w*)\}(.*)')
known_labels = {}
def figref(key, val, fmt, meta):
if key == 'Str' and REF_PAT.match(val):
start, kind, label, end = REF_PAT.match(val).groups()
if kind in known_labels:
if label not in known_labels[kind]:
known_labels[kind][label] = str(len(known_labels[kind])\
+ 1)
else:
known_labels[kind] = {}
known_labels[kind][label] = "1"
return [Str(start)] + [Str(known_labels[kind][label])] + \
[Str(end)] + [Str(" ")]
if __name__ == '__main__':
toJSONFilter(figref)
|
Fix issue with missing space after replacement
|
Fix issue with missing space after replacement
|
Python
|
mit
|
scotthartley/pandoc-figref
|
from pandocfilters import toJSONFilter, Str
import re
REF_PAT = re.compile('(.*)\{#([a-z]):(\w*)\}(.*)')
known_labels = {}
def figref(key, val, fmt, meta):
if key == 'Str' and REF_PAT.match(val):
start, kind, label, end = REF_PAT.match(val).groups()
if kind in known_labels:
if label not in known_labels[kind]:
known_labels[kind][label] = str(len(known_labels[kind])\
+ 1)
else:
known_labels[kind] = {}
known_labels[kind][label] = "1"
return [Str(start)] + [Str(known_labels[kind][label])] + \
- [Str(end)]
+ [Str(end)] + [Str(" ")]
if __name__ == '__main__':
toJSONFilter(figref)
|
Fix issue with missing space after replacement
|
## Code Before:
from pandocfilters import toJSONFilter, Str
import re
REF_PAT = re.compile('(.*)\{#([a-z]):(\w*)\}(.*)')
known_labels = {}
def figref(key, val, fmt, meta):
if key == 'Str' and REF_PAT.match(val):
start, kind, label, end = REF_PAT.match(val).groups()
if kind in known_labels:
if label not in known_labels[kind]:
known_labels[kind][label] = str(len(known_labels[kind])\
+ 1)
else:
known_labels[kind] = {}
known_labels[kind][label] = "1"
return [Str(start)] + [Str(known_labels[kind][label])] + \
[Str(end)]
if __name__ == '__main__':
toJSONFilter(figref)
## Instruction:
Fix issue with missing space after replacement
## Code After:
from pandocfilters import toJSONFilter, Str
import re
REF_PAT = re.compile('(.*)\{#([a-z]):(\w*)\}(.*)')
known_labels = {}
def figref(key, val, fmt, meta):
if key == 'Str' and REF_PAT.match(val):
start, kind, label, end = REF_PAT.match(val).groups()
if kind in known_labels:
if label not in known_labels[kind]:
known_labels[kind][label] = str(len(known_labels[kind])\
+ 1)
else:
known_labels[kind] = {}
known_labels[kind][label] = "1"
return [Str(start)] + [Str(known_labels[kind][label])] + \
[Str(end)] + [Str(" ")]
if __name__ == '__main__':
toJSONFilter(figref)
|
from pandocfilters import toJSONFilter, Str
import re
REF_PAT = re.compile('(.*)\{#([a-z]):(\w*)\}(.*)')
known_labels = {}
def figref(key, val, fmt, meta):
if key == 'Str' and REF_PAT.match(val):
start, kind, label, end = REF_PAT.match(val).groups()
if kind in known_labels:
if label not in known_labels[kind]:
known_labels[kind][label] = str(len(known_labels[kind])\
+ 1)
else:
known_labels[kind] = {}
known_labels[kind][label] = "1"
return [Str(start)] + [Str(known_labels[kind][label])] + \
- [Str(end)]
+ [Str(end)] + [Str(" ")]
? +++++++++++++
if __name__ == '__main__':
toJSONFilter(figref)
|
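Editor's note: with the module above loaded in a REPL, the filter function can be poked directly; this is an illustration, not part of the original commit. Each returned element is a pandocfilters Str dict such as {'t': 'Str', 'c': '1'}.
print(figref('Str', '({#f:setup})', 'markdown', {}))
# first 'f' label -> [Str('('), Str('1'), Str(')'), Str(' ')], shown schematically
print(figref('Str', '{#f:results},', 'markdown', {}))
# the second 'f' label is numbered 2, again with the trailing Str(' ') that this commit adds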
ce703cbe3040770ee105fb0d953f85eebb92bdc9
|
us_ignite/sections/templatetags/sections_tags.py
|
us_ignite/sections/templatetags/sections_tags.py
|
from django import template
from django.template.loader import render_to_string
from us_ignite.sections.models import Sponsor
register = template.Library()
class RenderingNode(template.Node):
def __init__(self, template_name):
self.template_name = template_name
def render(self, context):
template_name = self.template_name.resolve(context)
context = {
'object_list': Sponsor.objects.all()
}
return render_to_string(template_name, context)
def _render_sponsors(parser, token):
"""Tag to render the latest ``Articles``.
Usage:
{% render_sponsors "sections/sponsor_list.html" %}
Where the second argument is a template path.
"""
bits = token.split_contents()
if not len(bits) == 2:
raise template.TemplateSyntaxError(
"%r tag only accepts a template argument." % bits[0])
# Determine the template name (could be a variable or a string):
template_name = parser.compile_filter(bits[1])
return RenderingNode(template_name)
register.tag('render_sponsors', _render_sponsors)
|
from django import template
from django.template.loader import render_to_string
from us_ignite.sections.models import Sponsor
register = template.Library()
class RenderingNode(template.Node):
def __init__(self, template_name):
self.template_name = template_name
def render(self, context):
template_name = self.template_name.resolve(context)
template_context = {
'object_list': Sponsor.objects.all()
}
return render_to_string(template_name, template_context)
def _render_sponsors(parser, token):
"""Tag to render the latest ``Articles``.
Usage:
{% render_sponsors "sections/sponsor_list.html" %}
Where the second argument is a template path.
"""
bits = token.split_contents()
if not len(bits) == 2:
raise template.TemplateSyntaxError(
"%r tag only accepts a template argument." % bits[0])
# Determine the template name (could be a variable or a string):
template_name = parser.compile_filter(bits[1])
return RenderingNode(template_name)
register.tag('render_sponsors', _render_sponsors)
|
Make sure the ``context`` is not overridden.
|
Bugfix: Make sure the ``context`` is not overridden.
The variable name for the string template context was overriding
the actual ``context`` of the template where the tag was
embedded.
|
Python
|
bsd-3-clause
|
us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite
|
from django import template
from django.template.loader import render_to_string
from us_ignite.sections.models import Sponsor
register = template.Library()
class RenderingNode(template.Node):
def __init__(self, template_name):
self.template_name = template_name
def render(self, context):
template_name = self.template_name.resolve(context)
- context = {
+ template_context = {
'object_list': Sponsor.objects.all()
}
- return render_to_string(template_name, context)
+ return render_to_string(template_name, template_context)
def _render_sponsors(parser, token):
"""Tag to render the latest ``Articles``.
Usage:
{% render_sponsors "sections/sponsor_list.html" %}
Where the second argument is a template path.
"""
bits = token.split_contents()
if not len(bits) == 2:
raise template.TemplateSyntaxError(
"%r tag only accepts a template argument." % bits[0])
# Determine the template name (could be a variable or a string):
template_name = parser.compile_filter(bits[1])
return RenderingNode(template_name)
-
register.tag('render_sponsors', _render_sponsors)
|
Make sure the ``context`` is not overridden.
|
## Code Before:
from django import template
from django.template.loader import render_to_string
from us_ignite.sections.models import Sponsor
register = template.Library()
class RenderingNode(template.Node):
def __init__(self, template_name):
self.template_name = template_name
def render(self, context):
template_name = self.template_name.resolve(context)
context = {
'object_list': Sponsor.objects.all()
}
return render_to_string(template_name, context)
def _render_sponsors(parser, token):
"""Tag to render the latest ``Articles``.
Usage:
{% render_sponsors "sections/sponsor_list.html" %}
Where the second argument is a template path.
"""
bits = token.split_contents()
if not len(bits) == 2:
raise template.TemplateSyntaxError(
"%r tag only accepts a template argument." % bits[0])
# Determine the template name (could be a variable or a string):
template_name = parser.compile_filter(bits[1])
return RenderingNode(template_name)
register.tag('render_sponsors', _render_sponsors)
## Instruction:
Make sure the ``context`` is not overridden.
## Code After:
from django import template
from django.template.loader import render_to_string
from us_ignite.sections.models import Sponsor
register = template.Library()
class RenderingNode(template.Node):
def __init__(self, template_name):
self.template_name = template_name
def render(self, context):
template_name = self.template_name.resolve(context)
template_context = {
'object_list': Sponsor.objects.all()
}
return render_to_string(template_name, template_context)
def _render_sponsors(parser, token):
"""Tag to render the latest ``Articles``.
Usage:
{% render_sponsors "sections/sponsor_list.html" %}
Where the second argument is a template path.
"""
bits = token.split_contents()
if not len(bits) == 2:
raise template.TemplateSyntaxError(
"%r tag only accepts a template argument." % bits[0])
# Determine the template name (could be a variable or a string):
template_name = parser.compile_filter(bits[1])
return RenderingNode(template_name)
register.tag('render_sponsors', _render_sponsors)
|
from django import template
from django.template.loader import render_to_string
from us_ignite.sections.models import Sponsor
register = template.Library()
class RenderingNode(template.Node):
def __init__(self, template_name):
self.template_name = template_name
def render(self, context):
template_name = self.template_name.resolve(context)
- context = {
+ template_context = {
? +++++++++
'object_list': Sponsor.objects.all()
}
- return render_to_string(template_name, context)
+ return render_to_string(template_name, template_context)
? +++++++++
def _render_sponsors(parser, token):
"""Tag to render the latest ``Articles``.
Usage:
{% render_sponsors "sections/sponsor_list.html" %}
Where the second argument is a template path.
"""
bits = token.split_contents()
if not len(bits) == 2:
raise template.TemplateSyntaxError(
"%r tag only accepts a template argument." % bits[0])
# Determine the template name (could be a variable or a string):
template_name = parser.compile_filter(bits[1])
return RenderingNode(template_name)
-
register.tag('render_sponsors', _render_sponsors)
|
d2f1b9311b546c079490e5f0bdb45b9c9d570bb1
|
system/test_coupling_fields.py
|
system/test_coupling_fields.py
|
from __future__ import print_function
import os
import netCDF4 as nc
from model_test_helper import ModelTestHelper
class TestCouplingFields(ModelTestHelper):
def __init__(self):
super(TestCouplingFields, self).__init__()
def test_swflx(self):
"""
Compare short wave flux over a geographic area between low and hi res
models.
"""
hi_fields = os.path.join(self.paths['cm_1440x1080-test']['output'], 'ice',
'fields_a2i_in_ice.nc')
lo_fields = os.path.join(self.paths['cm_360x300-test']['output'], 'ice',
'fields_a2i_in_ice.nc')
f_hi = nc.Dataset(hi_fields)
f_hi.close()
f_lo = nc.Dataset(lo_fields)
f_lo.close()
|
from __future__ import print_function
import os
import netCDF4 as nc
from model_test_helper import ModelTestHelper
class TestCouplingFields(ModelTestHelper):
def __init__(self):
super(TestCouplingFields, self).__init__()
def test_swflx(self):
"""
Compare short wave flux over a geographic area between low and hi res
models.
"""
hi_paths = self.make_paths('cm_1440x1080-test')
lo_paths = self.make_paths('cm_360x300-test')
hi_fields = os.path.join(hi_paths['output'], 'ice',
'fields_a2i_in_ice.nc')
lo_fields = os.path.join(lo_paths['output'], 'ice',
'fields_a2i_in_ice.nc')
f_hi = nc.Dataset(hi_fields)
f_hi.close()
f_lo = nc.Dataset(lo_fields)
f_lo.close()
|
Fix up paths in system test.
|
Fix up paths in system test.
|
Python
|
apache-2.0
|
CWSL/access-om
|
from __future__ import print_function
import os
import netCDF4 as nc
from model_test_helper import ModelTestHelper
class TestCouplingFields(ModelTestHelper):
def __init__(self):
super(TestCouplingFields, self).__init__()
def test_swflx(self):
"""
Compare short wave flux over a geographic area between low and hi res
models.
"""
+ hi_paths = self.make_paths('cm_1440x1080-test')
+ lo_paths = self.make_paths('cm_360x300-test')
+
- hi_fields = os.path.join(self.paths['cm_1440x1080-test']['output'], 'ice',
+ hi_fields = os.path.join(hi_paths['output'], 'ice',
'fields_a2i_in_ice.nc')
- lo_fields = os.path.join(self.paths['cm_360x300-test']['output'], 'ice',
+ lo_fields = os.path.join(lo_paths['output'], 'ice',
'fields_a2i_in_ice.nc')
f_hi = nc.Dataset(hi_fields)
f_hi.close()
f_lo = nc.Dataset(lo_fields)
f_lo.close()
|
Fix up paths in system test.
|
## Code Before:
from __future__ import print_function
import os
import netCDF4 as nc
from model_test_helper import ModelTestHelper
class TestCouplingFields(ModelTestHelper):
def __init__(self):
super(TestCouplingFields, self).__init__()
def test_swflx(self):
"""
Compare short wave flux over a geographic area between low and hi res
models.
"""
hi_fields = os.path.join(self.paths['cm_1440x1080-test']['output'], 'ice',
'fields_a2i_in_ice.nc')
lo_fields = os.path.join(self.paths['cm_360x300-test']['output'], 'ice',
'fields_a2i_in_ice.nc')
f_hi = nc.Dataset(hi_fields)
f_hi.close()
f_lo = nc.Dataset(lo_fields)
f_lo.close()
## Instruction:
Fix up paths in system test.
## Code After:
from __future__ import print_function
import os
import netCDF4 as nc
from model_test_helper import ModelTestHelper
class TestCouplingFields(ModelTestHelper):
def __init__(self):
super(TestCouplingFields, self).__init__()
def test_swflx(self):
"""
Compare short wave flux over a geographic area between low and hi res
models.
"""
hi_paths = self.make_paths('cm_1440x1080-test')
lo_paths = self.make_paths('cm_360x300-test')
hi_fields = os.path.join(hi_paths['output'], 'ice',
'fields_a2i_in_ice.nc')
lo_fields = os.path.join(lo_paths['output'], 'ice',
'fields_a2i_in_ice.nc')
f_hi = nc.Dataset(hi_fields)
f_hi.close()
f_lo = nc.Dataset(lo_fields)
f_lo.close()
|
from __future__ import print_function
import os
import netCDF4 as nc
from model_test_helper import ModelTestHelper
class TestCouplingFields(ModelTestHelper):
def __init__(self):
super(TestCouplingFields, self).__init__()
def test_swflx(self):
"""
Compare short wave flux over a geographic area between low and hi res
models.
"""
+ hi_paths = self.make_paths('cm_1440x1080-test')
+ lo_paths = self.make_paths('cm_360x300-test')
+
- hi_fields = os.path.join(self.paths['cm_1440x1080-test']['output'], 'ice',
? ^^^^^ ---------------------
+ hi_fields = os.path.join(hi_paths['output'], 'ice',
? ^^^
'fields_a2i_in_ice.nc')
- lo_fields = os.path.join(self.paths['cm_360x300-test']['output'], 'ice',
? -- ^^ -------------------
+ lo_fields = os.path.join(lo_paths['output'], 'ice',
? ^^
'fields_a2i_in_ice.nc')
f_hi = nc.Dataset(hi_fields)
f_hi.close()
f_lo = nc.Dataset(lo_fields)
f_lo.close()
|
aaaa857642fa4ce2631fb47f3c929d3197037231
|
falcom/generate_pageview.py
|
falcom/generate_pageview.py
|
class Pagetags:
default_confidence = 100
def generate_pageview (self):
return ""
def add_raw_tags (self, tag_data):
pass
|
class Pagetags:
def __init__ (self):
self.default_confidence = 100
@property
def default_confidence (self):
return self.__default_confid
@default_confidence.setter
def default_confidence (self, value):
self.__default_confid = value
def generate_pageview (self):
return ""
def add_raw_tags (self, tag_data):
pass
|
Make default_confidence into a @property
|
:muscle: Make default_confidence into a @property
|
Python
|
bsd-3-clause
|
mlibrary/image-conversion-and-validation,mlibrary/image-conversion-and-validation
|
class Pagetags:
+ def __init__ (self):
- default_confidence = 100
+ self.default_confidence = 100
+
+ @property
+ def default_confidence (self):
+ return self.__default_confid
+
+ @default_confidence.setter
+ def default_confidence (self, value):
+ self.__default_confid = value
def generate_pageview (self):
return ""
def add_raw_tags (self, tag_data):
pass
|
Make default_confidence into a @property
|
## Code Before:
class Pagetags:
default_confidence = 100
def generate_pageview (self):
return ""
def add_raw_tags (self, tag_data):
pass
## Instruction:
Make default_confidence into a @property
## Code After:
class Pagetags:
def __init__ (self):
self.default_confidence = 100
@property
def default_confidence (self):
return self.__default_confid
@default_confidence.setter
def default_confidence (self, value):
self.__default_confid = value
def generate_pageview (self):
return ""
def add_raw_tags (self, tag_data):
pass
|
class Pagetags:
+ def __init__ (self):
- default_confidence = 100
+ self.default_confidence = 100
? +++++++++
+
+ @property
+ def default_confidence (self):
+ return self.__default_confid
+
+ @default_confidence.setter
+ def default_confidence (self, value):
+ self.__default_confid = value
def generate_pageview (self):
return ""
def add_raw_tags (self, tag_data):
pass
|
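Editor's note: a quick illustration of the new per-instance property, not part of the original commit; it assumes the Pagetags class above is in scope.
tags = Pagetags()
print(tags.default_confidence)        # 100, set in __init__ through the property setter
tags.default_confidence = 85          # goes through the @default_confidence.setter
print(tags.default_confidence)        # 85
print(Pagetags().default_confidence)  # a fresh instance still starts at 100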
174c570d69d0958aa734794ffb7712ea37e70c6f
|
parse.py
|
parse.py
|
import sys
import configparser
def main():
config = configparser.ConfigParser(strict=False)
try:
section = sys.argv[1]
config_key = sys.argv[2]
config_value = sys.argv[3]
except IndexError:
print("Usage: cat test.ini | python parse.py <section> <option> <value>")
sys.exit(1)
try:
config.read_string(''.join(sys.stdin))
"""
Try to update or remove a config option from a section, or add the option to a new section.
"""
if section in config:
if config_key in config[section]:
if config_value == 'delete':
config.remove_option(section, config_key)
else:
config[section][config_key] = config_value
else:
config.add_section(section)
config.set(section, config_key, config_value)
config.write(sys.stdout)
except:
print("There was an error parsing the config.")
if __name__ == "__main__":
main()
|
import sys
import configparser
def main():
config = configparser.ConfigParser(strict=False)
try:
section = sys.argv[1]
config_key = sys.argv[2]
config_value = sys.argv[3]
except IndexError:
print("Usage: cat test.ini | python parse.py <section> <option> <value>")
sys.exit(1)
try:
config.read_string(''.join(sys.stdin))
"""Try to update or remove a config option from a section, or add the option to a new section."""
if section in config:
if config_key in config[section]:
if config_value == 'delete':
config.remove_option(section, config_key)
else:
config[section][config_key] = config_value
else:
config.set(section, config_key, config_value)
else:
config.add_section(section)
config.set(section, config_key, config_value)
config.write(sys.stdout)
except:
print("There was an error parsing the config.")
if __name__ == "__main__":
main()
|
Add new key to existing section.
|
Add new key to existing section.
|
Python
|
mit
|
tonigrigoriu/ini-parser
|
import sys
import configparser
def main():
config = configparser.ConfigParser(strict=False)
try:
section = sys.argv[1]
config_key = sys.argv[2]
config_value = sys.argv[3]
except IndexError:
print("Usage: cat test.ini | python parse.py <section> <option> <value>")
sys.exit(1)
try:
config.read_string(''.join(sys.stdin))
- """
- Try to update or remove a config option from a section, or add the option to a new section.
+ """Try to update or remove a config option from a section, or add the option to a new section."""
-
- """
if section in config:
if config_key in config[section]:
if config_value == 'delete':
config.remove_option(section, config_key)
else:
config[section][config_key] = config_value
+ else:
+ config.set(section, config_key, config_value)
else:
config.add_section(section)
config.set(section, config_key, config_value)
config.write(sys.stdout)
except:
print("There was an error parsing the config.")
if __name__ == "__main__":
main()
|
Add new key to existing section.
|
## Code Before:
import sys
import configparser
def main():
config = configparser.ConfigParser(strict=False)
try:
section = sys.argv[1]
config_key = sys.argv[2]
config_value = sys.argv[3]
except IndexError:
print("Usage: cat test.ini | python parse.py <section> <option> <value>")
sys.exit(1)
try:
config.read_string(''.join(sys.stdin))
"""
Try to update or remove a config option from a section, or add the option to a new section.
"""
if section in config:
if config_key in config[section]:
if config_value == 'delete':
config.remove_option(section, config_key)
else:
config[section][config_key] = config_value
else:
config.add_section(section)
config.set(section, config_key, config_value)
config.write(sys.stdout)
except:
print("There was an error parsing the config.")
if __name__ == "__main__":
main()
## Instruction:
Add new key to existing section.
## Code After:
import sys
import configparser
def main():
config = configparser.ConfigParser(strict=False)
try:
section = sys.argv[1]
config_key = sys.argv[2]
config_value = sys.argv[3]
except IndexError:
print("Usage: cat test.ini | python parse.py <section> <option> <value>")
sys.exit(1)
try:
config.read_string(''.join(sys.stdin))
"""Try to update or remove a config option from a section, or add the option to a new section."""
if section in config:
if config_key in config[section]:
if config_value == 'delete':
config.remove_option(section, config_key)
else:
config[section][config_key] = config_value
else:
config.set(section, config_key, config_value)
else:
config.add_section(section)
config.set(section, config_key, config_value)
config.write(sys.stdout)
except:
print("There was an error parsing the config.")
if __name__ == "__main__":
main()
|
import sys
import configparser
def main():
config = configparser.ConfigParser(strict=False)
try:
section = sys.argv[1]
config_key = sys.argv[2]
config_value = sys.argv[3]
except IndexError:
print("Usage: cat test.ini | python parse.py <section> <option> <value>")
sys.exit(1)
try:
config.read_string(''.join(sys.stdin))
- """
- Try to update or remove a config option from a section, or add the option to a new section.
+ """Try to update or remove a config option from a section, or add the option to a new section."""
? +++ +++
-
- """
if section in config:
if config_key in config[section]:
if config_value == 'delete':
config.remove_option(section, config_key)
else:
config[section][config_key] = config_value
+ else:
+ config.set(section, config_key, config_value)
else:
config.add_section(section)
config.set(section, config_key, config_value)
config.write(sys.stdout)
except:
print("There was an error parsing the config.")
if __name__ == "__main__":
main()
|
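Editor's note: a self-contained sketch of the case the patch adds (existing section, brand-new key), not part of the original commit. It mirrors the branch logic above rather than calling main(), which reads stdin and sys.argv.
import configparser
import io
config = configparser.ConfigParser(strict=False)
config.read_string('[server]\nhost = localhost\n')
section, config_key, config_value = 'server', 'port', '8080'
if section in config:
    if config_key in config[section]:
        config[section][config_key] = config_value    # update/delete path, unchanged
    else:
        config.set(section, config_key, config_value)  # the newly added branch
else:
    config.add_section(section)
    config.set(section, config_key, config_value)
out = io.StringIO()
config.write(out)
print(out.getvalue())  # [server] now lists both host and port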
bf70f8e3235c140589e9b0110b34da8427ab409b
|
child_sync_typo3/wizard/delegate_child_wizard.py
|
child_sync_typo3/wizard/delegate_child_wizard.py
|
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class delegate_child_wizard(orm.TransientModel):
_inherit = 'delegate.child.wizard'
def delegate(self, cr, uid, ids, context=None):
child_ids = self._default_child_ids(cr, uid, context)
child_obj = self.pool.get('compassion.child')
typo3_to_remove_ids = list()
for child in child_obj.browse(cr, uid, child_ids, context):
if (child.state == 'I'):
typo3_to_remove_ids.append(child.id)
if typo3_to_remove_ids:
res = child_obj.child_remove_from_typo3(
cr, uid, typo3_to_remove_ids, context)
res = super(delegate_child_wizard, self).delegate(
cr, uid, ids, context)
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
|
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class delegate_child_wizard(orm.TransientModel):
_inherit = 'delegate.child.wizard'
def delegate(self, cr, uid, ids, context=None):
child_ids = self._default_child_ids(cr, uid, context)
child_obj = self.pool.get('compassion.child')
typo3_to_remove_ids = list()
for child in child_obj.browse(cr, uid, child_ids, context):
if (child.state == 'I'):
typo3_to_remove_ids.append(child.id)
if typo3_to_remove_ids:
res = child_obj.child_remove_from_typo3(
cr, uid, typo3_to_remove_ids, context)
res = super(delegate_child_wizard, self).delegate(
cr, uid, ids, context) and res
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
|
Fix res returned on delegate
|
Fix res returned on delegate
|
Python
|
agpl-3.0
|
MickSandoz/compassion-switzerland,ndtran/compassion-switzerland,eicher31/compassion-switzerland,ecino/compassion-switzerland,Secheron/compassion-switzerland,ecino/compassion-switzerland,Secheron/compassion-switzerland,CompassionCH/compassion-switzerland,MickSandoz/compassion-switzerland,ndtran/compassion-switzerland,ecino/compassion-switzerland,eicher31/compassion-switzerland,CompassionCH/compassion-switzerland,eicher31/compassion-switzerland,CompassionCH/compassion-switzerland
|
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class delegate_child_wizard(orm.TransientModel):
_inherit = 'delegate.child.wizard'
def delegate(self, cr, uid, ids, context=None):
child_ids = self._default_child_ids(cr, uid, context)
child_obj = self.pool.get('compassion.child')
typo3_to_remove_ids = list()
for child in child_obj.browse(cr, uid, child_ids, context):
if (child.state == 'I'):
typo3_to_remove_ids.append(child.id)
if typo3_to_remove_ids:
res = child_obj.child_remove_from_typo3(
cr, uid, typo3_to_remove_ids, context)
res = super(delegate_child_wizard, self).delegate(
- cr, uid, ids, context)
+ cr, uid, ids, context) and res
+
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
|
Fix res returned on delegate
|
## Code Before:
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class delegate_child_wizard(orm.TransientModel):
_inherit = 'delegate.child.wizard'
def delegate(self, cr, uid, ids, context=None):
child_ids = self._default_child_ids(cr, uid, context)
child_obj = self.pool.get('compassion.child')
typo3_to_remove_ids = list()
for child in child_obj.browse(cr, uid, child_ids, context):
if (child.state == 'I'):
typo3_to_remove_ids.append(child.id)
if typo3_to_remove_ids:
res = child_obj.child_remove_from_typo3(
cr, uid, typo3_to_remove_ids, context)
res = super(delegate_child_wizard, self).delegate(
cr, uid, ids, context)
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
## Instruction:
Fix res returned on delegate
## Code After:
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class delegate_child_wizard(orm.TransientModel):
_inherit = 'delegate.child.wizard'
def delegate(self, cr, uid, ids, context=None):
child_ids = self._default_child_ids(cr, uid, context)
child_obj = self.pool.get('compassion.child')
typo3_to_remove_ids = list()
for child in child_obj.browse(cr, uid, child_ids, context):
if (child.state == 'I'):
typo3_to_remove_ids.append(child.id)
if typo3_to_remove_ids:
res = child_obj.child_remove_from_typo3(
cr, uid, typo3_to_remove_ids, context)
res = super(delegate_child_wizard, self).delegate(
cr, uid, ids, context) and res
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
|
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class delegate_child_wizard(orm.TransientModel):
_inherit = 'delegate.child.wizard'
def delegate(self, cr, uid, ids, context=None):
child_ids = self._default_child_ids(cr, uid, context)
child_obj = self.pool.get('compassion.child')
typo3_to_remove_ids = list()
for child in child_obj.browse(cr, uid, child_ids, context):
if (child.state == 'I'):
typo3_to_remove_ids.append(child.id)
if typo3_to_remove_ids:
res = child_obj.child_remove_from_typo3(
cr, uid, typo3_to_remove_ids, context)
res = super(delegate_child_wizard, self).delegate(
- cr, uid, ids, context)
+ cr, uid, ids, context) and res
? ++++++++
+
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
|
4ebc13ac1913dfe3fcd7bdb7c7235b7b88718574
|
fastdraw/api/commands.py
|
fastdraw/api/commands.py
|
def answer(delay=0):
"""Answer the channel.
:param delay: The number of milliseconds to wait before moving to the next
priority.
:type delay: int
"""
res = 'same => n,Answer(%d)' % delay
return res
def goto(context, exten='s', priority=1):
res = 'same => n,Goto(%s,%s,%d)' % (context, exten, priority)
return res
def hangup(cause=''):
"""Hangup the calling channel.
:param cause: Hangup cause code to use for the channel.
:type cause: str
"""
res = 'same => n,Hangup(%s)' % cause
return res
|
def answer(delay=0):
"""Answer the channel.
:param delay: The number of milliseconds to wait before moving to the next
priority.
:type delay: int
"""
res = 'same => n,Answer(%d)' % delay
return res
def goto(context, exten='s', priority=1):
"""Goto another point in the dialplan
:param context: The context or label to jump to
:type context: string
:param exten: The extension within that context to goto (default: s)
:type exten: string
:param priority: The line within the extension (default: 1)
:type priority: int
"""
res = 'same => n,Goto(%s,%s,%d)' % (context, exten, priority)
return res
def hangup(cause=''):
"""Hangup the calling channel.
:param cause: Hangup cause code to use for the channel.
:type cause: str
"""
res = 'same => n,Hangup(%s)' % cause
return res
|
Add documentation for goto command
|
Add documentation for goto command
Change-Id: I94e280eef509abe65f552b6e78f21eabfe4192e3
Signed-off-by: Sarah Liske <[email protected]>
|
Python
|
apache-2.0
|
kickstandproject/fastdraw
|
def answer(delay=0):
"""Answer the channel.
:param delay: The number of milliseconds to wait before moving to the next
priority.
:type delay: int
"""
res = 'same => n,Answer(%d)' % delay
return res
def goto(context, exten='s', priority=1):
+ """Goto another point in the dialplan
+
+ :param context: The context or label to jump to
+ :type context: string
+ :param exten: The extension within that context to goto (default: s)
+ :type exten: string
+ :param priority: The line within the extension (default: 1)
+ :type priority: int
+ """
res = 'same => n,Goto(%s,%s,%d)' % (context, exten, priority)
return res
def hangup(cause=''):
"""Hangup the calling channel.
:param cause: Hangup cause code to use for the channel.
:type cause: str
"""
res = 'same => n,Hangup(%s)' % cause
return res
|
Add documentation for goto command
|
## Code Before:
def answer(delay=0):
"""Answer the channel.
:param delay: The number of milliseconds to wait before moving to the next
priority.
:type delay: int
"""
res = 'same => n,Answer(%d)' % delay
return res
def goto(context, exten='s', priority=1):
res = 'same => n,Goto(%s,%s,%d)' % (context, exten, priority)
return res
def hangup(cause=''):
"""Hangup the calling channel.
:param cause: Hangup cause code to use for the channel.
:type cause: str
"""
res = 'same => n,Hangup(%s)' % cause
return res
## Instruction:
Add documentation for goto command
## Code After:
def answer(delay=0):
"""Answer the channel.
:param delay: The number of milliseconds to wait before moving to the next
priority.
:type delay: int
"""
res = 'same => n,Answer(%d)' % delay
return res
def goto(context, exten='s', priority=1):
"""Goto another point in the dialplan
:param context: The context or label to jump to
:type context: string
:param exten: The extension within that context to goto (default: s)
:type exten: string
:param priority: The line within the extension (default: 1)
:type priority: int
"""
res = 'same => n,Goto(%s,%s,%d)' % (context, exten, priority)
return res
def hangup(cause=''):
"""Hangup the calling channel.
:param cause: Hangup cause code to use for the channel.
:type cause: str
"""
res = 'same => n,Hangup(%s)' % cause
return res
|
def answer(delay=0):
"""Answer the channel.
:param delay: The number of milliseconds to wait before moving to the next
priority.
:type delay: int
"""
res = 'same => n,Answer(%d)' % delay
return res
def goto(context, exten='s', priority=1):
+ """Goto another point in the dialplan
+
+ :param context: The context or label to jump to
+ :type context: string
+ :param exten: The extension within that context to goto (default: s)
+ :type exten: string
+ :param priority: The line within the extension (default: 1)
+ :type priority: int
+ """
res = 'same => n,Goto(%s,%s,%d)' % (context, exten, priority)
return res
def hangup(cause=''):
"""Hangup the calling channel.
:param cause: Hangup cause code to use for the channel.
:type cause: str
"""
res = 'same => n,Hangup(%s)' % cause
return res
|
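Editor's note: a short usage illustration, not part of the original commit; it assumes the three helpers above import from fastdraw.api.commands. Each helper only builds a dialplan line as a string.
from fastdraw.api.commands import answer, goto, hangup
print(answer(500))                # same => n,Answer(500)
print(goto('voicemail', 's', 1))  # same => n,Goto(voicemail,s,1)
print(goto('support-queue'))      # defaults give: same => n,Goto(support-queue,s,1)
print(hangup('16'))               # same => n,Hangup(16)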
27b0a5b95e188a5bd77ae662bbb43e06dfde4749
|
slack/views.py
|
slack/views.py
|
from flask import Flask, request
import requests
from urllib import unquote
app = Flask(__name__)
@app.route("/")
def meme():
domain = request.args["team_domain"]
slackbot = request.args["slackbot"]
text = request.args["text"]
channel = request.args["channel_name"]
text = text[:-1] if text[-1] == ";" else text
params = text.split(";")
params = [x.strip().replace(" ", "-") for x in params]
params = [unquote(x) for x in params]
if not len(params) == 3:
response = "Your syntax should be in the form: /meme template; top; bottom;"
else:
template = params[0]
top = params[1]
bottom = params[2]
response = "http://memegen.link/{0}/{1}/{2}.jpg".format(template, top, bottom)
url = "https://{0}.slack.com/services/hooks/slackbot?token={1}&channel=%23{2}".format(domain, slackbot, channel)
requests.post(url, data=response)
return "ok", 200
|
from flask import Flask, request
import requests
from urllib import unquote
app = Flask(__name__)
@app.route("/")
def meme():
domain = request.args["team_domain"]
slackbot = request.args["slackbot"]
text = request.args["text"]
channel = request.args["channel_id"]
text = unquote(text)
text = text[:-1] if text[-1] == ";" else text
params = text.split(";")
params = [x.strip().replace(" ", "-") for x in params]
if not len(params) == 3:
response = "Your syntax should be in the form: /meme template; top; bottom;"
else:
template = params[0]
top = params[1]
bottom = params[2]
response = "http://memegen.link/{0}/{1}/{2}.jpg".format(template, top, bottom)
url = "https://{0}.slack.com/services/hooks/slackbot?token={1}&channel={2}".format(domain, slackbot, channel)
requests.post(url, data=response)
return "ok", 200
|
Use the id of the channel and unquote all of the text first.
|
Use the id of the channel and unquote all of the text first.
|
Python
|
mit
|
DuaneGarber/slack-meme,joeynebula/slack-meme,tezzutezzu/slack-meme,nicolewhite/slack-meme
|
from flask import Flask, request
import requests
from urllib import unquote
app = Flask(__name__)
@app.route("/")
def meme():
domain = request.args["team_domain"]
slackbot = request.args["slackbot"]
text = request.args["text"]
- channel = request.args["channel_name"]
+ channel = request.args["channel_id"]
+ text = unquote(text)
text = text[:-1] if text[-1] == ";" else text
params = text.split(";")
params = [x.strip().replace(" ", "-") for x in params]
- params = [unquote(x) for x in params]
if not len(params) == 3:
response = "Your syntax should be in the form: /meme template; top; bottom;"
else:
template = params[0]
top = params[1]
bottom = params[2]
response = "http://memegen.link/{0}/{1}/{2}.jpg".format(template, top, bottom)
- url = "https://{0}.slack.com/services/hooks/slackbot?token={1}&channel=%23{2}".format(domain, slackbot, channel)
+ url = "https://{0}.slack.com/services/hooks/slackbot?token={1}&channel={2}".format(domain, slackbot, channel)
requests.post(url, data=response)
return "ok", 200
|
Use the id of the channel and unquote all of the text first.
|
## Code Before:
from flask import Flask, request
import requests
from urllib import unquote
app = Flask(__name__)
@app.route("/")
def meme():
domain = request.args["team_domain"]
slackbot = request.args["slackbot"]
text = request.args["text"]
channel = request.args["channel_name"]
text = text[:-1] if text[-1] == ";" else text
params = text.split(";")
params = [x.strip().replace(" ", "-") for x in params]
params = [unquote(x) for x in params]
if not len(params) == 3:
response = "Your syntax should be in the form: /meme template; top; bottom;"
else:
template = params[0]
top = params[1]
bottom = params[2]
response = "http://memegen.link/{0}/{1}/{2}.jpg".format(template, top, bottom)
url = "https://{0}.slack.com/services/hooks/slackbot?token={1}&channel=%23{2}".format(domain, slackbot, channel)
requests.post(url, data=response)
return "ok", 200
## Instruction:
Use the id of the channel and unquote all of the text first.
## Code After:
from flask import Flask, request
import requests
from urllib import unquote
app = Flask(__name__)
@app.route("/")
def meme():
domain = request.args["team_domain"]
slackbot = request.args["slackbot"]
text = request.args["text"]
channel = request.args["channel_id"]
text = unquote(text)
text = text[:-1] if text[-1] == ";" else text
params = text.split(";")
params = [x.strip().replace(" ", "-") for x in params]
if not len(params) == 3:
response = "Your syntax should be in the form: /meme template; top; bottom;"
else:
template = params[0]
top = params[1]
bottom = params[2]
response = "http://memegen.link/{0}/{1}/{2}.jpg".format(template, top, bottom)
url = "https://{0}.slack.com/services/hooks/slackbot?token={1}&channel={2}".format(domain, slackbot, channel)
requests.post(url, data=response)
return "ok", 200
|
from flask import Flask, request
import requests
from urllib import unquote
app = Flask(__name__)
@app.route("/")
def meme():
domain = request.args["team_domain"]
slackbot = request.args["slackbot"]
text = request.args["text"]
- channel = request.args["channel_name"]
? ^^^^
+ channel = request.args["channel_id"]
? ^^
+ text = unquote(text)
text = text[:-1] if text[-1] == ";" else text
params = text.split(";")
params = [x.strip().replace(" ", "-") for x in params]
- params = [unquote(x) for x in params]
if not len(params) == 3:
response = "Your syntax should be in the form: /meme template; top; bottom;"
else:
template = params[0]
top = params[1]
bottom = params[2]
response = "http://memegen.link/{0}/{1}/{2}.jpg".format(template, top, bottom)
- url = "https://{0}.slack.com/services/hooks/slackbot?token={1}&channel=%23{2}".format(domain, slackbot, channel)
? ---
+ url = "https://{0}.slack.com/services/hooks/slackbot?token={1}&channel={2}".format(domain, slackbot, channel)
requests.post(url, data=response)
return "ok", 200
|
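Editor's note: an isolated sketch of the text handling only (no HTTP), not part of the original commit. The input mimics a url-encoded Slack payload, and the Python 2 urllib import matches the module above.
from urllib import unquote  # Python 2, as in the module above
text = 'success%20kid%3B%20such%20meme%3B%20very%20wow%3B'
text = unquote(text)  # decode the whole string before splitting, as the fix does
text = text[:-1] if text[-1] == ';' else text
params = [x.strip().replace(' ', '-') for x in text.split(';')]
print(params)  # ['success-kid', 'such-meme', 'very-wow']
print('http://memegen.link/{0}/{1}/{2}.jpg'.format(*params))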
32ac109aec82210ccfa617b438a844b0f300157c
|
comics/core/context_processors.py
|
comics/core/context_processors.py
|
from django.conf import settings
from django.db.models import Count, Max
from comics.core.models import Comic
def site_settings(request):
return {
'site_title': settings.COMICS_SITE_TITLE,
'site_tagline': settings.COMICS_SITE_TAGLINE,
'google_analytics_code': settings.COMICS_GOOGLE_ANALYTICS_CODE,
}
def all_comics(request):
all_comics = Comic.objects.sort_by_name()
all_comics = all_comics.annotate(Max('release__fetched'))
all_comics = all_comics.annotate(Count('release'))
return {'all_comics': all_comics}
|
from django.conf import settings
from django.db.models import Count, Max
from comics.core.models import Comic
def site_settings(request):
return {
'site_title': settings.COMICS_SITE_TITLE,
'site_tagline': settings.COMICS_SITE_TAGLINE,
'google_analytics_code': settings.COMICS_GOOGLE_ANALYTICS_CODE,
'search_enabled': 'comics.search' in settings.INSTALLED_APPS,
}
def all_comics(request):
all_comics = Comic.objects.sort_by_name()
all_comics = all_comics.annotate(Max('release__fetched'))
all_comics = all_comics.annotate(Count('release'))
return {'all_comics': all_comics}
|
Add search_enabled to site settings context processor
|
Add search_enabled to site settings context processor
|
Python
|
agpl-3.0
|
jodal/comics,datagutten/comics,jodal/comics,datagutten/comics,klette/comics,klette/comics,datagutten/comics,klette/comics,jodal/comics,datagutten/comics,jodal/comics
|
from django.conf import settings
from django.db.models import Count, Max
from comics.core.models import Comic
def site_settings(request):
return {
'site_title': settings.COMICS_SITE_TITLE,
'site_tagline': settings.COMICS_SITE_TAGLINE,
'google_analytics_code': settings.COMICS_GOOGLE_ANALYTICS_CODE,
+ 'search_enabled': 'comics.search' in settings.INSTALLED_APPS,
}
def all_comics(request):
all_comics = Comic.objects.sort_by_name()
all_comics = all_comics.annotate(Max('release__fetched'))
all_comics = all_comics.annotate(Count('release'))
return {'all_comics': all_comics}
|
Add search_enabled to site settings context processor
|
## Code Before:
from django.conf import settings
from django.db.models import Count, Max
from comics.core.models import Comic
def site_settings(request):
return {
'site_title': settings.COMICS_SITE_TITLE,
'site_tagline': settings.COMICS_SITE_TAGLINE,
'google_analytics_code': settings.COMICS_GOOGLE_ANALYTICS_CODE,
}
def all_comics(request):
all_comics = Comic.objects.sort_by_name()
all_comics = all_comics.annotate(Max('release__fetched'))
all_comics = all_comics.annotate(Count('release'))
return {'all_comics': all_comics}
## Instruction:
Add search_enabled to site settings context processor
## Code After:
from django.conf import settings
from django.db.models import Count, Max
from comics.core.models import Comic
def site_settings(request):
return {
'site_title': settings.COMICS_SITE_TITLE,
'site_tagline': settings.COMICS_SITE_TAGLINE,
'google_analytics_code': settings.COMICS_GOOGLE_ANALYTICS_CODE,
'search_enabled': 'comics.search' in settings.INSTALLED_APPS,
}
def all_comics(request):
all_comics = Comic.objects.sort_by_name()
all_comics = all_comics.annotate(Max('release__fetched'))
all_comics = all_comics.annotate(Count('release'))
return {'all_comics': all_comics}
|
from django.conf import settings
from django.db.models import Count, Max
from comics.core.models import Comic
def site_settings(request):
return {
'site_title': settings.COMICS_SITE_TITLE,
'site_tagline': settings.COMICS_SITE_TAGLINE,
'google_analytics_code': settings.COMICS_GOOGLE_ANALYTICS_CODE,
+ 'search_enabled': 'comics.search' in settings.INSTALLED_APPS,
}
def all_comics(request):
all_comics = Comic.objects.sort_by_name()
all_comics = all_comics.annotate(Max('release__fetched'))
all_comics = all_comics.annotate(Count('release'))
return {'all_comics': all_comics}
|
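A context processor only runs once it is listed in the template settings. A minimal sketch of that wiring, assuming a modern Django TEMPLATES block (older releases used the TEMPLATE_CONTEXT_PROCESSORS list instead); the surrounding settings values are placeholders, not taken from the comics project.

TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "APP_DIRS": True,
        "OPTIONS": {
            "context_processors": [
                "django.template.context_processors.request",
                # makes site_settings and all_comics available in every template
                "comics.core.context_processors.site_settings",
                "comics.core.context_processors.all_comics",
            ],
        },
    },
]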
4524b88eef8a46d40c4d353c3561401ac3689878
|
bookmarks/urls.py
|
bookmarks/urls.py
|
from django.conf.urls import patterns, url
# for voting
from voting.views import vote_on_object
from bookmarks.models import Bookmark
urlpatterns = patterns('',
url(r'^$', 'bookmarks.views.bookmarks', name="all_bookmarks"),
url(r'^your_bookmarks/$', 'bookmarks.views.your_bookmarks', name="your_bookmarks"),
url(r'^add/$', 'bookmarks.views.add', name="add_bookmark"),
url(r'^(\d+)/delete/$', 'bookmarks.views.delete', name="delete_bookmark_instance"),
# for voting
(r'^(?P<object_id>\d+)/(?P<direction>up|down|clear)vote/?$',
vote_on_object, dict(
model=Bookmark,
template_object_name='bookmark',
template_name='kb/link_confirm_vote.html',
allow_xmlhttprequest=True)),
)
|
from django.conf.urls import patterns, url
from django.views.decorators.csrf import csrf_exempt
# for voting
from voting.views import vote_on_object
from bookmarks.models import Bookmark
urlpatterns = patterns('',
url(r'^$', 'bookmarks.views.bookmarks', name="all_bookmarks"),
url(r'^your_bookmarks/$', 'bookmarks.views.your_bookmarks', name="your_bookmarks"),
url(r'^add/$', 'bookmarks.views.add', name="add_bookmark"),
url(r'^(\d+)/delete/$', 'bookmarks.views.delete', name="delete_bookmark_instance"),
# for voting
(r'^(?P<object_id>\d+)/(?P<direction>up|down|clear)vote/?$',
csrf_exempt(vote_on_object), dict(
model=Bookmark,
template_object_name='bookmark',
template_name='kb/link_confirm_vote.html',
allow_xmlhttprequest=True)),
)
|
Disable csrf checks for voting
|
Disable csrf checks for voting
|
Python
|
mit
|
incuna/incuna-bookmarks,incuna/incuna-bookmarks
|
from django.conf.urls import patterns, url
+ from django.views.decorators.csrf import csrf_exempt
# for voting
from voting.views import vote_on_object
from bookmarks.models import Bookmark
urlpatterns = patterns('',
url(r'^$', 'bookmarks.views.bookmarks', name="all_bookmarks"),
url(r'^your_bookmarks/$', 'bookmarks.views.your_bookmarks', name="your_bookmarks"),
url(r'^add/$', 'bookmarks.views.add', name="add_bookmark"),
url(r'^(\d+)/delete/$', 'bookmarks.views.delete', name="delete_bookmark_instance"),
# for voting
(r'^(?P<object_id>\d+)/(?P<direction>up|down|clear)vote/?$',
- vote_on_object, dict(
+ csrf_exempt(vote_on_object), dict(
model=Bookmark,
template_object_name='bookmark',
template_name='kb/link_confirm_vote.html',
allow_xmlhttprequest=True)),
)
|
Disable csrf checks for voting
|
## Code Before:
from django.conf.urls import patterns, url
# for voting
from voting.views import vote_on_object
from bookmarks.models import Bookmark
urlpatterns = patterns('',
url(r'^$', 'bookmarks.views.bookmarks', name="all_bookmarks"),
url(r'^your_bookmarks/$', 'bookmarks.views.your_bookmarks', name="your_bookmarks"),
url(r'^add/$', 'bookmarks.views.add', name="add_bookmark"),
url(r'^(\d+)/delete/$', 'bookmarks.views.delete', name="delete_bookmark_instance"),
# for voting
(r'^(?P<object_id>\d+)/(?P<direction>up|down|clear)vote/?$',
vote_on_object, dict(
model=Bookmark,
template_object_name='bookmark',
template_name='kb/link_confirm_vote.html',
allow_xmlhttprequest=True)),
)
## Instruction:
Disable csrf checks for voting
## Code After:
from django.conf.urls import patterns, url
from django.views.decorators.csrf import csrf_exempt
# for voting
from voting.views import vote_on_object
from bookmarks.models import Bookmark
urlpatterns = patterns('',
url(r'^$', 'bookmarks.views.bookmarks', name="all_bookmarks"),
url(r'^your_bookmarks/$', 'bookmarks.views.your_bookmarks', name="your_bookmarks"),
url(r'^add/$', 'bookmarks.views.add', name="add_bookmark"),
url(r'^(\d+)/delete/$', 'bookmarks.views.delete', name="delete_bookmark_instance"),
# for voting
(r'^(?P<object_id>\d+)/(?P<direction>up|down|clear)vote/?$',
csrf_exempt(vote_on_object), dict(
model=Bookmark,
template_object_name='bookmark',
template_name='kb/link_confirm_vote.html',
allow_xmlhttprequest=True)),
)
|
from django.conf.urls import patterns, url
+ from django.views.decorators.csrf import csrf_exempt
# for voting
from voting.views import vote_on_object
from bookmarks.models import Bookmark
urlpatterns = patterns('',
url(r'^$', 'bookmarks.views.bookmarks', name="all_bookmarks"),
url(r'^your_bookmarks/$', 'bookmarks.views.your_bookmarks', name="your_bookmarks"),
url(r'^add/$', 'bookmarks.views.add', name="add_bookmark"),
url(r'^(\d+)/delete/$', 'bookmarks.views.delete', name="delete_bookmark_instance"),
# for voting
(r'^(?P<object_id>\d+)/(?P<direction>up|down|clear)vote/?$',
- vote_on_object, dict(
+ csrf_exempt(vote_on_object), dict(
? ++++++++++++ +
model=Bookmark,
template_object_name='bookmark',
template_name='kb/link_confirm_vote.html',
allow_xmlhttprequest=True)),
)
|
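csrf_exempt can wrap a view inside the URLconf, as in the diff above, or decorate the view function directly. A small sketch of the decorator form, assuming Django is installed; the view body is a stand-in, not the voting view from the commit.

from django.http import JsonResponse
from django.views.decorators.csrf import csrf_exempt

@csrf_exempt  # skip CSRF verification for this endpoint only
def vote(request, object_id, direction):
    # a real view would record the vote; this just echoes the URL parameters
    return JsonResponse({"object_id": object_id, "direction": direction})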
24ea32f71faab214a6f350d2d48b2f5715d8262d
|
manage.py
|
manage.py
|
from flask_restful import Api
from flask_migrate import Migrate, MigrateCommand
from flask_script import Manager
from app import app
from app import db
from app.auth import Register, Login
from app.bucketlist_api import BucketList, BucketListEntry
from app.bucketlist_items import BucketListItems, BucketListItemSingle
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
api = Api(app)
api.add_resource(Register, '/auth/register')
api.add_resource(Login, '/auth/login')
api.add_resource(BucketList, '/bucketlists')
api.add_resource(BucketListEntry, '/bucketlists/<int:bucketlist_id>')
api.add_resource(BucketListItems, '/bucketlists/<int:bucketlist_id>/items')
api.add_resource(BucketListItemSingle,
'/bucketlists/<int:bucketlist_id>/items/<int:item_id>')
if __name__ == '__main__':
manager.run()
|
from flask_restful import Api
from flask_migrate import Migrate, MigrateCommand
from flask_script import Manager
from app import app, db
from app.auth import Register, Login
from app.bucketlist_api import BucketLists, BucketListSingle
from app.bucketlist_items import BucketListItems, BucketListItemSingle
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
api = Api(app)
api.add_resource(Register, '/auth/register')
api.add_resource(Login, '/auth/login')
api.add_resource(BucketLists, '/bucketlists')
api.add_resource(BucketListSingle, '/bucketlists/<int:bucketlist_id>')
api.add_resource(BucketListItems, '/bucketlists/<int:bucketlist_id>/items')
api.add_resource(BucketListItemSingle,
'/bucketlists/<int:bucketlist_id>/items/<int:item_id>')
if __name__ == '__main__':
manager.run()
|
Set urls for bucketlist items endpoints
|
Set urls for bucketlist items endpoints
|
Python
|
mit
|
andela-bmwenda/cp2-bucketlist-api
|
from flask_restful import Api
from flask_migrate import Migrate, MigrateCommand
from flask_script import Manager
- from app import app
+ from app import app, db
- from app import db
from app.auth import Register, Login
- from app.bucketlist_api import BucketList, BucketListEntry
+ from app.bucketlist_api import BucketLists, BucketListSingle
from app.bucketlist_items import BucketListItems, BucketListItemSingle
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
api = Api(app)
api.add_resource(Register, '/auth/register')
api.add_resource(Login, '/auth/login')
- api.add_resource(BucketList, '/bucketlists')
+ api.add_resource(BucketLists, '/bucketlists')
- api.add_resource(BucketListEntry, '/bucketlists/<int:bucketlist_id>')
+ api.add_resource(BucketListSingle, '/bucketlists/<int:bucketlist_id>')
api.add_resource(BucketListItems, '/bucketlists/<int:bucketlist_id>/items')
api.add_resource(BucketListItemSingle,
'/bucketlists/<int:bucketlist_id>/items/<int:item_id>')
if __name__ == '__main__':
manager.run()
|
Set urls for bucketlist items endpoints
|
## Code Before:
from flask_restful import Api
from flask_migrate import Migrate, MigrateCommand
from flask_script import Manager
from app import app
from app import db
from app.auth import Register, Login
from app.bucketlist_api import BucketList, BucketListEntry
from app.bucketlist_items import BucketListItems, BucketListItemSingle
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
api = Api(app)
api.add_resource(Register, '/auth/register')
api.add_resource(Login, '/auth/login')
api.add_resource(BucketList, '/bucketlists')
api.add_resource(BucketListEntry, '/bucketlists/<int:bucketlist_id>')
api.add_resource(BucketListItems, '/bucketlists/<int:bucketlist_id>/items')
api.add_resource(BucketListItemSingle,
'/bucketlists/<int:bucketlist_id>/items/<int:item_id>')
if __name__ == '__main__':
manager.run()
## Instruction:
Set urls for bucketlist items endpoints
## Code After:
from flask_restful import Api
from flask_migrate import Migrate, MigrateCommand
from flask_script import Manager
from app import app, db
from app.auth import Register, Login
from app.bucketlist_api import BucketLists, BucketListSingle
from app.bucketlist_items import BucketListItems, BucketListItemSingle
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
api = Api(app)
api.add_resource(Register, '/auth/register')
api.add_resource(Login, '/auth/login')
api.add_resource(BucketLists, '/bucketlists')
api.add_resource(BucketListSingle, '/bucketlists/<int:bucketlist_id>')
api.add_resource(BucketListItems, '/bucketlists/<int:bucketlist_id>/items')
api.add_resource(BucketListItemSingle,
'/bucketlists/<int:bucketlist_id>/items/<int:item_id>')
if __name__ == '__main__':
manager.run()
|
from flask_restful import Api
from flask_migrate import Migrate, MigrateCommand
from flask_script import Manager
- from app import app
+ from app import app, db
? ++++
- from app import db
from app.auth import Register, Login
- from app.bucketlist_api import BucketList, BucketListEntry
? ^ ^^^
+ from app.bucketlist_api import BucketLists, BucketListSingle
? + ^^ ^^^
from app.bucketlist_items import BucketListItems, BucketListItemSingle
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
api = Api(app)
api.add_resource(Register, '/auth/register')
api.add_resource(Login, '/auth/login')
- api.add_resource(BucketList, '/bucketlists')
+ api.add_resource(BucketLists, '/bucketlists')
? +
- api.add_resource(BucketListEntry, '/bucketlists/<int:bucketlist_id>')
? ^ ^^^
+ api.add_resource(BucketListSingle, '/bucketlists/<int:bucketlist_id>')
? ^^ ^^^
api.add_resource(BucketListItems, '/bucketlists/<int:bucketlist_id>/items')
api.add_resource(BucketListItemSingle,
'/bucketlists/<int:bucketlist_id>/items/<int:item_id>')
if __name__ == '__main__':
manager.run()
|
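Each api.add_resource call maps the URL converters onto the method arguments of a Resource class. A minimal sketch of what one of those classes could look like, assuming Flask-RESTful; BucketListSingle here is a stand-in, not the implementation from app.bucketlist_api.

from flask import Flask
from flask_restful import Api, Resource

app = Flask(__name__)
api = Api(app)

class BucketListSingle(Resource):
    def get(self, bucketlist_id):
        # <int:bucketlist_id> in the route arrives here as an int
        return {"id": bucketlist_id}, 200

api.add_resource(BucketListSingle, "/bucketlists/<int:bucketlist_id>")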
45b3fc7babfbd922bdb174e5156f54c567a66de4
|
plotly/tests/test_core/test_graph_objs/test_graph_objs_tools.py
|
plotly/tests/test_core/test_graph_objs/test_graph_objs_tools.py
|
from __future__ import absolute_import
from unittest import TestCase
|
from __future__ import absolute_import
from unittest import TestCase
from plotly.graph_objs import graph_objs as go
from plotly.graph_objs import graph_objs_tools as got
class TestGetRole(TestCase):
def test_get_role_no_value(self):
# this is a bit fragile, but we pick a few stable values
# the location in the figure matters for this test!
fig = go.Figure(data=[{}])
fig.data[0].marker.color = 'red'
fig.layout.title = 'some-title'
parent_key_role_tuples = [
(fig.data[0], 'x', 'data'),
(fig.data[0], 'marker', 'object'),
(fig.data[0].marker, 'color', 'style'),
(fig.layout, 'title', 'info'),
(fig, 'data', 'object'),
]
for parent, key, role in parent_key_role_tuples:
self.assertEqual(got.get_role(parent, key), role, msg=key)
def test_get_role_with_value(self):
# some attributes are conditionally considered data if they're arrays
# the location in the figure matters for this test!
fig = go.Figure(data=[{}])
fig.data[0].marker.color = 'red'
parent_key_value_role_tuples = [
(fig.data[0], 'x', 'wh0cares', 'data'),
(fig.data[0], 'marker', 'wh0cares', 'object'),
(fig.data[0].marker, 'color', 'red', 'style'),
(fig.data[0].marker, 'color', ['red'], 'data')
]
for parent, key, value, role in parent_key_value_role_tuples:
self.assertEqual(got.get_role(parent, key, value), role,
msg=(key, value))
|
Add some :tiger2:s for `graph_objs_tools.py`.
|
Add some :tiger2:s for `graph_objs_tools.py`.
|
Python
|
mit
|
plotly/plotly.py,plotly/python-api,plotly/plotly.py,plotly/python-api,plotly/plotly.py,plotly/python-api
|
from __future__ import absolute_import
from unittest import TestCase
+ from plotly.graph_objs import graph_objs as go
+ from plotly.graph_objs import graph_objs_tools as got
+
+
+ class TestGetRole(TestCase):
+
+ def test_get_role_no_value(self):
+
+ # this is a bit fragile, but we pick a few stable values
+
+ # the location in the figure matters for this test!
+ fig = go.Figure(data=[{}])
+ fig.data[0].marker.color = 'red'
+ fig.layout.title = 'some-title'
+
+ parent_key_role_tuples = [
+ (fig.data[0], 'x', 'data'),
+ (fig.data[0], 'marker', 'object'),
+ (fig.data[0].marker, 'color', 'style'),
+ (fig.layout, 'title', 'info'),
+ (fig, 'data', 'object'),
+ ]
+ for parent, key, role in parent_key_role_tuples:
+ self.assertEqual(got.get_role(parent, key), role, msg=key)
+
+ def test_get_role_with_value(self):
+
+ # some attributes are conditionally considered data if they're arrays
+
+ # the location in the figure matters for this test!
+ fig = go.Figure(data=[{}])
+ fig.data[0].marker.color = 'red'
+
+ parent_key_value_role_tuples = [
+ (fig.data[0], 'x', 'wh0cares', 'data'),
+ (fig.data[0], 'marker', 'wh0cares', 'object'),
+ (fig.data[0].marker, 'color', 'red', 'style'),
+ (fig.data[0].marker, 'color', ['red'], 'data')
+ ]
+ for parent, key, value, role in parent_key_value_role_tuples:
+ self.assertEqual(got.get_role(parent, key, value), role,
+ msg=(key, value))
+
|
Add some :tiger2:s for `graph_objs_tools.py`.
|
## Code Before:
from __future__ import absolute_import
from unittest import TestCase
## Instruction:
Add some :tiger2:s for `graph_objs_tools.py`.
## Code After:
from __future__ import absolute_import
from unittest import TestCase
from plotly.graph_objs import graph_objs as go
from plotly.graph_objs import graph_objs_tools as got
class TestGetRole(TestCase):
def test_get_role_no_value(self):
# this is a bit fragile, but we pick a few stable values
# the location in the figure matters for this test!
fig = go.Figure(data=[{}])
fig.data[0].marker.color = 'red'
fig.layout.title = 'some-title'
parent_key_role_tuples = [
(fig.data[0], 'x', 'data'),
(fig.data[0], 'marker', 'object'),
(fig.data[0].marker, 'color', 'style'),
(fig.layout, 'title', 'info'),
(fig, 'data', 'object'),
]
for parent, key, role in parent_key_role_tuples:
self.assertEqual(got.get_role(parent, key), role, msg=key)
def test_get_role_with_value(self):
# some attributes are conditionally considered data if they're arrays
# the location in the figure matters for this test!
fig = go.Figure(data=[{}])
fig.data[0].marker.color = 'red'
parent_key_value_role_tuples = [
(fig.data[0], 'x', 'wh0cares', 'data'),
(fig.data[0], 'marker', 'wh0cares', 'object'),
(fig.data[0].marker, 'color', 'red', 'style'),
(fig.data[0].marker, 'color', ['red'], 'data')
]
for parent, key, value, role in parent_key_value_role_tuples:
self.assertEqual(got.get_role(parent, key, value), role,
msg=(key, value))
|
from __future__ import absolute_import
from unittest import TestCase
+
+ from plotly.graph_objs import graph_objs as go
+ from plotly.graph_objs import graph_objs_tools as got
+
+
+ class TestGetRole(TestCase):
+
+ def test_get_role_no_value(self):
+
+ # this is a bit fragile, but we pick a few stable values
+
+ # the location in the figure matters for this test!
+ fig = go.Figure(data=[{}])
+ fig.data[0].marker.color = 'red'
+ fig.layout.title = 'some-title'
+
+ parent_key_role_tuples = [
+ (fig.data[0], 'x', 'data'),
+ (fig.data[0], 'marker', 'object'),
+ (fig.data[0].marker, 'color', 'style'),
+ (fig.layout, 'title', 'info'),
+ (fig, 'data', 'object'),
+ ]
+ for parent, key, role in parent_key_role_tuples:
+ self.assertEqual(got.get_role(parent, key), role, msg=key)
+
+ def test_get_role_with_value(self):
+
+ # some attributes are conditionally considered data if they're arrays
+
+ # the location in the figure matters for this test!
+ fig = go.Figure(data=[{}])
+ fig.data[0].marker.color = 'red'
+
+ parent_key_value_role_tuples = [
+ (fig.data[0], 'x', 'wh0cares', 'data'),
+ (fig.data[0], 'marker', 'wh0cares', 'object'),
+ (fig.data[0].marker, 'color', 'red', 'style'),
+ (fig.data[0].marker, 'color', ['red'], 'data')
+ ]
+ for parent, key, value, role in parent_key_value_role_tuples:
+ self.assertEqual(got.get_role(parent, key, value), role,
+ msg=(key, value))
|
25af2e47b5b107ce4a0be4963b70bbf04b22c142
|
tests/test_element.py
|
tests/test_element.py
|
import mdtraj as md
import pytest
from mdtraj import element
from mdtraj.testing import eq
def test_immutable():
def f():
element.hydrogen.mass = 1
def g():
element.radium.symbol = 'sdfsdfsdf'
def h():
element.iron.name = 'sdfsdf'
pytest.raises(AttributeError, f)
pytest.raises(AttributeError, g)
pytest.raises(AttributeError, h)
assert element.hydrogen.mass == 1.007947
assert element.radium.symbol == 'Ra'
assert element.iron.name == 'iron'
def test_element_0(get_fn):
t = md.load(get_fn('bpti.pdb'))
a = t.top.atom(15)
H = element.Element.getBySymbol('H')
eq(a.element, element.hydrogen)
|
import mdtraj as md
import pytest
import pickle
from mdtraj import element
from mdtraj.testing import eq
def test_immutable():
def f():
element.hydrogen.mass = 1
def g():
element.radium.symbol = 'sdfsdfsdf'
def h():
element.iron.name = 'sdfsdf'
pytest.raises(AttributeError, f)
pytest.raises(AttributeError, g)
pytest.raises(AttributeError, h)
assert element.hydrogen.mass == 1.007947
assert element.radium.symbol == 'Ra'
assert element.iron.name == 'iron'
def test_element_0(get_fn):
t = md.load(get_fn('bpti.pdb'))
a = t.top.atom(15)
H = element.Element.getBySymbol('H')
eq(a.element, element.hydrogen)
def test_element_pickle():
"""Test that every Element object can pickle and de-pickle"""
for el in dir(element):
if isinstance(el, element.Element):
assert el == pickle.loads(pickle.dumps(el))
|
Add basic element pickle cycle test
|
Add basic element pickle cycle test
|
Python
|
lgpl-2.1
|
dwhswenson/mdtraj,mattwthompson/mdtraj,jchodera/mdtraj,gph82/mdtraj,dwhswenson/mdtraj,jchodera/mdtraj,rmcgibbo/mdtraj,leeping/mdtraj,gph82/mdtraj,leeping/mdtraj,jchodera/mdtraj,rmcgibbo/mdtraj,mattwthompson/mdtraj,jchodera/mdtraj,dwhswenson/mdtraj,mdtraj/mdtraj,gph82/mdtraj,leeping/mdtraj,leeping/mdtraj,mattwthompson/mdtraj,mdtraj/mdtraj,mdtraj/mdtraj,rmcgibbo/mdtraj,mattwthompson/mdtraj
|
import mdtraj as md
import pytest
+ import pickle
from mdtraj import element
from mdtraj.testing import eq
def test_immutable():
def f():
element.hydrogen.mass = 1
def g():
element.radium.symbol = 'sdfsdfsdf'
def h():
element.iron.name = 'sdfsdf'
pytest.raises(AttributeError, f)
pytest.raises(AttributeError, g)
pytest.raises(AttributeError, h)
assert element.hydrogen.mass == 1.007947
assert element.radium.symbol == 'Ra'
assert element.iron.name == 'iron'
def test_element_0(get_fn):
t = md.load(get_fn('bpti.pdb'))
a = t.top.atom(15)
H = element.Element.getBySymbol('H')
eq(a.element, element.hydrogen)
+
+ def test_element_pickle():
+ """Test that every Element object can pickle and de-pickle"""
+ for el in dir(element):
+ if isinstance(el, element.Element):
+ assert el == pickle.loads(pickle.dumps(el))
+
|
Add basic element pickle cycle test
|
## Code Before:
import mdtraj as md
import pytest
from mdtraj import element
from mdtraj.testing import eq
def test_immutable():
def f():
element.hydrogen.mass = 1
def g():
element.radium.symbol = 'sdfsdfsdf'
def h():
element.iron.name = 'sdfsdf'
pytest.raises(AttributeError, f)
pytest.raises(AttributeError, g)
pytest.raises(AttributeError, h)
assert element.hydrogen.mass == 1.007947
assert element.radium.symbol == 'Ra'
assert element.iron.name == 'iron'
def test_element_0(get_fn):
t = md.load(get_fn('bpti.pdb'))
a = t.top.atom(15)
H = element.Element.getBySymbol('H')
eq(a.element, element.hydrogen)
## Instruction:
Add basic element pickle cycle test
## Code After:
import mdtraj as md
import pytest
import pickle
from mdtraj import element
from mdtraj.testing import eq
def test_immutable():
def f():
element.hydrogen.mass = 1
def g():
element.radium.symbol = 'sdfsdfsdf'
def h():
element.iron.name = 'sdfsdf'
pytest.raises(AttributeError, f)
pytest.raises(AttributeError, g)
pytest.raises(AttributeError, h)
assert element.hydrogen.mass == 1.007947
assert element.radium.symbol == 'Ra'
assert element.iron.name == 'iron'
def test_element_0(get_fn):
t = md.load(get_fn('bpti.pdb'))
a = t.top.atom(15)
H = element.Element.getBySymbol('H')
eq(a.element, element.hydrogen)
def test_element_pickle():
"""Test that every Element object can pickle and de-pickle"""
for el in dir(element):
if isinstance(el, element.Element):
assert el == pickle.loads(pickle.dumps(el))
|
import mdtraj as md
import pytest
+ import pickle
from mdtraj import element
from mdtraj.testing import eq
def test_immutable():
def f():
element.hydrogen.mass = 1
def g():
element.radium.symbol = 'sdfsdfsdf'
def h():
element.iron.name = 'sdfsdf'
pytest.raises(AttributeError, f)
pytest.raises(AttributeError, g)
pytest.raises(AttributeError, h)
assert element.hydrogen.mass == 1.007947
assert element.radium.symbol == 'Ra'
assert element.iron.name == 'iron'
def test_element_0(get_fn):
t = md.load(get_fn('bpti.pdb'))
a = t.top.atom(15)
H = element.Element.getBySymbol('H')
eq(a.element, element.hydrogen)
+
+
+ def test_element_pickle():
+ """Test that every Element object can pickle and de-pickle"""
+ for el in dir(element):
+ if isinstance(el, element.Element):
+ assert el == pickle.loads(pickle.dumps(el))
|
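The same pickle round-trip idea in isolation: a value survives dumps/loads and compares equal afterwards. The sketch below uses only the standard library, so it stands in for the mdtraj Element objects rather than reproducing them.

import pickle
from fractions import Fraction

original = Fraction(3, 4)
restored = pickle.loads(pickle.dumps(original))
assert restored == original          # equality survives the round trip
assert type(restored) is Fraction    # so does the concrete type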
0946379d23131aeec07dc29bebd4e57d95298d00
|
recipes/sos-notebook/run_test.py
|
recipes/sos-notebook/run_test.py
|
import jupyter_client
try:
jupyter_client.kernelspec.get_kernel_spec('sos')
except jupyter_client.kernelspec.NoSuchKernel:
print('sos kernel was not installed')
print('The following kernels are installed:')
print('jupyter_client.kernelspec.find_kernel_specs()')
print(jupyter_client.kernelspec.find_kernel_specs())
# Test that sos kernel is functional
import unittest
from ipykernel.tests.utils import execute, wait_for_idle, assemble_output
from sos_notebook.test_utils import sos_kernel
class TestSoSKernel(unittest.TestCase):
def testKernel(self):
with sos_kernel() as kc:
execute(kc=kc, code='a = 1\nprint(a)')
stdout, stderr = assemble_output(kc.iopub_channel)
self.assertEqual(stderr, '')
self.assertEqual(stdout.strip(), '1')
if __name__ == '__main__':
unittest.main()
|
import jupyter_client
try:
jupyter_client.kernelspec.get_kernel_spec('sos')
except jupyter_client.kernelspec.NoSuchKernel:
print('sos kernel was not installed')
print('The following kernels are installed:')
print('jupyter_client.kernelspec.find_kernel_specs()')
print(jupyter_client.kernelspec.find_kernel_specs())
# Test that sos kernel is functional
import unittest
from sos_notebook.test_utils import sos_kernel
from ipykernel.tests.utils import execute, wait_for_idle, assemble_output
class TestSoSKernel(unittest.TestCase):
def testKernel(self):
with sos_kernel() as kc:
execute(kc=kc, code='a = 1\nprint(a)')
stdout, stderr = assemble_output(kc.iopub_channel)
self.assertEqual(stderr, '')
self.assertEqual(stdout.strip(), '1')
if __name__ == '__main__':
unittest.main()
|
Use longer TIMEOUT defined in sos_notebook.test_utils.
|
Use longer TIMEOUT defined in sos_notebook.test_utils.
|
Python
|
bsd-3-clause
|
SylvainCorlay/staged-recipes,synapticarbors/staged-recipes,jochym/staged-recipes,mcs07/staged-recipes,ceholden/staged-recipes,igortg/staged-recipes,goanpeca/staged-recipes,johanneskoester/staged-recipes,isuruf/staged-recipes,scopatz/staged-recipes,chrisburr/staged-recipes,petrushy/staged-recipes,asmeurer/staged-recipes,Juanlu001/staged-recipes,dschreij/staged-recipes,synapticarbors/staged-recipes,mariusvniekerk/staged-recipes,basnijholt/staged-recipes,mariusvniekerk/staged-recipes,mcs07/staged-recipes,conda-forge/staged-recipes,birdsarah/staged-recipes,birdsarah/staged-recipes,jjhelmus/staged-recipes,cpaulik/staged-recipes,petrushy/staged-recipes,jjhelmus/staged-recipes,SylvainCorlay/staged-recipes,jochym/staged-recipes,dschreij/staged-recipes,jakirkham/staged-recipes,hadim/staged-recipes,jakirkham/staged-recipes,patricksnape/staged-recipes,patricksnape/staged-recipes,igortg/staged-recipes,scopatz/staged-recipes,stuertz/staged-recipes,johanneskoester/staged-recipes,ceholden/staged-recipes,ReimarBauer/staged-recipes,hadim/staged-recipes,basnijholt/staged-recipes,kwilcox/staged-recipes,chrisburr/staged-recipes,kwilcox/staged-recipes,cpaulik/staged-recipes,stuertz/staged-recipes,goanpeca/staged-recipes,ocefpaf/staged-recipes,isuruf/staged-recipes,Juanlu001/staged-recipes,conda-forge/staged-recipes,ReimarBauer/staged-recipes,ocefpaf/staged-recipes,asmeurer/staged-recipes
|
import jupyter_client
try:
jupyter_client.kernelspec.get_kernel_spec('sos')
except jupyter_client.kernelspec.NoSuchKernel:
print('sos kernel was not installed')
print('The following kernels are installed:')
print('jupyter_client.kernelspec.find_kernel_specs()')
print(jupyter_client.kernelspec.find_kernel_specs())
# Test that sos kernel is functional
import unittest
+ from sos_notebook.test_utils import sos_kernel
from ipykernel.tests.utils import execute, wait_for_idle, assemble_output
- from sos_notebook.test_utils import sos_kernel
class TestSoSKernel(unittest.TestCase):
def testKernel(self):
with sos_kernel() as kc:
execute(kc=kc, code='a = 1\nprint(a)')
stdout, stderr = assemble_output(kc.iopub_channel)
self.assertEqual(stderr, '')
self.assertEqual(stdout.strip(), '1')
if __name__ == '__main__':
unittest.main()
|
Use longer TIMEOUT defined in sos_notebook.test_utils.
|
## Code Before:
import jupyter_client
try:
jupyter_client.kernelspec.get_kernel_spec('sos')
except jupyter_client.kernelspec.NoSuchKernel:
print('sos kernel was not installed')
print('The following kernels are installed:')
print('jupyter_client.kernelspec.find_kernel_specs()')
print(jupyter_client.kernelspec.find_kernel_specs())
# Test that sos kernel is functional
import unittest
from ipykernel.tests.utils import execute, wait_for_idle, assemble_output
from sos_notebook.test_utils import sos_kernel
class TestSoSKernel(unittest.TestCase):
def testKernel(self):
with sos_kernel() as kc:
execute(kc=kc, code='a = 1\nprint(a)')
stdout, stderr = assemble_output(kc.iopub_channel)
self.assertEqual(stderr, '')
self.assertEqual(stdout.strip(), '1')
if __name__ == '__main__':
unittest.main()
## Instruction:
Use longer TIMEOUT defined in sos_notebook.test_utils.
## Code After:
import jupyter_client
try:
jupyter_client.kernelspec.get_kernel_spec('sos')
except jupyter_client.kernelspec.NoSuchKernel:
print('sos kernel was not installed')
print('The following kernels are installed:')
print('jupyter_client.kernelspec.find_kernel_specs()')
print(jupyter_client.kernelspec.find_kernel_specs())
# Test that sos kernel is functional
import unittest
from sos_notebook.test_utils import sos_kernel
from ipykernel.tests.utils import execute, wait_for_idle, assemble_output
class TestSoSKernel(unittest.TestCase):
def testKernel(self):
with sos_kernel() as kc:
execute(kc=kc, code='a = 1\nprint(a)')
stdout, stderr = assemble_output(kc.iopub_channel)
self.assertEqual(stderr, '')
self.assertEqual(stdout.strip(), '1')
if __name__ == '__main__':
unittest.main()
|
import jupyter_client
try:
jupyter_client.kernelspec.get_kernel_spec('sos')
except jupyter_client.kernelspec.NoSuchKernel:
print('sos kernel was not installed')
print('The following kernels are installed:')
print('jupyter_client.kernelspec.find_kernel_specs()')
print(jupyter_client.kernelspec.find_kernel_specs())
# Test that sos kernel is functional
import unittest
+ from sos_notebook.test_utils import sos_kernel
from ipykernel.tests.utils import execute, wait_for_idle, assemble_output
- from sos_notebook.test_utils import sos_kernel
class TestSoSKernel(unittest.TestCase):
def testKernel(self):
with sos_kernel() as kc:
execute(kc=kc, code='a = 1\nprint(a)')
stdout, stderr = assemble_output(kc.iopub_channel)
self.assertEqual(stderr, '')
self.assertEqual(stdout.strip(), '1')
if __name__ == '__main__':
unittest.main()
|
530bd321f38a0131eb250148bd0a67d9a59da34c
|
uno_image.py
|
uno_image.py
|
import unohelper
from com.sun.star.task import XJobExecutor
class ImageExample(unohelper.Base, XJobExecutor):
'''Class that implements the service registered in LibreOffice'''
def __init__(self, context):
self.context = context
g_ImplementationHelper = unohelper.ImplementationHelper()
g_ImplementationHelper.addImplementation(
ImageExample,
'org.libreoffice.imageexample.ImageExample',
('com.sun.star.task.JobExecutor',))
|
import unohelper
from com.sun.star.task import XJobExecutor
class ImageExample(unohelper.Base, XJobExecutor):
'''Class that implements the service registered in LibreOffice'''
def __init__(self, context):
self.context = context
self.desktop = self.createUnoService("com.sun.star.frame.Desktop")
self.graphics = self.createUnoService("com.sun.star.graphic.GraphicProvider")
def createUnoService(self, name):
return self.context.ServiceManager.createInstanceWithContext(name, self.context)
g_ImplementationHelper = unohelper.ImplementationHelper()
g_ImplementationHelper.addImplementation(
ImageExample,
'org.libreoffice.imageexample.ImageExample',
('com.sun.star.task.JobExecutor',))
|
Add code to create needed uno services
|
Add code to create needed uno services
|
Python
|
mpl-2.0
|
JIghtuse/uno-image-manipulation-example
|
import unohelper
from com.sun.star.task import XJobExecutor
class ImageExample(unohelper.Base, XJobExecutor):
'''Class that implements the service registered in LibreOffice'''
def __init__(self, context):
self.context = context
+ self.desktop = self.createUnoService("com.sun.star.frame.Desktop")
+ self.graphics = self.createUnoService("com.sun.star.graphic.GraphicProvider")
+
+ def createUnoService(self, name):
+ return self.context.ServiceManager.createInstanceWithContext(name, self.context)
g_ImplementationHelper = unohelper.ImplementationHelper()
g_ImplementationHelper.addImplementation(
ImageExample,
'org.libreoffice.imageexample.ImageExample',
('com.sun.star.task.JobExecutor',))
|
Add code to create needed uno services
|
## Code Before:
import unohelper
from com.sun.star.task import XJobExecutor
class ImageExample(unohelper.Base, XJobExecutor):
'''Class that implements the service registered in LibreOffice'''
def __init__(self, context):
self.context = context
g_ImplementationHelper = unohelper.ImplementationHelper()
g_ImplementationHelper.addImplementation(
ImageExample,
'org.libreoffice.imageexample.ImageExample',
('com.sun.star.task.JobExecutor',))
## Instruction:
Add code to create needed uno services
## Code After:
import unohelper
from com.sun.star.task import XJobExecutor
class ImageExample(unohelper.Base, XJobExecutor):
'''Class that implements the service registered in LibreOffice'''
def __init__(self, context):
self.context = context
self.desktop = self.createUnoService("com.sun.star.frame.Desktop")
self.graphics = self.createUnoService("com.sun.star.graphic.GraphicProvider")
def createUnoService(self, name):
return self.context.ServiceManager.createInstanceWithContext(name, self.context)
g_ImplementationHelper = unohelper.ImplementationHelper()
g_ImplementationHelper.addImplementation(
ImageExample,
'org.libreoffice.imageexample.ImageExample',
('com.sun.star.task.JobExecutor',))
|
import unohelper
from com.sun.star.task import XJobExecutor
class ImageExample(unohelper.Base, XJobExecutor):
'''Class that implements the service registered in LibreOffice'''
def __init__(self, context):
self.context = context
+ self.desktop = self.createUnoService("com.sun.star.frame.Desktop")
+ self.graphics = self.createUnoService("com.sun.star.graphic.GraphicProvider")
+
+ def createUnoService(self, name):
+ return self.context.ServiceManager.createInstanceWithContext(name, self.context)
g_ImplementationHelper = unohelper.ImplementationHelper()
g_ImplementationHelper.addImplementation(
ImageExample,
'org.libreoffice.imageexample.ImageExample',
('com.sun.star.task.JobExecutor',))
|
8064be72de340fca963da2cade2b73aa969fbdbd
|
csunplugged/activities/models.py
|
csunplugged/activities/models.py
|
from django.db import models
class Activity(models.Model):
name = models.CharField(max_length=200)
description = models.TextField()
|
from django.db import models
class Activity(models.Model):
name = models.CharField(max_length=200)
description = models.TextField()
def __str__(self):
return self.name
|
Add string representation for Activity model
|
Add string representation for Activity model
|
Python
|
mit
|
uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged
|
from django.db import models
class Activity(models.Model):
name = models.CharField(max_length=200)
description = models.TextField()
+ def __str__(self):
+ return self.name
+
|
Add string representation for Activity model
|
## Code Before:
from django.db import models
class Activity(models.Model):
name = models.CharField(max_length=200)
description = models.TextField()
## Instruction:
Add string representation for Activity model
## Code After:
from django.db import models
class Activity(models.Model):
name = models.CharField(max_length=200)
description = models.TextField()
def __str__(self):
return self.name
|
from django.db import models
class Activity(models.Model):
name = models.CharField(max_length=200)
description = models.TextField()
+
+ def __str__(self):
+ return self.name
|
c65ed9ec976c440b46dedc514daf883bba940282
|
myElsClient.py
|
myElsClient.py
|
import requests
class myElsClient:
"""A class that implements a Python interface to api.elsevier.com"""
# local variables
__base_url = "http://api.elsevier.com/"
def __init__(self, apiKey):
"""Instantiates a client with a given API Key."""
self.apiKey = apiKey
def getBaseURL(self):
return self.__base_url
|
import requests
class myElsClient:
"""A class that implements a Python interface to api.elsevier.com"""
# local variables
__base_url = "http://api.elsevier.com/"
# constructors
def __init__(self, apiKey):
"""Instantiates a client with a given API Key."""
self.apiKey = apiKey
# configuration functions
"""Sets an institutional token for customer authentication"""
def setInstToken(self, instToken):
self.instToken = instToken
# utility access functions
def getBaseURL(self):
"""Returns the base URL currently configured for Elsevier's APIs"""
return self.__base_url
|
Add ability to set insttoken
|
Add ability to set insttoken
|
Python
|
bsd-3-clause
|
ElsevierDev/elsapy
|
import requests
class myElsClient:
"""A class that implements a Python interface to api.elsevier.com"""
# local variables
__base_url = "http://api.elsevier.com/"
-
+
+ # constructors
def __init__(self, apiKey):
"""Instantiates a client with a given API Key."""
self.apiKey = apiKey
+ # configuration functions
+ """Sets an institutional token for customer authentication"""
+ def setInstToken(self, instToken):
+ self.instToken = instToken
+
+ # utility access functions
def getBaseURL(self):
+ """Returns the base URL currently configured for Elsevier's APIs"""
return self.__base_url
|
Add ability to set insttoken
|
## Code Before:
import requests
class myElsClient:
"""A class that implements a Python interface to api.elsevier.com"""
# local variables
__base_url = "http://api.elsevier.com/"
def __init__(self, apiKey):
"""Instantiates a client with a given API Key."""
self.apiKey = apiKey
def getBaseURL(self):
return self.__base_url
## Instruction:
Add ability to set insttoken
## Code After:
import requests
class myElsClient:
"""A class that implements a Python interface to api.elsevier.com"""
# local variables
__base_url = "http://api.elsevier.com/"
# constructors
def __init__(self, apiKey):
"""Instantiates a client with a given API Key."""
self.apiKey = apiKey
# configuration functions
"""Sets an institutional token for customer authentication"""
def setInstToken(self, instToken):
self.instToken = instToken
# utility access functions
def getBaseURL(self):
"""Returns the base URL currently configured for Elsevier's APIs"""
return self.__base_url
|
import requests
class myElsClient:
"""A class that implements a Python interface to api.elsevier.com"""
# local variables
__base_url = "http://api.elsevier.com/"
-
+
+ # constructors
def __init__(self, apiKey):
"""Instantiates a client with a given API Key."""
self.apiKey = apiKey
+ # configuration functions
+ """Sets an institutional token for customer authentication"""
+ def setInstToken(self, instToken):
+ self.instToken = instToken
+
+ # utility access functions
def getBaseURL(self):
+ """Returns the base URL currently configured for Elsevier's APIs"""
return self.__base_url
|
2a494efd72d34ac638763d162559d43fe3705698
|
test/test_datac.py
|
test/test_datac.py
|
import datac
import numpy as np
import os
params = {"temp_sun": 6000.}
bandgaps = np.linspace(0, 3.25, 100)
abscissae = datac.generate_abscissae(bandgaps, "bandgap", params)
pwd = os.getcwd()
testdir = "test"
filename = "data.dat"
fqpn = os.path.join(pwd, testdir, filename)
datac.write_json(fqpn, abscissae)
|
import datac
import numpy as np
import os
import unittest
class dummyclass(object):
"""
Simple class for testing `generate_ordinates`
"""
def __init__(self, params):
pass
def fun(self):
"""
Return value of `True`
"""
return True
params = {"temp_sun": 6000.}
bandgaps = np.linspace(0, 3.25, 100)
abscissae = datac.generate_abscissae(bandgaps, "bandgap", params)
data = datac.generate_ordinates(abscissae, dummyclass, "fun")
|
Add dummy class to test generate_ordinates
|
Add dummy class to test generate_ordinates
|
Python
|
mit
|
jrsmith3/datac,jrsmith3/datac
|
import datac
import numpy as np
import os
+ import unittest
+
+ class dummyclass(object):
+ """
+ Simple class for testing `generate_ordinates`
+ """
+ def __init__(self, params):
+ pass
+
+ def fun(self):
+ """
+ Return value of `True`
+ """
+ return True
+
params = {"temp_sun": 6000.}
bandgaps = np.linspace(0, 3.25, 100)
abscissae = datac.generate_abscissae(bandgaps, "bandgap", params)
+ data = datac.generate_ordinates(abscissae, dummyclass, "fun")
- pwd = os.getcwd()
- testdir = "test"
- filename = "data.dat"
- fqpn = os.path.join(pwd, testdir, filename)
- datac.write_json(fqpn, abscissae)
-
|
Add dummy class to test generate_ordinates
|
## Code Before:
import datac
import numpy as np
import os
params = {"temp_sun": 6000.}
bandgaps = np.linspace(0, 3.25, 100)
abscissae = datac.generate_abscissae(bandgaps, "bandgap", params)
pwd = os.getcwd()
testdir = "test"
filename = "data.dat"
fqpn = os.path.join(pwd, testdir, filename)
datac.write_json(fqpn, abscissae)
## Instruction:
Add dummy class to test generate_ordinates
## Code After:
import datac
import numpy as np
import os
import unittest
class dummyclass(object):
"""
Simple class for testing `generate_ordinates`
"""
def __init__(self, params):
pass
def fun(self):
"""
Return value of `True`
"""
return True
params = {"temp_sun": 6000.}
bandgaps = np.linspace(0, 3.25, 100)
abscissae = datac.generate_abscissae(bandgaps, "bandgap", params)
data = datac.generate_ordinates(abscissae, dummyclass, "fun")
|
import datac
import numpy as np
import os
+ import unittest
+
+ class dummyclass(object):
+ """
+ Simple class for testing `generate_ordinates`
+ """
+ def __init__(self, params):
+ pass
+
+ def fun(self):
+ """
+ Return value of `True`
+ """
+ return True
+
params = {"temp_sun": 6000.}
bandgaps = np.linspace(0, 3.25, 100)
abscissae = datac.generate_abscissae(bandgaps, "bandgap", params)
+ data = datac.generate_ordinates(abscissae, dummyclass, "fun")
- pwd = os.getcwd()
- testdir = "test"
- filename = "data.dat"
-
- fqpn = os.path.join(pwd, testdir, filename)
- datac.write_json(fqpn, abscissae)
|
0df76d66fb6a2425c6ccc8a3a75d41599b2545c6
|
auth0/v2/authentication/delegated.py
|
auth0/v2/authentication/delegated.py
|
from .base import AuthenticationBase
class Delegated(AuthenticationBase):
def __init__(self, domain):
self.domain = domain
def get_token(self, client_id, target, api_type, grant_type,
id_token=None, refresh_token=None):
if id_token and refresh_token:
raise ValueError('Only one of id_token or refresh_token '
'can be None')
data = {
'client_id': client_id,
'grant_type': grant_type,
'target': target,
'scope': 'openid',
'api_type': api_type,
}
if id_token:
data.update({'id_token': id_token})
elif refresh_token:
data.update({'refresh_token': refresh_token})
else:
raise ValueError('Either id_token or refresh_token must '
'have a value')
return self.post(
'https://%s/delegation' % self.domain,
headers={'Content-Type': 'application/json'},
data=data
)
|
from .base import AuthenticationBase
class Delegated(AuthenticationBase):
"""Delegated authentication endpoints.
Args:
domain (str): Your auth0 domain (e.g: username.auth0.com)
"""
def __init__(self, domain):
self.domain = domain
def get_token(self, client_id, target, api_type, grant_type,
id_token=None, refresh_token=None):
"""Obtain a delegation token.
"""
if id_token and refresh_token:
raise ValueError('Only one of id_token or refresh_token '
'can be None')
data = {
'client_id': client_id,
'grant_type': grant_type,
'target': target,
'scope': 'openid',
'api_type': api_type,
}
if id_token:
data.update({'id_token': id_token})
elif refresh_token:
data.update({'refresh_token': refresh_token})
else:
raise ValueError('Either id_token or refresh_token must '
'have a value')
return self.post(
'https://%s/delegation' % self.domain,
headers={'Content-Type': 'application/json'},
data=data
)
|
Add docstrings in Delegated class
|
Add docstrings in Delegated class
|
Python
|
mit
|
auth0/auth0-python,auth0/auth0-python
|
from .base import AuthenticationBase
class Delegated(AuthenticationBase):
+
+ """Delegated authentication endpoints.
+
+ Args:
+ domain (str): Your auth0 domain (e.g: username.auth0.com)
+ """
def __init__(self, domain):
self.domain = domain
def get_token(self, client_id, target, api_type, grant_type,
id_token=None, refresh_token=None):
+
+ """Obtain a delegation token.
+ """
if id_token and refresh_token:
raise ValueError('Only one of id_token or refresh_token '
'can be None')
data = {
'client_id': client_id,
'grant_type': grant_type,
'target': target,
'scope': 'openid',
'api_type': api_type,
}
if id_token:
data.update({'id_token': id_token})
elif refresh_token:
data.update({'refresh_token': refresh_token})
else:
raise ValueError('Either id_token or refresh_token must '
'have a value')
return self.post(
'https://%s/delegation' % self.domain,
headers={'Content-Type': 'application/json'},
data=data
)
|
Add docstrings in Delegated class
|
## Code Before:
from .base import AuthenticationBase
class Delegated(AuthenticationBase):
def __init__(self, domain):
self.domain = domain
def get_token(self, client_id, target, api_type, grant_type,
id_token=None, refresh_token=None):
if id_token and refresh_token:
raise ValueError('Only one of id_token or refresh_token '
'can be None')
data = {
'client_id': client_id,
'grant_type': grant_type,
'target': target,
'scope': 'openid',
'api_type': api_type,
}
if id_token:
data.update({'id_token': id_token})
elif refresh_token:
data.update({'refresh_token': refresh_token})
else:
raise ValueError('Either id_token or refresh_token must '
'have a value')
return self.post(
'https://%s/delegation' % self.domain,
headers={'Content-Type': 'application/json'},
data=data
)
## Instruction:
Add docstrings in Delegated class
## Code After:
from .base import AuthenticationBase
class Delegated(AuthenticationBase):
"""Delegated authentication endpoints.
Args:
domain (str): Your auth0 domain (e.g: username.auth0.com)
"""
def __init__(self, domain):
self.domain = domain
def get_token(self, client_id, target, api_type, grant_type,
id_token=None, refresh_token=None):
"""Obtain a delegation token.
"""
if id_token and refresh_token:
raise ValueError('Only one of id_token or refresh_token '
'can be None')
data = {
'client_id': client_id,
'grant_type': grant_type,
'target': target,
'scope': 'openid',
'api_type': api_type,
}
if id_token:
data.update({'id_token': id_token})
elif refresh_token:
data.update({'refresh_token': refresh_token})
else:
raise ValueError('Either id_token or refresh_token must '
'have a value')
return self.post(
'https://%s/delegation' % self.domain,
headers={'Content-Type': 'application/json'},
data=data
)
|
from .base import AuthenticationBase
class Delegated(AuthenticationBase):
+
+ """Delegated authentication endpoints.
+
+ Args:
+ domain (str): Your auth0 domain (e.g: username.auth0.com)
+ """
def __init__(self, domain):
self.domain = domain
def get_token(self, client_id, target, api_type, grant_type,
id_token=None, refresh_token=None):
+
+ """Obtain a delegation token.
+ """
if id_token and refresh_token:
raise ValueError('Only one of id_token or refresh_token '
'can be None')
data = {
'client_id': client_id,
'grant_type': grant_type,
'target': target,
'scope': 'openid',
'api_type': api_type,
}
if id_token:
data.update({'id_token': id_token})
elif refresh_token:
data.update({'refresh_token': refresh_token})
else:
raise ValueError('Either id_token or refresh_token must '
'have a value')
return self.post(
'https://%s/delegation' % self.domain,
headers={'Content-Type': 'application/json'},
data=data
)
|
5bd17a3088c2d1958d86efc4411b575c123e6275
|
tests/functional/test_l10n.py
|
tests/functional/test_l10n.py
|
import random
import pytest
from ..pages.home import HomePage
@pytest.mark.nondestructive
def test_change_language(base_url, selenium):
page = HomePage(base_url, selenium).open()
initial = page.footer.language
# avoid selecting the same language or locales that have homepage redirects
excluded = [initial, 'ja', 'zh-TW', 'zh-CN']
available = [l for l in page.footer.languages if l not in excluded]
new = random.choice(available)
page.footer.select_language(new)
assert new in selenium.current_url, 'Language is not in URL'
assert new == page.footer.language, 'Language has not been selected'
|
import random
import pytest
from ..pages.home import HomePage
@pytest.mark.nondestructive
def test_change_language(base_url, selenium):
page = HomePage(base_url, selenium).open()
initial = page.footer.language
# avoid selecting the same language or locales that have homepage redirects
excluded = [initial, 'ja', 'ja-JP-mac', 'zh-TW', 'zh-CN']
available = [l for l in page.footer.languages if l not in excluded]
new = random.choice(available)
page.footer.select_language(new)
assert new in selenium.current_url, 'Language is not in URL'
assert new == page.footer.language, 'Language has not been selected'
|
Exclude ja-JP-mac on homepage language select functional test
|
Exclude ja-JP-mac on homepage language select functional test
|
Python
|
mpl-2.0
|
hoosteeno/bedrock,flodolo/bedrock,gauthierm/bedrock,gauthierm/bedrock,alexgibson/bedrock,sgarrity/bedrock,l-hedgehog/bedrock,mkmelin/bedrock,flodolo/bedrock,Sancus/bedrock,analytics-pros/mozilla-bedrock,glogiotatidis/bedrock,gauthierm/bedrock,craigcook/bedrock,mozilla/bedrock,alexgibson/bedrock,mermi/bedrock,hoosteeno/bedrock,pascalchevrel/bedrock,mermi/bedrock,CSCI-462-01-2017/bedrock,flodolo/bedrock,craigcook/bedrock,Sancus/bedrock,jgmize/bedrock,davehunt/bedrock,alexgibson/bedrock,gerv/bedrock,CSCI-462-01-2017/bedrock,gauthierm/bedrock,sgarrity/bedrock,sylvestre/bedrock,sylvestre/bedrock,TheoChevalier/bedrock,jpetto/bedrock,davehunt/bedrock,analytics-pros/mozilla-bedrock,l-hedgehog/bedrock,jpetto/bedrock,flodolo/bedrock,gerv/bedrock,schalkneethling/bedrock,glogiotatidis/bedrock,TheJJ100100/bedrock,hoosteeno/bedrock,TheJJ100100/bedrock,CSCI-462-01-2017/bedrock,mozilla/bedrock,pascalchevrel/bedrock,sylvestre/bedrock,kyoshino/bedrock,kyoshino/bedrock,davehunt/bedrock,mozilla/bedrock,schalkneethling/bedrock,kyoshino/bedrock,Sancus/bedrock,TheJJ100100/bedrock,CSCI-462-01-2017/bedrock,Sancus/bedrock,mkmelin/bedrock,MichaelKohler/bedrock,ericawright/bedrock,MichaelKohler/bedrock,hoosteeno/bedrock,glogiotatidis/bedrock,TheoChevalier/bedrock,jpetto/bedrock,glogiotatidis/bedrock,l-hedgehog/bedrock,schalkneethling/bedrock,kyoshino/bedrock,TheoChevalier/bedrock,pascalchevrel/bedrock,mermi/bedrock,sylvestre/bedrock,davehunt/bedrock,alexgibson/bedrock,mozilla/bedrock,gerv/bedrock,craigcook/bedrock,ericawright/bedrock,mkmelin/bedrock,TheoChevalier/bedrock,jgmize/bedrock,analytics-pros/mozilla-bedrock,ericawright/bedrock,schalkneethling/bedrock,TheJJ100100/bedrock,jgmize/bedrock,mkmelin/bedrock,jgmize/bedrock,MichaelKohler/bedrock,gerv/bedrock,analytics-pros/mozilla-bedrock,l-hedgehog/bedrock,mermi/bedrock,MichaelKohler/bedrock,sgarrity/bedrock,jpetto/bedrock,ericawright/bedrock,pascalchevrel/bedrock,sgarrity/bedrock,craigcook/bedrock
|
import random
import pytest
from ..pages.home import HomePage
@pytest.mark.nondestructive
def test_change_language(base_url, selenium):
page = HomePage(base_url, selenium).open()
initial = page.footer.language
# avoid selecting the same language or locales that have homepage redirects
- excluded = [initial, 'ja', 'zh-TW', 'zh-CN']
+ excluded = [initial, 'ja', 'ja-JP-mac', 'zh-TW', 'zh-CN']
available = [l for l in page.footer.languages if l not in excluded]
new = random.choice(available)
page.footer.select_language(new)
assert new in selenium.current_url, 'Language is not in URL'
assert new == page.footer.language, 'Language has not been selected'
|
Exclude ja-JP-mac on homepage language select functional test
|
## Code Before:
import random
import pytest
from ..pages.home import HomePage
@pytest.mark.nondestructive
def test_change_language(base_url, selenium):
page = HomePage(base_url, selenium).open()
initial = page.footer.language
# avoid selecting the same language or locales that have homepage redirects
excluded = [initial, 'ja', 'zh-TW', 'zh-CN']
available = [l for l in page.footer.languages if l not in excluded]
new = random.choice(available)
page.footer.select_language(new)
assert new in selenium.current_url, 'Language is not in URL'
assert new == page.footer.language, 'Language has not been selected'
## Instruction:
Exclude ja-JP-mac on homepage language select functional test
## Code After:
import random
import pytest
from ..pages.home import HomePage
@pytest.mark.nondestructive
def test_change_language(base_url, selenium):
page = HomePage(base_url, selenium).open()
initial = page.footer.language
# avoid selecting the same language or locales that have homepage redirects
excluded = [initial, 'ja', 'ja-JP-mac', 'zh-TW', 'zh-CN']
available = [l for l in page.footer.languages if l not in excluded]
new = random.choice(available)
page.footer.select_language(new)
assert new in selenium.current_url, 'Language is not in URL'
assert new == page.footer.language, 'Language has not been selected'
|
import random
import pytest
from ..pages.home import HomePage
@pytest.mark.nondestructive
def test_change_language(base_url, selenium):
page = HomePage(base_url, selenium).open()
initial = page.footer.language
# avoid selecting the same language or locales that have homepage redirects
- excluded = [initial, 'ja', 'zh-TW', 'zh-CN']
+ excluded = [initial, 'ja', 'ja-JP-mac', 'zh-TW', 'zh-CN']
? +++++++++++++
available = [l for l in page.footer.languages if l not in excluded]
new = random.choice(available)
page.footer.select_language(new)
assert new in selenium.current_url, 'Language is not in URL'
assert new == page.footer.language, 'Language has not been selected'
|
969b2d322174392a85f6fa6fc92160cb18144594
|
bulbs/content/serializers.py
|
bulbs/content/serializers.py
|
from django import forms
from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Content, Tag
class TagSerializer(serializers.ModelSerializer):
class Meta:
model = Tag
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
exclude = ('password',)
class SimpleAuthorSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ('id', 'first_name', 'last_name')
class ContentSerializer(serializers.ModelSerializer):
url = serializers.HyperlinkedIdentityField(
view_name='content-detail',
lookup_field='pk'
)
class Meta:
model = Content
exclude = ('polymorphic_ctype',)
class ContentSerializerReadOnly(ContentSerializer):
tags = TagSerializer(many=True, required=False)
authors = SimpleAuthorSerializer(many=True, required=False)
class PolymorphicContentSerializerMixin(object):
def to_native(self, value):
if hasattr(value, 'get_serializer_class'):
ThisSerializer = value.get_serializer_class()
else:
class ThisSerializer(serializers.ModelSerializer):
class Meta:
model = value.__class__
serializer = ThisSerializer(context=self.context)
return serializer.to_native(value)
class PolymorphicContentSerializer(ContentSerializer, PolymorphicContentSerializerMixin):
pass
class PolymorphicContentSerializerReadOnly(ContentSerializerReadOnly, PolymorphicContentSerializerMixin):
pass
|
from django import forms
from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Content, Tag
class TagSerializer(serializers.ModelSerializer):
class Meta:
model = Tag
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
exclude = ('password',)
class SimpleAuthorSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ('id', 'first_name', 'last_name')
class ContentSerializer(serializers.ModelSerializer):
url = serializers.HyperlinkedIdentityField(
view_name='content-detail',
lookup_field='pk'
)
tags = serializers.PrimaryKeyRelatedField(many=True, required=False)
authors = serializers.PrimaryKeyRelatedField(many=True, required=False)
class Meta:
model = Content
exclude = ('polymorphic_ctype',)
class ContentSerializerReadOnly(ContentSerializer):
tags = TagSerializer(many=True, required=False)
authors = SimpleAuthorSerializer(many=True, required=False)
class PolymorphicContentSerializerMixin(object):
def to_native(self, value):
if hasattr(value, 'get_serializer_class'):
ThisSerializer = value.get_serializer_class()
else:
class ThisSerializer(serializers.ModelSerializer):
class Meta:
model = value.__class__
serializer = ThisSerializer(context=self.context)
return serializer.to_native(value)
class PolymorphicContentSerializer(ContentSerializer, PolymorphicContentSerializerMixin):
pass
class PolymorphicContentSerializerReadOnly(ContentSerializerReadOnly, PolymorphicContentSerializerMixin):
pass
|
Allow for empty tags and authors on `ContentSerializer`
|
Allow for empty tags and authors on `ContentSerializer`
|
Python
|
mit
|
theonion/django-bulbs,theonion/django-bulbs,theonion/django-bulbs,theonion/django-bulbs,pombredanne/django-bulbs,pombredanne/django-bulbs,theonion/django-bulbs
|
from django import forms
from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Content, Tag
class TagSerializer(serializers.ModelSerializer):
class Meta:
model = Tag
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
exclude = ('password',)
class SimpleAuthorSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ('id', 'first_name', 'last_name')
class ContentSerializer(serializers.ModelSerializer):
url = serializers.HyperlinkedIdentityField(
view_name='content-detail',
lookup_field='pk'
)
+ tags = serializers.PrimaryKeyRelatedField(many=True, required=False)
+ authors = serializers.PrimaryKeyRelatedField(many=True, required=False)
class Meta:
model = Content
exclude = ('polymorphic_ctype',)
class ContentSerializerReadOnly(ContentSerializer):
tags = TagSerializer(many=True, required=False)
authors = SimpleAuthorSerializer(many=True, required=False)
class PolymorphicContentSerializerMixin(object):
def to_native(self, value):
if hasattr(value, 'get_serializer_class'):
ThisSerializer = value.get_serializer_class()
else:
class ThisSerializer(serializers.ModelSerializer):
class Meta:
model = value.__class__
serializer = ThisSerializer(context=self.context)
return serializer.to_native(value)
class PolymorphicContentSerializer(ContentSerializer, PolymorphicContentSerializerMixin):
pass
class PolymorphicContentSerializerReadOnly(ContentSerializerReadOnly, PolymorphicContentSerializerMixin):
pass
|
Allow for empty tags and authors on `ContentSerializer`
|
## Code Before:
from django import forms
from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Content, Tag
class TagSerializer(serializers.ModelSerializer):
class Meta:
model = Tag
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
exclude = ('password',)
class SimpleAuthorSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ('id', 'first_name', 'last_name')
class ContentSerializer(serializers.ModelSerializer):
url = serializers.HyperlinkedIdentityField(
view_name='content-detail',
lookup_field='pk'
)
class Meta:
model = Content
exclude = ('polymorphic_ctype',)
class ContentSerializerReadOnly(ContentSerializer):
tags = TagSerializer(many=True, required=False)
authors = SimpleAuthorSerializer(many=True, required=False)
class PolymorphicContentSerializerMixin(object):
def to_native(self, value):
if hasattr(value, 'get_serializer_class'):
ThisSerializer = value.get_serializer_class()
else:
class ThisSerializer(serializers.ModelSerializer):
class Meta:
model = value.__class__
serializer = ThisSerializer(context=self.context)
return serializer.to_native(value)
class PolymorphicContentSerializer(ContentSerializer, PolymorphicContentSerializerMixin):
pass
class PolymorphicContentSerializerReadOnly(ContentSerializerReadOnly, PolymorphicContentSerializerMixin):
pass
## Instruction:
Allow for empty tags and authors on `ContentSerializer`
## Code After:
from django import forms
from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Content, Tag
class TagSerializer(serializers.ModelSerializer):
class Meta:
model = Tag
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
exclude = ('password',)
class SimpleAuthorSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ('id', 'first_name', 'last_name')
class ContentSerializer(serializers.ModelSerializer):
url = serializers.HyperlinkedIdentityField(
view_name='content-detail',
lookup_field='pk'
)
tags = serializers.PrimaryKeyRelatedField(many=True, required=False)
authors = serializers.PrimaryKeyRelatedField(many=True, required=False)
class Meta:
model = Content
exclude = ('polymorphic_ctype',)
class ContentSerializerReadOnly(ContentSerializer):
tags = TagSerializer(many=True, required=False)
authors = SimpleAuthorSerializer(many=True, required=False)
class PolymorphicContentSerializerMixin(object):
def to_native(self, value):
if hasattr(value, 'get_serializer_class'):
ThisSerializer = value.get_serializer_class()
else:
class ThisSerializer(serializers.ModelSerializer):
class Meta:
model = value.__class__
serializer = ThisSerializer(context=self.context)
return serializer.to_native(value)
class PolymorphicContentSerializer(ContentSerializer, PolymorphicContentSerializerMixin):
pass
class PolymorphicContentSerializerReadOnly(ContentSerializerReadOnly, PolymorphicContentSerializerMixin):
pass
|
from django import forms
from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Content, Tag
class TagSerializer(serializers.ModelSerializer):
class Meta:
model = Tag
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
exclude = ('password',)
class SimpleAuthorSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ('id', 'first_name', 'last_name')
class ContentSerializer(serializers.ModelSerializer):
url = serializers.HyperlinkedIdentityField(
view_name='content-detail',
lookup_field='pk'
)
+ tags = serializers.PrimaryKeyRelatedField(many=True, required=False)
+ authors = serializers.PrimaryKeyRelatedField(many=True, required=False)
class Meta:
model = Content
exclude = ('polymorphic_ctype',)
class ContentSerializerReadOnly(ContentSerializer):
tags = TagSerializer(many=True, required=False)
authors = SimpleAuthorSerializer(many=True, required=False)
class PolymorphicContentSerializerMixin(object):
def to_native(self, value):
if hasattr(value, 'get_serializer_class'):
ThisSerializer = value.get_serializer_class()
else:
class ThisSerializer(serializers.ModelSerializer):
class Meta:
model = value.__class__
serializer = ThisSerializer(context=self.context)
return serializer.to_native(value)
class PolymorphicContentSerializer(ContentSerializer, PolymorphicContentSerializerMixin):
pass
class PolymorphicContentSerializerReadOnly(ContentSerializerReadOnly, PolymorphicContentSerializerMixin):
pass
|
8f4c376a57c68636188880cd92c64b4640b1c8cc
|
sheared/web/entwine.py
|
sheared/web/entwine.py
|
import warnings
from dtml import tal, metal, tales, context
from sheared.python import io
class Entwiner:
def __init__(self):
self.builtins = context.BuiltIns({})
#self.context = context.Context()
#self.context.setDefaults(self.builtins)
def handle(self, request, reply, subpath):
self.context = {}
self.entwine(request, reply, subpath)
r = self.execute(self.page_path, throwaway=0)
reply.send(r)
def execute(self, path, throwaway=1):
r = io.readfile(path)
c = tal.compile(r, tales)
r = tal.execute(c, self.context, self.builtins, tales)
c = metal.compile(r, tales)
r = metal.execute(c, self.context, self.builtins, tales)
if throwaway and r.strip():
warnings.warn('%s: ignored non-macro content' % path)
return r
|
import warnings
from dtml import tal, metal, tales
from sheared.python import io
class Entwiner:
def handle(self, request, reply, subpath):
self.context = {}
self.entwine(request, reply, subpath)
r = self.execute(self.page_path, throwaway=0)
reply.send(r)
def execute(self, path, throwaway=1):
r = io.readfile(path)
c = tal.compile(r, tales)
r = tal.execute(c, self.context, tales)
c = metal.compile(r, tales)
r = metal.execute(c, self.context, tales)
if throwaway and r.strip():
warnings.warn('%s: ignored non-macro content' % path)
return r
|
Remove the builtins arguments to the {tal,metal}.execute calls.
|
Remove the builtins arguments to the {tal,metal}.execute calls.
git-svn-id: 8b0eea19d26e20ec80f5c0ea247ec202fbcc1090@107 5646265b-94b7-0310-9681-9501d24b2df7
|
Python
|
mit
|
kirkeby/sheared
|
import warnings
- from dtml import tal, metal, tales, context
+ from dtml import tal, metal, tales
from sheared.python import io
class Entwiner:
- def __init__(self):
- self.builtins = context.BuiltIns({})
- #self.context = context.Context()
- #self.context.setDefaults(self.builtins)
-
def handle(self, request, reply, subpath):
self.context = {}
self.entwine(request, reply, subpath)
r = self.execute(self.page_path, throwaway=0)
-
reply.send(r)
def execute(self, path, throwaway=1):
r = io.readfile(path)
c = tal.compile(r, tales)
- r = tal.execute(c, self.context, self.builtins, tales)
+ r = tal.execute(c, self.context, tales)
c = metal.compile(r, tales)
- r = metal.execute(c, self.context, self.builtins, tales)
+ r = metal.execute(c, self.context, tales)
if throwaway and r.strip():
warnings.warn('%s: ignored non-macro content' % path)
return r
|
Remove the builtins arguments to the {tal,metal}.execute calls.
|
## Code Before:
import warnings
from dtml import tal, metal, tales, context
from sheared.python import io
class Entwiner:
def __init__(self):
self.builtins = context.BuiltIns({})
#self.context = context.Context()
#self.context.setDefaults(self.builtins)
def handle(self, request, reply, subpath):
self.context = {}
self.entwine(request, reply, subpath)
r = self.execute(self.page_path, throwaway=0)
reply.send(r)
def execute(self, path, throwaway=1):
r = io.readfile(path)
c = tal.compile(r, tales)
r = tal.execute(c, self.context, self.builtins, tales)
c = metal.compile(r, tales)
r = metal.execute(c, self.context, self.builtins, tales)
if throwaway and r.strip():
warnings.warn('%s: ignored non-macro content' % path)
return r
## Instruction:
Remove the builtins arguments to the {tal,metal}.execute calls.
## Code After:
import warnings
from dtml import tal, metal, tales
from sheared.python import io
class Entwiner:
def handle(self, request, reply, subpath):
self.context = {}
self.entwine(request, reply, subpath)
r = self.execute(self.page_path, throwaway=0)
reply.send(r)
def execute(self, path, throwaway=1):
r = io.readfile(path)
c = tal.compile(r, tales)
r = tal.execute(c, self.context, tales)
c = metal.compile(r, tales)
r = metal.execute(c, self.context, tales)
if throwaway and r.strip():
warnings.warn('%s: ignored non-macro content' % path)
return r
|
import warnings
- from dtml import tal, metal, tales, context
? ---------
+ from dtml import tal, metal, tales
from sheared.python import io
class Entwiner:
- def __init__(self):
- self.builtins = context.BuiltIns({})
- #self.context = context.Context()
- #self.context.setDefaults(self.builtins)
-
def handle(self, request, reply, subpath):
self.context = {}
self.entwine(request, reply, subpath)
r = self.execute(self.page_path, throwaway=0)
-
reply.send(r)
def execute(self, path, throwaway=1):
r = io.readfile(path)
c = tal.compile(r, tales)
- r = tal.execute(c, self.context, self.builtins, tales)
? ---------------
+ r = tal.execute(c, self.context, tales)
c = metal.compile(r, tales)
- r = metal.execute(c, self.context, self.builtins, tales)
? ---------------
+ r = metal.execute(c, self.context, tales)
if throwaway and r.strip():
warnings.warn('%s: ignored non-macro content' % path)
return r
|
1d292feebd2999eb042da1f606c0fdc33103225f
|
api/models.py
|
api/models.py
|
class MessageModel:
def __init__(self, message, duration, creation_date, message_category):
# We will automatically generate the new id
self.id = 0
self.message = message
self.duration = duration
self.creation_date = creation_date
self.message_category = message_category
self.printed_times = 0
self.printed_once = False
|
class MessageModel:
def __init__(self, message, duration, creation_date, message_category):
# We will automatically generate the new id
self.id = 0
self.message = message
self.duration = duration
self.creation_date = creation_date
self.message_category = message_category
self.printed_times = 0
self.printed_once = False
class AccountModel:
def __init__(self, account_type, account_number, name, first_name, address, birthdate):
# We will automatically generate the new id
self.id = 0
self.type = account_type
self.number = account_number
self.name = name
self.first_name = first_name
self.address = address
self.birthdate = birthdate
#We will automatically generate next 2 parameters based on client address.
self.longitude = 0;
self.latitude = 0;
|
Update model script to support task database schema
|
Update model script to support task database schema
|
Python
|
mit
|
candidate48661/BEA
|
class MessageModel:
def __init__(self, message, duration, creation_date, message_category):
# We will automatically generate the new id
self.id = 0
self.message = message
self.duration = duration
self.creation_date = creation_date
self.message_category = message_category
self.printed_times = 0
self.printed_once = False
+
+
+ class AccountModel:
+ def __init__(self, account_type, account_number, name, first_name, address, birthdate):
+ # We will automatically generate the new id
+ self.id = 0
+ self.type = account_type
+ self.number = account_number
+ self.name = name
+ self.first_name = first_name
+ self.address = address
+ self.birthdate = birthdate
+ #We will automatically generate next 2 parameters based on client address.
+ self.longitude = 0;
+ self.latitude = 0;
|
Update model script to support task database schema
|
## Code Before:
class MessageModel:
def __init__(self, message, duration, creation_date, message_category):
# We will automatically generate the new id
self.id = 0
self.message = message
self.duration = duration
self.creation_date = creation_date
self.message_category = message_category
self.printed_times = 0
self.printed_once = False
## Instruction:
Update model script to support task database schema
## Code After:
class MessageModel:
def __init__(self, message, duration, creation_date, message_category):
# We will automatically generate the new id
self.id = 0
self.message = message
self.duration = duration
self.creation_date = creation_date
self.message_category = message_category
self.printed_times = 0
self.printed_once = False
class AccountModel:
def __init__(self, account_type, account_number, name, first_name, address, birthdate):
# We will automatically generate the new id
self.id = 0
self.type = account_type
self.number = account_number
self.name = name
self.first_name = first_name
self.address = address
self.birthdate = birthdate
#We will automatically generate next 2 parameters based on client address.
self.longitude = 0;
self.latitude = 0;
|
class MessageModel:
def __init__(self, message, duration, creation_date, message_category):
# We will automatically generate the new id
self.id = 0
self.message = message
self.duration = duration
self.creation_date = creation_date
self.message_category = message_category
self.printed_times = 0
self.printed_once = False
+
+
+ class AccountModel:
+ def __init__(self, account_type, account_number, name, first_name, address, birthdate):
+ # We will automatically generate the new id
+ self.id = 0
+ self.type = account_type
+ self.number = account_number
+ self.name = name
+ self.first_name = first_name
+ self.address = address
+ self.birthdate = birthdate
+ #We will automatically generate next 2 parameters based on client address.
+ self.longitude = 0;
+ self.latitude = 0;
|
eaa907d5d8e4bb4e8514c719b3c11a4a30442694
|
vpr/muxes/logic/mux2/tests/test_mux2.py
|
vpr/muxes/logic/mux2/tests/test_mux2.py
|
import cocotb
from cocotb.triggers import Timer
from cocotb.result import TestFailure
#from adder_model import adder_model
#import random
@cocotb.test()
def mux2_test(dut):
"""Test for MUX2 options"""
opts = [(x,y,z, x&~z | y&z) for x in [0,1] for y in [0,1] for z in [0,1]]
yield Timer(2)
for I0, I1, S0, _ in opts:
dut.I0 = I0
dut.I1 = I1
dut.S0 = S0
if S0:
expected = I1
else:
expected = I0
yield Timer(2)
if dut.O != expected:
raise TestFailure(
'Result is incorrect for I0(%d) I1(%d) S0(%d): %s(O) != %s (expected)' % (I0, I1, S0, dut.O, expected))
else:
dut._log.info('I0(%d) I1(%d) S0(%d) output(%d) Ok!'%(I0, I1, S0, dut.O))
|
import cocotb
from cocotb.triggers import Timer
from cocotb.result import TestFailure
from cocotb.regression import TestFactory
@cocotb.coroutine
def mux2_basic_test(dut, inputs=(1,0,0)):
"""Test for MUX2 options"""
yield Timer(2)
I0, I1, S0 = inputs
dut.I0 = I0
dut.I1 = I1
dut.S0 = S0
if S0:
expected = I1
else:
expected = I0
yield Timer(2)
if dut.O != expected:
raise TestFailure(
'Result is incorrect for I0(%d) I1(%d) S0(%d): %s(O) != %s (expected)' % (I0, I1, S0, dut.O, expected))
else:
dut._log.info('I0(%d) I1(%d) S0(%d) output(%d) Ok!'%(I0, I1, S0, dut.O))
factory = TestFactory(mux2_basic_test)
input_permutations = [(x, y, z) for x in [0,1] for y in [0,1] for z in [0,1]]
factory.add_option("inputs", input_permutations)
factory.generate_tests()
|
Use factory to iterate over permutations
|
Use factory to iterate over permutations
Signed-off-by: Jeffrey Elms <[email protected]>
|
Python
|
isc
|
SymbiFlow/symbiflow-arch-defs,SymbiFlow/symbiflow-arch-defs
|
import cocotb
from cocotb.triggers import Timer
from cocotb.result import TestFailure
- #from adder_model import adder_model
- #import random
+ from cocotb.regression import TestFactory
+
+ @cocotb.coroutine
+ def mux2_basic_test(dut, inputs=(1,0,0)):
+ """Test for MUX2 options"""
+
+ yield Timer(2)
+ I0, I1, S0 = inputs
+ dut.I0 = I0
+ dut.I1 = I1
+ dut.S0 = S0
+ if S0:
+ expected = I1
+ else:
+ expected = I0
+ yield Timer(2)
+
+ if dut.O != expected:
+ raise TestFailure(
+ 'Result is incorrect for I0(%d) I1(%d) S0(%d): %s(O) != %s (expected)' % (I0, I1, S0, dut.O, expected))
+ else:
+ dut._log.info('I0(%d) I1(%d) S0(%d) output(%d) Ok!'%(I0, I1, S0, dut.O))
+ factory = TestFactory(mux2_basic_test)
- @cocotb.test()
- def mux2_test(dut):
- """Test for MUX2 options"""
- opts = [(x,y,z, x&~z | y&z) for x in [0,1] for y in [0,1] for z in [0,1]]
+ input_permutations = [(x, y, z) for x in [0,1] for y in [0,1] for z in [0,1]]
+ factory.add_option("inputs", input_permutations)
+ factory.generate_tests()
- yield Timer(2)
- for I0, I1, S0, _ in opts:
- dut.I0 = I0
- dut.I1 = I1
- dut.S0 = S0
- if S0:
- expected = I1
- else:
- expected = I0
- yield Timer(2)
-
- if dut.O != expected:
- raise TestFailure(
- 'Result is incorrect for I0(%d) I1(%d) S0(%d): %s(O) != %s (expected)' % (I0, I1, S0, dut.O, expected))
- else:
- dut._log.info('I0(%d) I1(%d) S0(%d) output(%d) Ok!'%(I0, I1, S0, dut.O))
-
|
Use factory to iterate over permutations
|
## Code Before:
import cocotb
from cocotb.triggers import Timer
from cocotb.result import TestFailure
#from adder_model import adder_model
#import random
@cocotb.test()
def mux2_test(dut):
"""Test for MUX2 options"""
opts = [(x,y,z, x&~z | y&z) for x in [0,1] for y in [0,1] for z in [0,1]]
yield Timer(2)
for I0, I1, S0, _ in opts:
dut.I0 = I0
dut.I1 = I1
dut.S0 = S0
if S0:
expected = I1
else:
expected = I0
yield Timer(2)
if dut.O != expected:
raise TestFailure(
'Result is incorrect for I0(%d) I1(%d) S0(%d): %s(O) != %s (expected)' % (I0, I1, S0, dut.O, expected))
else:
dut._log.info('I0(%d) I1(%d) S0(%d) output(%d) Ok!'%(I0, I1, S0, dut.O))
## Instruction:
Use factory to iterate over permutations
## Code After:
import cocotb
from cocotb.triggers import Timer
from cocotb.result import TestFailure
from cocotb.regression import TestFactory
@cocotb.coroutine
def mux2_basic_test(dut, inputs=(1,0,0)):
"""Test for MUX2 options"""
yield Timer(2)
I0, I1, S0 = inputs
dut.I0 = I0
dut.I1 = I1
dut.S0 = S0
if S0:
expected = I1
else:
expected = I0
yield Timer(2)
if dut.O != expected:
raise TestFailure(
'Result is incorrect for I0(%d) I1(%d) S0(%d): %s(O) != %s (expected)' % (I0, I1, S0, dut.O, expected))
else:
dut._log.info('I0(%d) I1(%d) S0(%d) output(%d) Ok!'%(I0, I1, S0, dut.O))
factory = TestFactory(mux2_basic_test)
input_permutations = [(x, y, z) for x in [0,1] for y in [0,1] for z in [0,1]]
factory.add_option("inputs", input_permutations)
factory.generate_tests()
|
import cocotb
from cocotb.triggers import Timer
from cocotb.result import TestFailure
- #from adder_model import adder_model
- #import random
+ from cocotb.regression import TestFactory
+
+ @cocotb.coroutine
+ def mux2_basic_test(dut, inputs=(1,0,0)):
+ """Test for MUX2 options"""
+
+ yield Timer(2)
+ I0, I1, S0 = inputs
+ dut.I0 = I0
+ dut.I1 = I1
+ dut.S0 = S0
+ if S0:
+ expected = I1
+ else:
+ expected = I0
+ yield Timer(2)
+
+ if dut.O != expected:
+ raise TestFailure(
+ 'Result is incorrect for I0(%d) I1(%d) S0(%d): %s(O) != %s (expected)' % (I0, I1, S0, dut.O, expected))
+ else:
+ dut._log.info('I0(%d) I1(%d) S0(%d) output(%d) Ok!'%(I0, I1, S0, dut.O))
+ factory = TestFactory(mux2_basic_test)
- @cocotb.test()
- def mux2_test(dut):
- """Test for MUX2 options"""
- opts = [(x,y,z, x&~z | y&z) for x in [0,1] for y in [0,1] for z in [0,1]]
? ^^^^ ^^ -- ---------
+ input_permutations = [(x, y, z) for x in [0,1] for y in [0,1] for z in [0,1]]
? ^^^^^^^^^^^^^^^ ^ +
+ factory.add_option("inputs", input_permutations)
+ factory.generate_tests()
-
- yield Timer(2)
- for I0, I1, S0, _ in opts:
- dut.I0 = I0
- dut.I1 = I1
- dut.S0 = S0
- if S0:
- expected = I1
- else:
- expected = I0
- yield Timer(2)
-
- if dut.O != expected:
- raise TestFailure(
- 'Result is incorrect for I0(%d) I1(%d) S0(%d): %s(O) != %s (expected)' % (I0, I1, S0, dut.O, expected))
- else:
- dut._log.info('I0(%d) I1(%d) S0(%d) output(%d) Ok!'%(I0, I1, S0, dut.O))
|
26250bf43e659c03576a4d7e4d986b622a18bb48
|
swifpy/dictionary.py
|
swifpy/dictionary.py
|
import typing as tp
import builtins as py
from .optional import Optional, optional
K = tp.TypeVar('K')
V = tp.TypeVar('V')
class Dictionary(tp.Generic[K, V], tp.Iterable[tp.Tuple[K, V]]):
def __init__(self, entries: tp.Dict[K, V]) -> None:
self._entries: tp.Dict[K, V] = py.dict(entries)
def __getitem__(self, key: K) -> Optional[V]:
return optional(self._entries.get(key))
def __setitem__(self, key: K, value: V) -> None:
self._entries[key] = value
def for_each(self, body: tp.Callable[[K, V], None]) -> None:
for key, value in self._entries.items():
body(key, value)
@property
def count(self) -> int:
return len(self._entries)
def remove_all(self) -> None:
self._entries.clear()
def __iter__(self) -> tp.Iterator[tp.Tuple[K, V]]:
return self._entries.items().__iter__()
|
import typing as tp
import builtins as py
from .optional import Optional, optional
K = tp.TypeVar('K')
V = tp.TypeVar('V')
class Dictionary(tp.Generic[K, V], tp.Iterable[tp.Tuple[K, V]]):
def __init__(self, entries: tp.Dict[K, V]) -> None:
self._entries: tp.Dict[K, V] = py.dict(entries)
def __getitem__(self, key: K) -> Optional[V]:
return optional(self._entries.get(key))
def __setitem__(self, key: K, value: V) -> None:
self._entries[key] = value
@property
def keys(self) -> tp.Iterable[K]:
return self._entries.keys()
@property
def values(self) -> tp.Iterable[V]:
return self._entries.values()
def for_each(self, body: tp.Callable[[K, V], None]) -> None:
for key, value in self._entries.items():
body(key, value)
@property
def count(self) -> int:
return len(self._entries)
def remove_all(self) -> None:
self._entries.clear()
def __iter__(self) -> tp.Iterator[tp.Tuple[K, V]]:
return self._entries.items().__iter__()
|
Implement `keys` and `values` of `Dictionary`
|
Implement `keys` and `values` of `Dictionary`
|
Python
|
mit
|
koher/swifpy
|
import typing as tp
import builtins as py
from .optional import Optional, optional
K = tp.TypeVar('K')
V = tp.TypeVar('V')
class Dictionary(tp.Generic[K, V], tp.Iterable[tp.Tuple[K, V]]):
def __init__(self, entries: tp.Dict[K, V]) -> None:
self._entries: tp.Dict[K, V] = py.dict(entries)
def __getitem__(self, key: K) -> Optional[V]:
return optional(self._entries.get(key))
def __setitem__(self, key: K, value: V) -> None:
self._entries[key] = value
+ @property
+ def keys(self) -> tp.Iterable[K]:
+ return self._entries.keys()
+
+ @property
+ def values(self) -> tp.Iterable[V]:
+ return self._entries.values()
+
def for_each(self, body: tp.Callable[[K, V], None]) -> None:
for key, value in self._entries.items():
body(key, value)
@property
def count(self) -> int:
return len(self._entries)
def remove_all(self) -> None:
self._entries.clear()
def __iter__(self) -> tp.Iterator[tp.Tuple[K, V]]:
return self._entries.items().__iter__()
|
Implement `keys` and `values` of `Dictionary`
|
## Code Before:
import typing as tp
import builtins as py
from .optional import Optional, optional
K = tp.TypeVar('K')
V = tp.TypeVar('V')
class Dictionary(tp.Generic[K, V], tp.Iterable[tp.Tuple[K, V]]):
def __init__(self, entries: tp.Dict[K, V]) -> None:
self._entries: tp.Dict[K, V] = py.dict(entries)
def __getitem__(self, key: K) -> Optional[V]:
return optional(self._entries.get(key))
def __setitem__(self, key: K, value: V) -> None:
self._entries[key] = value
def for_each(self, body: tp.Callable[[K, V], None]) -> None:
for key, value in self._entries.items():
body(key, value)
@property
def count(self) -> int:
return len(self._entries)
def remove_all(self) -> None:
self._entries.clear()
def __iter__(self) -> tp.Iterator[tp.Tuple[K, V]]:
return self._entries.items().__iter__()
## Instruction:
Implement `keys` and `values` of `Dictionary`
## Code After:
import typing as tp
import builtins as py
from .optional import Optional, optional
K = tp.TypeVar('K')
V = tp.TypeVar('V')
class Dictionary(tp.Generic[K, V], tp.Iterable[tp.Tuple[K, V]]):
def __init__(self, entries: tp.Dict[K, V]) -> None:
self._entries: tp.Dict[K, V] = py.dict(entries)
def __getitem__(self, key: K) -> Optional[V]:
return optional(self._entries.get(key))
def __setitem__(self, key: K, value: V) -> None:
self._entries[key] = value
@property
def keys(self) -> tp.Iterable[K]:
return self._entries.keys()
@property
def values(self) -> tp.Iterable[V]:
return self._entries.values()
def for_each(self, body: tp.Callable[[K, V], None]) -> None:
for key, value in self._entries.items():
body(key, value)
@property
def count(self) -> int:
return len(self._entries)
def remove_all(self) -> None:
self._entries.clear()
def __iter__(self) -> tp.Iterator[tp.Tuple[K, V]]:
return self._entries.items().__iter__()
|
import typing as tp
import builtins as py
from .optional import Optional, optional
K = tp.TypeVar('K')
V = tp.TypeVar('V')
class Dictionary(tp.Generic[K, V], tp.Iterable[tp.Tuple[K, V]]):
def __init__(self, entries: tp.Dict[K, V]) -> None:
self._entries: tp.Dict[K, V] = py.dict(entries)
def __getitem__(self, key: K) -> Optional[V]:
return optional(self._entries.get(key))
def __setitem__(self, key: K, value: V) -> None:
self._entries[key] = value
+ @property
+ def keys(self) -> tp.Iterable[K]:
+ return self._entries.keys()
+
+ @property
+ def values(self) -> tp.Iterable[V]:
+ return self._entries.values()
+
def for_each(self, body: tp.Callable[[K, V], None]) -> None:
for key, value in self._entries.items():
body(key, value)
@property
def count(self) -> int:
return len(self._entries)
def remove_all(self) -> None:
self._entries.clear()
def __iter__(self) -> tp.Iterator[tp.Tuple[K, V]]:
return self._entries.items().__iter__()
|
7e7f9da097563d8fbd407268093b56c2f10464a5
|
radar/radar/tests/validation/test_reset_password_validation.py
|
radar/radar/tests/validation/test_reset_password_validation.py
|
import pytest
from radar.validation.reset_password import ResetPasswordValidation
from radar.validation.core import ValidationError
from radar.tests.validation.helpers import validation_runner
def test_valid():
obj = valid({
'token': '12345',
'username': 'hello',
'password': 'password',
})
assert obj['token'] == '12345'
assert obj['username'] == 'hello'
assert obj['password'] == 'password'
def test_token_missing():
invalid({
'username': 'hello',
'password': 'password',
})
def test_username_missing():
invalid({
'token': '12345',
'password': 'password',
})
def test_password_missing():
invalid({
'token': '12345',
'username': 'hello',
})
def invalid(obj, **kwargs):
with pytest.raises(ValidationError) as e:
valid(obj, **kwargs)
return e
def valid(obj, **kwargs):
return validation_runner(dict, ResetPasswordValidation, obj, **kwargs)
|
import pytest
from radar.validation.reset_password import ResetPasswordValidation
from radar.validation.core import ValidationError
from radar.tests.validation.helpers import validation_runner
def test_valid():
obj = valid({
'token': '12345',
'username': 'hello',
'password': '2irPtfNUURf8G',
})
assert obj['token'] == '12345'
assert obj['username'] == 'hello'
assert obj['password'] == '2irPtfNUURf8G'
def test_token_missing():
invalid({
'username': 'hello',
'password': 'password',
})
def test_username_missing():
invalid({
'token': '12345',
'password': 'password',
})
def test_password_missing():
invalid({
'token': '12345',
'username': 'hello',
})
def test_weak_password():
invalid({
'token': '12345',
'username': 'hello',
'password': 'password',
})
def invalid(obj, **kwargs):
with pytest.raises(ValidationError) as e:
valid(obj, **kwargs)
return e
def valid(obj, **kwargs):
return validation_runner(dict, ResetPasswordValidation, obj, **kwargs)
|
Use stronger password in reset password test
|
Use stronger password in reset password test
|
Python
|
agpl-3.0
|
renalreg/radar,renalreg/radar,renalreg/radar,renalreg/radar
|
import pytest
from radar.validation.reset_password import ResetPasswordValidation
from radar.validation.core import ValidationError
from radar.tests.validation.helpers import validation_runner
def test_valid():
obj = valid({
'token': '12345',
'username': 'hello',
- 'password': 'password',
+ 'password': '2irPtfNUURf8G',
})
assert obj['token'] == '12345'
assert obj['username'] == 'hello'
- assert obj['password'] == 'password'
+ assert obj['password'] == '2irPtfNUURf8G'
def test_token_missing():
invalid({
'username': 'hello',
'password': 'password',
})
def test_username_missing():
invalid({
'token': '12345',
'password': 'password',
})
def test_password_missing():
invalid({
'token': '12345',
'username': 'hello',
})
+ def test_weak_password():
+ invalid({
+ 'token': '12345',
+ 'username': 'hello',
+ 'password': 'password',
+ })
+
+
def invalid(obj, **kwargs):
with pytest.raises(ValidationError) as e:
valid(obj, **kwargs)
return e
def valid(obj, **kwargs):
return validation_runner(dict, ResetPasswordValidation, obj, **kwargs)
|
Use stronger password in reset password test
|
## Code Before:
import pytest
from radar.validation.reset_password import ResetPasswordValidation
from radar.validation.core import ValidationError
from radar.tests.validation.helpers import validation_runner
def test_valid():
obj = valid({
'token': '12345',
'username': 'hello',
'password': 'password',
})
assert obj['token'] == '12345'
assert obj['username'] == 'hello'
assert obj['password'] == 'password'
def test_token_missing():
invalid({
'username': 'hello',
'password': 'password',
})
def test_username_missing():
invalid({
'token': '12345',
'password': 'password',
})
def test_password_missing():
invalid({
'token': '12345',
'username': 'hello',
})
def invalid(obj, **kwargs):
with pytest.raises(ValidationError) as e:
valid(obj, **kwargs)
return e
def valid(obj, **kwargs):
return validation_runner(dict, ResetPasswordValidation, obj, **kwargs)
## Instruction:
Use stronger password in reset password test
## Code After:
import pytest
from radar.validation.reset_password import ResetPasswordValidation
from radar.validation.core import ValidationError
from radar.tests.validation.helpers import validation_runner
def test_valid():
obj = valid({
'token': '12345',
'username': 'hello',
'password': '2irPtfNUURf8G',
})
assert obj['token'] == '12345'
assert obj['username'] == 'hello'
assert obj['password'] == '2irPtfNUURf8G'
def test_token_missing():
invalid({
'username': 'hello',
'password': 'password',
})
def test_username_missing():
invalid({
'token': '12345',
'password': 'password',
})
def test_password_missing():
invalid({
'token': '12345',
'username': 'hello',
})
def test_weak_password():
invalid({
'token': '12345',
'username': 'hello',
'password': 'password',
})
def invalid(obj, **kwargs):
with pytest.raises(ValidationError) as e:
valid(obj, **kwargs)
return e
def valid(obj, **kwargs):
return validation_runner(dict, ResetPasswordValidation, obj, **kwargs)
|
import pytest
from radar.validation.reset_password import ResetPasswordValidation
from radar.validation.core import ValidationError
from radar.tests.validation.helpers import validation_runner
def test_valid():
obj = valid({
'token': '12345',
'username': 'hello',
- 'password': 'password',
+ 'password': '2irPtfNUURf8G',
})
assert obj['token'] == '12345'
assert obj['username'] == 'hello'
- assert obj['password'] == 'password'
? ^^^^^^ ^
+ assert obj['password'] == '2irPtfNUURf8G'
? ^^ ^^^^^^^^^^
def test_token_missing():
invalid({
'username': 'hello',
'password': 'password',
})
def test_username_missing():
invalid({
'token': '12345',
'password': 'password',
})
def test_password_missing():
invalid({
'token': '12345',
'username': 'hello',
})
+ def test_weak_password():
+ invalid({
+ 'token': '12345',
+ 'username': 'hello',
+ 'password': 'password',
+ })
+
+
def invalid(obj, **kwargs):
with pytest.raises(ValidationError) as e:
valid(obj, **kwargs)
return e
def valid(obj, **kwargs):
return validation_runner(dict, ResetPasswordValidation, obj, **kwargs)
|
f76086c1900bce156291e2180827570477342f70
|
tweepy/error.py
|
tweepy/error.py
|
from __future__ import print_function
import six
class TweepError(Exception):
"""Tweepy exception"""
def __init__(self, reason, response=None, api_code=None):
self.reason = six.text_type(reason)
self.response = response
self.api_code = api_code
Exception.__init__(self, reason)
def __str__(self):
return self.reason
def is_rate_limit_error_message(message):
"""Check if the supplied error message belongs to a rate limit error."""
return isinstance(message, list) \
and len(message) > 0 \
and 'code' in message[0] \
and message[0]['code'] == 88
class RateLimitError(TweepError):
"""Exception for Tweepy hitting the rate limit."""
# RateLimitError has the exact same properties and inner workings
# as TweepError for backwards compatibility reasons.
pass
|
from __future__ import print_function
import six
class TweepError(Exception):
"""Tweepy exception"""
def __init__(self, reason, response=None, api_code=None):
self.reason = six.text_type(reason)
self.response = response
self.api_code = api_code
super(TweepError, self).__init__(self, reason)
def __str__(self):
return self.reason
def is_rate_limit_error_message(message):
"""Check if the supplied error message belongs to a rate limit error."""
return isinstance(message, list) \
and len(message) > 0 \
and 'code' in message[0] \
and message[0]['code'] == 88
class RateLimitError(TweepError):
"""Exception for Tweepy hitting the rate limit."""
# RateLimitError has the exact same properties and inner workings
# as TweepError for backwards compatibility reasons.
pass
|
Use super in TweepError initialization
|
Use super in TweepError initialization
|
Python
|
mit
|
svven/tweepy,tweepy/tweepy
|
from __future__ import print_function
import six
class TweepError(Exception):
"""Tweepy exception"""
def __init__(self, reason, response=None, api_code=None):
self.reason = six.text_type(reason)
self.response = response
self.api_code = api_code
- Exception.__init__(self, reason)
+ super(TweepError, self).__init__(self, reason)
def __str__(self):
return self.reason
def is_rate_limit_error_message(message):
"""Check if the supplied error message belongs to a rate limit error."""
return isinstance(message, list) \
and len(message) > 0 \
and 'code' in message[0] \
and message[0]['code'] == 88
class RateLimitError(TweepError):
"""Exception for Tweepy hitting the rate limit."""
# RateLimitError has the exact same properties and inner workings
# as TweepError for backwards compatibility reasons.
pass
|
Use super in TweepError initialization
|
## Code Before:
from __future__ import print_function
import six
class TweepError(Exception):
"""Tweepy exception"""
def __init__(self, reason, response=None, api_code=None):
self.reason = six.text_type(reason)
self.response = response
self.api_code = api_code
Exception.__init__(self, reason)
def __str__(self):
return self.reason
def is_rate_limit_error_message(message):
"""Check if the supplied error message belongs to a rate limit error."""
return isinstance(message, list) \
and len(message) > 0 \
and 'code' in message[0] \
and message[0]['code'] == 88
class RateLimitError(TweepError):
"""Exception for Tweepy hitting the rate limit."""
# RateLimitError has the exact same properties and inner workings
# as TweepError for backwards compatibility reasons.
pass
## Instruction:
Use super in TweepError initialization
## Code After:
from __future__ import print_function
import six
class TweepError(Exception):
"""Tweepy exception"""
def __init__(self, reason, response=None, api_code=None):
self.reason = six.text_type(reason)
self.response = response
self.api_code = api_code
super(TweepError, self).__init__(self, reason)
def __str__(self):
return self.reason
def is_rate_limit_error_message(message):
"""Check if the supplied error message belongs to a rate limit error."""
return isinstance(message, list) \
and len(message) > 0 \
and 'code' in message[0] \
and message[0]['code'] == 88
class RateLimitError(TweepError):
"""Exception for Tweepy hitting the rate limit."""
# RateLimitError has the exact same properties and inner workings
# as TweepError for backwards compatibility reasons.
pass
|
from __future__ import print_function
import six
class TweepError(Exception):
"""Tweepy exception"""
def __init__(self, reason, response=None, api_code=None):
self.reason = six.text_type(reason)
self.response = response
self.api_code = api_code
- Exception.__init__(self, reason)
+ super(TweepError, self).__init__(self, reason)
def __str__(self):
return self.reason
def is_rate_limit_error_message(message):
"""Check if the supplied error message belongs to a rate limit error."""
return isinstance(message, list) \
and len(message) > 0 \
and 'code' in message[0] \
and message[0]['code'] == 88
class RateLimitError(TweepError):
"""Exception for Tweepy hitting the rate limit."""
# RateLimitError has the exact same properties and inner workings
# as TweepError for backwards compatibility reasons.
pass
|
29b7a69a39ac66ebd8f61c6c9c65e7e60b40b4a0
|
numpy/_array_api/_types.py
|
numpy/_array_api/_types.py
|
__all__ = ['Array', 'Device', 'Dtype', 'SupportsDLPack',
'SupportsBufferProtocol', 'PyCapsule']
from typing import Any, Sequence, Type, Union
from . import (Array, int8, int16, int32, int64, uint8, uint16, uint32,
uint64, float32, float64)
Array = ndarray
Device = TypeVar('device')
Dtype = Literal[int8, int16, int32, int64, uint8, uint16,
uint32, uint64, float32, float64]
SupportsDLPack = TypeVar('SupportsDLPack')
SupportsBufferProtocol = TypeVar('SupportsBufferProtocol')
PyCapsule = TypeVar('PyCapsule')
|
__all__ = ['Array', 'Device', 'Dtype', 'SupportsDLPack',
'SupportsBufferProtocol', 'PyCapsule']
from typing import Any, Sequence, Type, Union
from . import (Array, int8, int16, int32, int64, uint8, uint16, uint32,
uint64, float32, float64)
# This should really be recursive, but that isn't supported yet. See the
# similar comment in numpy/typing/_array_like.py
NestedSequence = Sequence[Sequence[Any]]
Device = Any
Dtype = Type[Union[[int8, int16, int32, int64, uint8, uint16,
uint32, uint64, float32, float64]]]
SupportsDLPack = Any
SupportsBufferProtocol = Any
PyCapsule = Any
|
Use better type definitions for the array API custom types
|
Use better type definitions for the array API custom types
|
Python
|
bsd-3-clause
|
anntzer/numpy,simongibbons/numpy,jakirkham/numpy,rgommers/numpy,pdebuyl/numpy,endolith/numpy,simongibbons/numpy,mhvk/numpy,pdebuyl/numpy,charris/numpy,rgommers/numpy,jakirkham/numpy,mattip/numpy,mhvk/numpy,mattip/numpy,rgommers/numpy,charris/numpy,numpy/numpy,simongibbons/numpy,endolith/numpy,anntzer/numpy,anntzer/numpy,pdebuyl/numpy,mhvk/numpy,numpy/numpy,jakirkham/numpy,anntzer/numpy,simongibbons/numpy,seberg/numpy,seberg/numpy,simongibbons/numpy,jakirkham/numpy,seberg/numpy,numpy/numpy,mattip/numpy,pdebuyl/numpy,charris/numpy,mattip/numpy,mhvk/numpy,rgommers/numpy,numpy/numpy,seberg/numpy,charris/numpy,jakirkham/numpy,endolith/numpy,mhvk/numpy,endolith/numpy
|
__all__ = ['Array', 'Device', 'Dtype', 'SupportsDLPack',
'SupportsBufferProtocol', 'PyCapsule']
from typing import Any, Sequence, Type, Union
from . import (Array, int8, int16, int32, int64, uint8, uint16, uint32,
uint64, float32, float64)
+ # This should really be recursive, but that isn't supported yet. See the
+ # similar comment in numpy/typing/_array_like.py
+ NestedSequence = Sequence[Sequence[Any]]
- Array = ndarray
- Device = TypeVar('device')
- Dtype = Literal[int8, int16, int32, int64, uint8, uint16,
- uint32, uint64, float32, float64]
- SupportsDLPack = TypeVar('SupportsDLPack')
- SupportsBufferProtocol = TypeVar('SupportsBufferProtocol')
- PyCapsule = TypeVar('PyCapsule')
+ Device = Any
+ Dtype = Type[Union[[int8, int16, int32, int64, uint8, uint16,
+ uint32, uint64, float32, float64]]]
+ SupportsDLPack = Any
+ SupportsBufferProtocol = Any
+ PyCapsule = Any
+
|
Use better type definitions for the array API custom types
|
## Code Before:
__all__ = ['Array', 'Device', 'Dtype', 'SupportsDLPack',
'SupportsBufferProtocol', 'PyCapsule']
from typing import Any, Sequence, Type, Union
from . import (Array, int8, int16, int32, int64, uint8, uint16, uint32,
uint64, float32, float64)
Array = ndarray
Device = TypeVar('device')
Dtype = Literal[int8, int16, int32, int64, uint8, uint16,
uint32, uint64, float32, float64]
SupportsDLPack = TypeVar('SupportsDLPack')
SupportsBufferProtocol = TypeVar('SupportsBufferProtocol')
PyCapsule = TypeVar('PyCapsule')
## Instruction:
Use better type definitions for the array API custom types
## Code After:
__all__ = ['Array', 'Device', 'Dtype', 'SupportsDLPack',
'SupportsBufferProtocol', 'PyCapsule']
from typing import Any, Sequence, Type, Union
from . import (Array, int8, int16, int32, int64, uint8, uint16, uint32,
uint64, float32, float64)
# This should really be recursive, but that isn't supported yet. See the
# similar comment in numpy/typing/_array_like.py
NestedSequence = Sequence[Sequence[Any]]
Device = Any
Dtype = Type[Union[[int8, int16, int32, int64, uint8, uint16,
uint32, uint64, float32, float64]]]
SupportsDLPack = Any
SupportsBufferProtocol = Any
PyCapsule = Any
|
__all__ = ['Array', 'Device', 'Dtype', 'SupportsDLPack',
'SupportsBufferProtocol', 'PyCapsule']
from typing import Any, Sequence, Type, Union
from . import (Array, int8, int16, int32, int64, uint8, uint16, uint32,
uint64, float32, float64)
- Array = ndarray
- Device = TypeVar('device')
+ # This should really be recursive, but that isn't supported yet. See the
+ # similar comment in numpy/typing/_array_like.py
+ NestedSequence = Sequence[Sequence[Any]]
+
+ Device = Any
- Dtype = Literal[int8, int16, int32, int64, uint8, uint16,
? ^ ^^^^^
+ Dtype = Type[Union[[int8, int16, int32, int64, uint8, uint16,
? ^^^^^^^ ^^^
- uint32, uint64, float32, float64]
+ uint32, uint64, float32, float64]]]
? +++ ++
- SupportsDLPack = TypeVar('SupportsDLPack')
- SupportsBufferProtocol = TypeVar('SupportsBufferProtocol')
- PyCapsule = TypeVar('PyCapsule')
+ SupportsDLPack = Any
+ SupportsBufferProtocol = Any
+ PyCapsule = Any
|
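Note: a small, self-contained sketch of the typing constructs used in this record (Any, Sequence, Type, Union) as plain type aliases; the function at the end is a made-up illustration, not part of the numpy array API.
from typing import Any, Sequence, Type, Union

Device = Any
Dtype = Type[Union[int, float]]
NestedSequence = Sequence[Sequence[Any]]

def describe(data: NestedSequence, dtype: Dtype = float) -> str:
    # report the rectangular shape and element type of a nested sequence
    rows = len(data)
    cols = len(data[0]) if rows else 0
    return f'{rows}x{cols} of {dtype.__name__}'

print(describe([[1, 2, 3], [4, 5, 6]], int))   # 2x3 of int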
e507abe78dee3ae4a4261d8bde645f3df7d8b842
|
tests/atest/run_tests.py
|
tests/atest/run_tests.py
|
import sys
import os
from robot import run_cli
run_cli(sys.argv[1:] + [os.path.dirname(__file__)])
|
import sys
import os
from pathlib import Path
from robot import run_cli
if __name__ == '__main__':
curdir = Path(__file__).parent
srcdir = curdir / '..' / '..' / 'src'
run_cli(sys.argv[1:] + ['-P', srcdir.resolve(), curdir])
|
Fix test runner for acceptance tests
|
Fix test runner for acceptance tests
|
Python
|
mit
|
Eficode/robotframework-imagehorizonlibrary
|
import sys
import os
+ from pathlib import Path
+
from robot import run_cli
- run_cli(sys.argv[1:] + [os.path.dirname(__file__)])
+ if __name__ == '__main__':
+ curdir = Path(__file__).parent
+ srcdir = curdir / '..' / '..' / 'src'
+ run_cli(sys.argv[1:] + ['-P', srcdir.resolve(), curdir])
+
|
Fix test runner for acceptance tests
|
## Code Before:
import sys
import os
from robot import run_cli
run_cli(sys.argv[1:] + [os.path.dirname(__file__)])
## Instruction:
Fix test runner for acceptance tests
## Code After:
import sys
import os
from pathlib import Path
from robot import run_cli
if __name__ == '__main__':
curdir = Path(__file__).parent
srcdir = curdir / '..' / '..' / 'src'
run_cli(sys.argv[1:] + ['-P', srcdir.resolve(), curdir])
|
import sys
import os
+ from pathlib import Path
+
from robot import run_cli
- run_cli(sys.argv[1:] + [os.path.dirname(__file__)])
+
+ if __name__ == '__main__':
+ curdir = Path(__file__).parent
+ srcdir = curdir / '..' / '..' / 'src'
+ run_cli(sys.argv[1:] + ['-P', srcdir.resolve(), curdir])
|
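Note: a minimal sketch of the pathlib pattern used in this record for locating a sibling source directory relative to the current file; the directory names are illustrative.
from pathlib import Path

curdir = Path(__file__).parent
srcdir = (curdir / '..' / '..' / 'src').resolve()
print(curdir)
print(srcdir)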
0f4208dd6088a6a96a0145045b11cf2d152db30d
|
src/samples/pillow.py
|
src/samples/pillow.py
|
from libavg import app, avg
from PIL import Image
# Demonstrates interoperability with pillow (https://pillow.readthedocs.org/index.html)
class MyMainDiv(app.MainDiv):
def onInit(self):
self.toggleTouchVisualization()
srcbmp = avg.Bitmap("rgb24-64x64.png")
pixels = srcbmp.getPixels(False)
image = Image.frombytes("RGBA", (64,64), pixels)
# Need to swap red and blue.
b,g,r,a = image.split()
image = Image.merge("RGBA", (r,g,b,a))
image.save("foo.jpg")
destbmp = avg.Bitmap((64,64), avg.B8G8R8A8, "")
destbmp.setPixels(image.tobytes())
node = avg.ImageNode(parent=self)
node.setBitmap(destbmp)
def onExit(self):
pass
def onFrame(self):
pass
app.App().run(MyMainDiv())
|
from libavg import app, avg
from PIL import Image
# Demonstrates interoperability with pillow (https://pillow.readthedocs.io)
class MyMainDiv(app.MainDiv):
def onInit(self):
self.toggleTouchVisualization()
srcbmp = avg.Bitmap("rgb24-64x64.png")
pixels = srcbmp.getPixels(False)
image = Image.frombytes("RGBA", (64,64), pixels)
# Need to swap red and blue.
b,g,r,a = image.split()
image = Image.merge("RGBA", (r,g,b,a))
image.save("foo.png")
destbmp = avg.Bitmap((64,64), avg.B8G8R8A8, "")
destbmp.setPixels(image.tobytes())
node = avg.ImageNode(parent=self)
node.setBitmap(destbmp)
def onExit(self):
pass
def onFrame(self):
pass
app.App().run(MyMainDiv())
|
Update link in the comment and change saved image format to .png
|
Update link in the comment and change saved image format to .png
|
Python
|
lgpl-2.1
|
libavg/libavg,libavg/libavg,libavg/libavg,libavg/libavg
|
from libavg import app, avg
from PIL import Image
- # Demonstrates interoperability with pillow (https://pillow.readthedocs.org/index.html)
+ # Demonstrates interoperability with pillow (https://pillow.readthedocs.io)
class MyMainDiv(app.MainDiv):
def onInit(self):
self.toggleTouchVisualization()
srcbmp = avg.Bitmap("rgb24-64x64.png")
pixels = srcbmp.getPixels(False)
image = Image.frombytes("RGBA", (64,64), pixels)
# Need to swap red and blue.
b,g,r,a = image.split()
image = Image.merge("RGBA", (r,g,b,a))
- image.save("foo.jpg")
+ image.save("foo.png")
destbmp = avg.Bitmap((64,64), avg.B8G8R8A8, "")
destbmp.setPixels(image.tobytes())
node = avg.ImageNode(parent=self)
node.setBitmap(destbmp)
def onExit(self):
pass
def onFrame(self):
pass
app.App().run(MyMainDiv())
|
Update link in the comment and change saved image format to .png
|
## Code Before:
from libavg import app, avg
from PIL import Image
# Demonstrates interoperability with pillow (https://pillow.readthedocs.org/index.html)
class MyMainDiv(app.MainDiv):
def onInit(self):
self.toggleTouchVisualization()
srcbmp = avg.Bitmap("rgb24-64x64.png")
pixels = srcbmp.getPixels(False)
image = Image.frombytes("RGBA", (64,64), pixels)
# Need to swap red and blue.
b,g,r,a = image.split()
image = Image.merge("RGBA", (r,g,b,a))
image.save("foo.jpg")
destbmp = avg.Bitmap((64,64), avg.B8G8R8A8, "")
destbmp.setPixels(image.tobytes())
node = avg.ImageNode(parent=self)
node.setBitmap(destbmp)
def onExit(self):
pass
def onFrame(self):
pass
app.App().run(MyMainDiv())
## Instruction:
Update link in the comment and change saved image format to .png
## Code After:
from libavg import app, avg
from PIL import Image
# Demonstrates interoperability with pillow (https://pillow.readthedocs.io)
class MyMainDiv(app.MainDiv):
def onInit(self):
self.toggleTouchVisualization()
srcbmp = avg.Bitmap("rgb24-64x64.png")
pixels = srcbmp.getPixels(False)
image = Image.frombytes("RGBA", (64,64), pixels)
# Need to swap red and blue.
b,g,r,a = image.split()
image = Image.merge("RGBA", (r,g,b,a))
image.save("foo.png")
destbmp = avg.Bitmap((64,64), avg.B8G8R8A8, "")
destbmp.setPixels(image.tobytes())
node = avg.ImageNode(parent=self)
node.setBitmap(destbmp)
def onExit(self):
pass
def onFrame(self):
pass
app.App().run(MyMainDiv())
|
from libavg import app, avg
from PIL import Image
- # Demonstrates interoperability with pillow (https://pillow.readthedocs.org/index.html)
? -------------
+ # Demonstrates interoperability with pillow (https://pillow.readthedocs.io)
? +
class MyMainDiv(app.MainDiv):
def onInit(self):
self.toggleTouchVisualization()
srcbmp = avg.Bitmap("rgb24-64x64.png")
pixels = srcbmp.getPixels(False)
image = Image.frombytes("RGBA", (64,64), pixels)
# Need to swap red and blue.
b,g,r,a = image.split()
image = Image.merge("RGBA", (r,g,b,a))
- image.save("foo.jpg")
? -
+ image.save("foo.png")
? +
destbmp = avg.Bitmap((64,64), avg.B8G8R8A8, "")
destbmp.setPixels(image.tobytes())
node = avg.ImageNode(parent=self)
node.setBitmap(destbmp)
def onExit(self):
pass
def onFrame(self):
pass
app.App().run(MyMainDiv())
|
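Note: a self-contained Pillow sketch of the red/blue channel swap used in this record; it builds a solid-colour image in memory instead of reading the libavg bitmap, so the size and file name are illustrative.
from PIL import Image

image = Image.new("RGBA", (64, 64), (255, 0, 0, 255))   # solid red
r, g, b, a = image.split()
swapped = Image.merge("RGBA", (b, g, r, a))
print(swapped.getpixel((0, 0)))                         # (0, 0, 255, 255): now blue
swapped.save("swapped.png")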
7b26b893d642d829c55126452fcbebca8cfff806
|
test_settings.py
|
test_settings.py
|
DATABASES = {
'default' : {
'ENGINE': 'django.db.backends.sqlite3'
}
}
INSTALLED_APPS = (
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.auth',
'django.contrib.sites',
'django_extensions',
'newsletter',
)
ROOT_URLCONF = 'test_urls'
SITE_ID = 1
TEMPLATE_DIRS = ('test_templates', )
|
DATABASES = {
'default' : {
'ENGINE': 'django.db.backends.sqlite3'
}
}
INSTALLED_APPS = (
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.auth',
'django.contrib.sites',
'django_extensions',
'newsletter',
)
ROOT_URLCONF = 'test_urls'
SITE_ID = 1
TEMPLATE_DIRS = ('test_templates', )
# Enable time-zone support for Django 1.4 (ignored in older versions)
USE_TZ = True
|
Enable timezone support in tests.
|
Enable timezone support in tests.
|
Python
|
agpl-3.0
|
ctxis/django-newsletter,ctxis/django-newsletter,dsanders11/django-newsletter,dsanders11/django-newsletter,dsanders11/django-newsletter,viaregio/django-newsletter,viaregio/django-newsletter,ctxis/django-newsletter
|
DATABASES = {
'default' : {
'ENGINE': 'django.db.backends.sqlite3'
}
}
INSTALLED_APPS = (
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.auth',
'django.contrib.sites',
'django_extensions',
'newsletter',
)
ROOT_URLCONF = 'test_urls'
SITE_ID = 1
TEMPLATE_DIRS = ('test_templates', )
+ # Enable time-zone support for Django 1.4 (ignored in older versions)
+ USE_TZ = True
+
|
Enable timezone support in tests.
|
## Code Before:
DATABASES = {
'default' : {
'ENGINE': 'django.db.backends.sqlite3'
}
}
INSTALLED_APPS = (
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.auth',
'django.contrib.sites',
'django_extensions',
'newsletter',
)
ROOT_URLCONF = 'test_urls'
SITE_ID = 1
TEMPLATE_DIRS = ('test_templates', )
## Instruction:
Enable timezone support in tests.
## Code After:
DATABASES = {
'default' : {
'ENGINE': 'django.db.backends.sqlite3'
}
}
INSTALLED_APPS = (
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.auth',
'django.contrib.sites',
'django_extensions',
'newsletter',
)
ROOT_URLCONF = 'test_urls'
SITE_ID = 1
TEMPLATE_DIRS = ('test_templates', )
# Enable time-zone support for Django 1.4 (ignored in older versions)
USE_TZ = True
|
DATABASES = {
'default' : {
'ENGINE': 'django.db.backends.sqlite3'
}
}
INSTALLED_APPS = (
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.auth',
'django.contrib.sites',
'django_extensions',
'newsletter',
)
ROOT_URLCONF = 'test_urls'
SITE_ID = 1
TEMPLATE_DIRS = ('test_templates', )
+
+ # Enable time-zone support for Django 1.4 (ignored in older versions)
+ USE_TZ = True
|
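Note: the practical effect of USE_TZ is that datetimes become timezone-aware; this stdlib-only sketch shows the naive versus aware distinction without requiring a configured Django project.
from datetime import datetime, timezone

naive = datetime.now()                # roughly what is stored with USE_TZ = False
aware = datetime.now(timezone.utc)    # roughly what is stored (in UTC) with USE_TZ = True
print(naive.tzinfo)                   # None
print(aware.tzinfo)                   # UTC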
8359d60480371a8f63bdd4ea1b7cf03f231c1350
|
djangopress/settings_tinymce.py
|
djangopress/settings_tinymce.py
|
TINYMCE_DEFAULT_CONFIG = {
'relative_urls': False,
'plugins': "table code image link colorpicker textcolor wordcount",
'tools': "inserttable",
'toolbar': "undo redo | styleselect | bold italic underline strikethrough | alignleft aligncenter alignright alignjustify | bullist numlist outdent indent | link image | forecolor backcolor",
'extended_valid_elements': 'script[language|type|src],events[template|start],#gallery[class|id|show_description|show_title|count|slider]'
}
TINYMCE_JS_URL = "/static/js/tinymce/tinymce.min.js"
|
TINYMCE_DEFAULT_CONFIG = {
'relative_urls': False,
'plugins': "table code image link colorpicker textcolor wordcount",
'tools': "inserttable",
'toolbar': "undo redo | styleselect | bold italic underline strikethrough | alignleft aligncenter alignright alignjustify | bullist numlist outdent indent | link image | forecolor backcolor",
'extended_valid_elements': 'script[language|type|src],events[template|start],#gallery[class|id|show_description|show_title|count|slider],#show_blog_latest[class|id|words|images|blog|count]'
}
TINYMCE_JS_URL = "/static/js/tinymce/tinymce.min.js"
|
Update settings for tinymce to allow show_blog_latest tag
|
Update settings for tinymce to allow show_blog_latest tag
|
Python
|
mit
|
codefisher/djangopress,codefisher/djangopress,codefisher/djangopress,codefisher/djangopress
|
TINYMCE_DEFAULT_CONFIG = {
'relative_urls': False,
'plugins': "table code image link colorpicker textcolor wordcount",
'tools': "inserttable",
'toolbar': "undo redo | styleselect | bold italic underline strikethrough | alignleft aligncenter alignright alignjustify | bullist numlist outdent indent | link image | forecolor backcolor",
- 'extended_valid_elements': 'script[language|type|src],events[template|start],#gallery[class|id|show_description|show_title|count|slider]'
+ 'extended_valid_elements': 'script[language|type|src],events[template|start],#gallery[class|id|show_description|show_title|count|slider],#show_blog_latest[class|id|words|images|blog|count]'
}
TINYMCE_JS_URL = "/static/js/tinymce/tinymce.min.js"
|
Update settings for tinymce to allow show_blog_latest tag
|
## Code Before:
TINYMCE_DEFAULT_CONFIG = {
'relative_urls': False,
'plugins': "table code image link colorpicker textcolor wordcount",
'tools': "inserttable",
'toolbar': "undo redo | styleselect | bold italic underline strikethrough | alignleft aligncenter alignright alignjustify | bullist numlist outdent indent | link image | forecolor backcolor",
'extended_valid_elements': 'script[language|type|src],events[template|start],#gallery[class|id|show_description|show_title|count|slider]'
}
TINYMCE_JS_URL = "/static/js/tinymce/tinymce.min.js"
## Instruction:
Update settings for tinymce to allow show_blog_latest tag
## Code After:
TINYMCE_DEFAULT_CONFIG = {
'relative_urls': False,
'plugins': "table code image link colorpicker textcolor wordcount",
'tools': "inserttable",
'toolbar': "undo redo | styleselect | bold italic underline strikethrough | alignleft aligncenter alignright alignjustify | bullist numlist outdent indent | link image | forecolor backcolor",
'extended_valid_elements': 'script[language|type|src],events[template|start],#gallery[class|id|show_description|show_title|count|slider],#show_blog_latest[class|id|words|images|blog|count]'
}
TINYMCE_JS_URL = "/static/js/tinymce/tinymce.min.js"
|
TINYMCE_DEFAULT_CONFIG = {
'relative_urls': False,
'plugins': "table code image link colorpicker textcolor wordcount",
'tools': "inserttable",
'toolbar': "undo redo | styleselect | bold italic underline strikethrough | alignleft aligncenter alignright alignjustify | bullist numlist outdent indent | link image | forecolor backcolor",
- 'extended_valid_elements': 'script[language|type|src],events[template|start],#gallery[class|id|show_description|show_title|count|slider]'
+ 'extended_valid_elements': 'script[language|type|src],events[template|start],#gallery[class|id|show_description|show_title|count|slider],#show_blog_latest[class|id|words|images|blog|count]'
? ++++++++++++++++++++++++++++++++++++++++++++++++++++
}
TINYMCE_JS_URL = "/static/js/tinymce/tinymce.min.js"
|
ff63299cde0fe34fe3bfdac16593e1a0a989bec4
|
Hydv2/ScreenTools.py
|
Hydv2/ScreenTools.py
|
__author__ = 'Olivier Larrieu'
from gtk import gdk
class ScreenProperties(object):
"""
Usefull to get basic screen informations
"""
@classmethod
def screen_dimension(cls):
"""
Return a dic with the screen height and screen width
"""
width = gdk.screen_width()
height = gdk.screen_height()
return {'width': width, 'height': height}
|
__author__ = 'Olivier Larrieu'
class ScreenProperties(object):
"""
Usefull to get basic screen informations
"""
@classmethod
def screen_dimension(cls):
"""
Return a dic with the screen height and screen width
"""
from Xlib import display
display = display.Display()
root = display.screen().root
desktop = root.get_geometry()
return {'width': desktop.width, 'height': desktop.height}
|
Use Xlib instead of gtk to get screen width and screen height This limit dependances
|
Use Xlib instead of gtk to get screen width and screen height
This limit dependances
|
Python
|
artistic-2.0
|
OlivierLarrieu/HYDV2_EFL,OlivierLarrieu/HYDV2_EFL,OlivierLarrieu/HYDV2_EFL,OlivierLarrieu/HYDV2_EFL
|
__author__ = 'Olivier Larrieu'
-
- from gtk import gdk
class ScreenProperties(object):
"""
Usefull to get basic screen informations
"""
@classmethod
def screen_dimension(cls):
"""
Return a dic with the screen height and screen width
"""
- width = gdk.screen_width()
- height = gdk.screen_height()
+ from Xlib import display
+ display = display.Display()
+ root = display.screen().root
+ desktop = root.get_geometry()
+
- return {'width': width, 'height': height}
+ return {'width': desktop.width, 'height': desktop.height}
|
Use Xlib instead of gtk to get screen width and screen height This limit dependances
|
## Code Before:
__author__ = 'Olivier Larrieu'
from gtk import gdk
class ScreenProperties(object):
"""
Usefull to get basic screen informations
"""
@classmethod
def screen_dimension(cls):
"""
Return a dic with the screen height and screen width
"""
width = gdk.screen_width()
height = gdk.screen_height()
return {'width': width, 'height': height}
## Instruction:
Use Xlib instead of gtk to get screen width and screen height. This limits dependencies
## Code After:
__author__ = 'Olivier Larrieu'
class ScreenProperties(object):
"""
Usefull to get basic screen informations
"""
@classmethod
def screen_dimension(cls):
"""
Return a dic with the screen height and screen width
"""
from Xlib import display
display = display.Display()
root = display.screen().root
desktop = root.get_geometry()
return {'width': desktop.width, 'height': desktop.height}
|
__author__ = 'Olivier Larrieu'
-
- from gtk import gdk
class ScreenProperties(object):
"""
Usefull to get basic screen informations
"""
@classmethod
def screen_dimension(cls):
"""
Return a dic with the screen height and screen width
"""
- width = gdk.screen_width()
- height = gdk.screen_height()
+ from Xlib import display
+ display = display.Display()
+ root = display.screen().root
+ desktop = root.get_geometry()
+
- return {'width': width, 'height': height}
+ return {'width': desktop.width, 'height': desktop.height}
? ++++++++ ++++++++
|
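The rewrite above swaps the GTK dependency for python-xlib. Below is a minimal standalone sketch of the same query, written so the `display` module name is not rebound (the method in the record shadows it, which works for a single call but is easy to trip over) and so the X connection is closed afterwards:

```python
from Xlib import display


def screen_dimension():
    # Open a connection to the X server and read the root window geometry.
    d = display.Display()
    try:
        geometry = d.screen().root.get_geometry()
        return {'width': geometry.width, 'height': geometry.height}
    finally:
        d.close()
```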
0c913d4bf94637da916b609b1b1d0d34b03776b7
|
tests/test_logger.py
|
tests/test_logger.py
|
import pytest
from mugloar import dragon, logger
@pytest.fixture
def log_instance():
"""Returns a Logger instance"""
return logger.Logger()
@pytest.fixture
def knight():
return {'agility': 8, 'endurance': 8, 'armor': 0, 'attack': 4}
@pytest.fixture
def dragon_instance():
return dragon.Dragon()
@pytest.fixture
def stats_map():
return {'attack': 'scaleThickness',
'armor': 'clawSharpness',
'agility': 'wingStrength',
'endurance': 'fireBreath'}
def test_comparison(log_instance, knight, dragon_instance, stats_map):
log_instance.comparison(knight, dragon_instance, stats_map)
|
import pytest
from mugloar import dragon, logger
@pytest.fixture
def log_instance():
"""Returns a Logger instance"""
return logger.Logger()
@pytest.fixture
def knight():
return [('endurance', 8), ('attack', 8), ('armor', 0), ('agility', 4)]
@pytest.fixture
def dragon_instance():
return dragon.Dragon()
@pytest.fixture
def stats_map():
return {'attack': 'scaleThickness',
'armor': 'clawSharpness',
'agility': 'wingStrength',
'endurance': 'fireBreath'}
def test_comparison(log_instance, knight, dragon_instance, stats_map):
dragon_instance.set_relative_stats((5, 5, 5, 5), knight)
log_instance.comparison(knight, dragon_instance, stats_map)
|
Implement rudimentary unit tests for logger class
|
Implement rudimentary unit tests for logger class
|
Python
|
mit
|
reinikai/mugloar
|
import pytest
from mugloar import dragon, logger
@pytest.fixture
def log_instance():
"""Returns a Logger instance"""
return logger.Logger()
@pytest.fixture
def knight():
- return {'agility': 8, 'endurance': 8, 'armor': 0, 'attack': 4}
+ return [('endurance', 8), ('attack', 8), ('armor', 0), ('agility', 4)]
@pytest.fixture
def dragon_instance():
return dragon.Dragon()
@pytest.fixture
def stats_map():
return {'attack': 'scaleThickness',
'armor': 'clawSharpness',
'agility': 'wingStrength',
'endurance': 'fireBreath'}
def test_comparison(log_instance, knight, dragon_instance, stats_map):
+ dragon_instance.set_relative_stats((5, 5, 5, 5), knight)
log_instance.comparison(knight, dragon_instance, stats_map)
|
Implement rudimentary unit tests for logger class
|
## Code Before:
import pytest
from mugloar import dragon, logger
@pytest.fixture
def log_instance():
"""Returns a Logger instance"""
return logger.Logger()
@pytest.fixture
def knight():
return {'agility': 8, 'endurance': 8, 'armor': 0, 'attack': 4}
@pytest.fixture
def dragon_instance():
return dragon.Dragon()
@pytest.fixture
def stats_map():
return {'attack': 'scaleThickness',
'armor': 'clawSharpness',
'agility': 'wingStrength',
'endurance': 'fireBreath'}
def test_comparison(log_instance, knight, dragon_instance, stats_map):
log_instance.comparison(knight, dragon_instance, stats_map)
## Instruction:
Implement rudimentary unit tests for logger class
## Code After:
import pytest
from mugloar import dragon, logger
@pytest.fixture
def log_instance():
"""Returns a Logger instance"""
return logger.Logger()
@pytest.fixture
def knight():
return [('endurance', 8), ('attack', 8), ('armor', 0), ('agility', 4)]
@pytest.fixture
def dragon_instance():
return dragon.Dragon()
@pytest.fixture
def stats_map():
return {'attack': 'scaleThickness',
'armor': 'clawSharpness',
'agility': 'wingStrength',
'endurance': 'fireBreath'}
def test_comparison(log_instance, knight, dragon_instance, stats_map):
dragon_instance.set_relative_stats((5, 5, 5, 5), knight)
log_instance.comparison(knight, dragon_instance, stats_map)
|
import pytest
from mugloar import dragon, logger
@pytest.fixture
def log_instance():
"""Returns a Logger instance"""
return logger.Logger()
@pytest.fixture
def knight():
- return {'agility': 8, 'endurance': 8, 'armor': 0, 'attack': 4}
+ return [('endurance', 8), ('attack', 8), ('armor', 0), ('agility', 4)]
@pytest.fixture
def dragon_instance():
return dragon.Dragon()
@pytest.fixture
def stats_map():
return {'attack': 'scaleThickness',
'armor': 'clawSharpness',
'agility': 'wingStrength',
'endurance': 'fireBreath'}
def test_comparison(log_instance, knight, dragon_instance, stats_map):
+ dragon_instance.set_relative_stats((5, 5, 5, 5), knight)
log_instance.comparison(knight, dragon_instance, stats_map)
|
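In the updated test the dragon must be primed with `set_relative_stats` before `comparison` is called. If more tests come to need that, pytest allows the priming to live in a derived fixture; the sketch below invents the fixture name, it is not taken from the repository:

```python
@pytest.fixture
def prepared_dragon(dragon_instance, knight):
    # Hypothetical convenience fixture: apply the relative stats once per test.
    dragon_instance.set_relative_stats((5, 5, 5, 5), knight)
    return dragon_instance


def test_comparison_prepared(log_instance, knight, prepared_dragon, stats_map):
    log_instance.comparison(knight, prepared_dragon, stats_map)
```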
272ece1774cebaf8d6d6ae9e0dfb5fe0cce97083
|
manage.py
|
manage.py
|
import os
import sys
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'conductor.settings.development')
if 'test' in sys.argv:
# For now, fake setting the environment for testing.
os.environ['DJANGO_SETTINGS_MODULE'] = 'conductor.settings.test'
os.environ['SECRET_KEY'] = 'asecrettoeverybody'
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
import os
import sys
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'conductor.settings.development')
if 'test' in sys.argv:
# For now, fake setting the environment for testing.
os.environ['DJANGO_SETTINGS_MODULE'] = 'conductor.settings.test'
os.environ['CORS_ORIGIN_WHITELIST'] = 'localhost:4200'
os.environ['SECRET_KEY'] = 'asecrettoeverybody'
os.environ['STATIC_URL'] = '/static/'
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
Add missing env variables for testing.
|
Add missing env variables for testing.
|
Python
|
bsd-2-clause
|
mblayman/lcp,mblayman/lcp,mblayman/lcp
|
import os
import sys
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'conductor.settings.development')
if 'test' in sys.argv:
# For now, fake setting the environment for testing.
os.environ['DJANGO_SETTINGS_MODULE'] = 'conductor.settings.test'
+ os.environ['CORS_ORIGIN_WHITELIST'] = 'localhost:4200'
os.environ['SECRET_KEY'] = 'asecrettoeverybody'
+ os.environ['STATIC_URL'] = '/static/'
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
Add missing env variables for testing.
|
## Code Before:
import os
import sys
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'conductor.settings.development')
if 'test' in sys.argv:
# For now, fake setting the environment for testing.
os.environ['DJANGO_SETTINGS_MODULE'] = 'conductor.settings.test'
os.environ['SECRET_KEY'] = 'asecrettoeverybody'
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
## Instruction:
Add missing env variables for testing.
## Code After:
import os
import sys
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'conductor.settings.development')
if 'test' in sys.argv:
# For now, fake setting the environment for testing.
os.environ['DJANGO_SETTINGS_MODULE'] = 'conductor.settings.test'
os.environ['CORS_ORIGIN_WHITELIST'] = 'localhost:4200'
os.environ['SECRET_KEY'] = 'asecrettoeverybody'
os.environ['STATIC_URL'] = '/static/'
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
import os
import sys
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'conductor.settings.development')
if 'test' in sys.argv:
# For now, fake setting the environment for testing.
os.environ['DJANGO_SETTINGS_MODULE'] = 'conductor.settings.test'
+ os.environ['CORS_ORIGIN_WHITELIST'] = 'localhost:4200'
os.environ['SECRET_KEY'] = 'asecrettoeverybody'
+ os.environ['STATIC_URL'] = '/static/'
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
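The block above assigns the test values unconditionally, which also overwrites anything already exported in the shell. A hedged alternative, if externally provided values should win, is to mirror the `setdefault` call already used for `DJANGO_SETTINGS_MODULE`:

```python
# Only fill in values that the environment does not already provide.
os.environ.setdefault('CORS_ORIGIN_WHITELIST', 'localhost:4200')
os.environ.setdefault('SECRET_KEY', 'asecrettoeverybody')
os.environ.setdefault('STATIC_URL', '/static/')
```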
ee82b77f562ee1d49c2fc724a3fc58b101c0dd2b
|
src/devilry_qualifiesforexam/devilry_qualifiesforexam/urls.py
|
src/devilry_qualifiesforexam/devilry_qualifiesforexam/urls.py
|
from django.conf.urls import patterns, url, include
from django.contrib.auth.decorators import login_required
from django.views.i18n import javascript_catalog
from django.views.decorators.csrf import csrf_protect, ensure_csrf_cookie
from devilry_settings.i18n import get_javascript_catalog_packages
from .views import AppView
i18n_packages = get_javascript_catalog_packages('devilry_header', 'devilry.apps.core')
urlpatterns = patterns('devilry_qualifiesforexam',
url('^rest/', include('devilry_qualifiesforexam.rest.urls')),
url('^wizard/(?P<periodid>\d+)/$',
login_required(csrf_protect(ensure_csrf_cookie(AppView.as_view()))),
name='devilry_qualifiesforexam_ui'),
url('^i18n.js$', javascript_catalog, kwargs={'packages': i18n_packages},
name='devilry_qualifiesforexam_i18n')
)
|
from django.conf.urls import patterns, url, include
from django.contrib.auth.decorators import login_required
from django.views.i18n import javascript_catalog
from django.views.decorators.csrf import csrf_protect, ensure_csrf_cookie
from devilry_settings.i18n import get_javascript_catalog_packages
from .views import AppView
i18n_packages = get_javascript_catalog_packages('devilry_header', 'devilry.apps.core')
urlpatterns = patterns('devilry_qualifiesforexam',
url('^rest/', include('devilry_qualifiesforexam.rest.urls')),
url('^$', login_required(csrf_protect(ensure_csrf_cookie(AppView.as_view()))),
name='devilry_qualifiesforexam_ui'),
url('^i18n.js$', javascript_catalog, kwargs={'packages': i18n_packages},
name='devilry_qualifiesforexam_i18n')
)
|
Remove period id from app url.
|
devilry_qualifiesforexam: Remove period id from app url.
|
Python
|
bsd-3-clause
|
devilry/devilry-django,devilry/devilry-django,devilry/devilry-django,devilry/devilry-django
|
from django.conf.urls import patterns, url, include
from django.contrib.auth.decorators import login_required
from django.views.i18n import javascript_catalog
from django.views.decorators.csrf import csrf_protect, ensure_csrf_cookie
from devilry_settings.i18n import get_javascript_catalog_packages
from .views import AppView
i18n_packages = get_javascript_catalog_packages('devilry_header', 'devilry.apps.core')
urlpatterns = patterns('devilry_qualifiesforexam',
url('^rest/', include('devilry_qualifiesforexam.rest.urls')),
- url('^wizard/(?P<periodid>\d+)/$',
- login_required(csrf_protect(ensure_csrf_cookie(AppView.as_view()))),
+ url('^$', login_required(csrf_protect(ensure_csrf_cookie(AppView.as_view()))),
name='devilry_qualifiesforexam_ui'),
url('^i18n.js$', javascript_catalog, kwargs={'packages': i18n_packages},
name='devilry_qualifiesforexam_i18n')
)
|
Remove period id from app url.
|
## Code Before:
from django.conf.urls import patterns, url, include
from django.contrib.auth.decorators import login_required
from django.views.i18n import javascript_catalog
from django.views.decorators.csrf import csrf_protect, ensure_csrf_cookie
from devilry_settings.i18n import get_javascript_catalog_packages
from .views import AppView
i18n_packages = get_javascript_catalog_packages('devilry_header', 'devilry.apps.core')
urlpatterns = patterns('devilry_qualifiesforexam',
url('^rest/', include('devilry_qualifiesforexam.rest.urls')),
url('^wizard/(?P<periodid>\d+)/$',
login_required(csrf_protect(ensure_csrf_cookie(AppView.as_view()))),
name='devilry_qualifiesforexam_ui'),
url('^i18n.js$', javascript_catalog, kwargs={'packages': i18n_packages},
name='devilry_qualifiesforexam_i18n')
)
## Instruction:
Remove period id from app url.
## Code After:
from django.conf.urls import patterns, url, include
from django.contrib.auth.decorators import login_required
from django.views.i18n import javascript_catalog
from django.views.decorators.csrf import csrf_protect, ensure_csrf_cookie
from devilry_settings.i18n import get_javascript_catalog_packages
from .views import AppView
i18n_packages = get_javascript_catalog_packages('devilry_header', 'devilry.apps.core')
urlpatterns = patterns('devilry_qualifiesforexam',
url('^rest/', include('devilry_qualifiesforexam.rest.urls')),
url('^$', login_required(csrf_protect(ensure_csrf_cookie(AppView.as_view()))),
name='devilry_qualifiesforexam_ui'),
url('^i18n.js$', javascript_catalog, kwargs={'packages': i18n_packages},
name='devilry_qualifiesforexam_i18n')
)
|
from django.conf.urls import patterns, url, include
from django.contrib.auth.decorators import login_required
from django.views.i18n import javascript_catalog
from django.views.decorators.csrf import csrf_protect, ensure_csrf_cookie
from devilry_settings.i18n import get_javascript_catalog_packages
from .views import AppView
i18n_packages = get_javascript_catalog_packages('devilry_header', 'devilry.apps.core')
urlpatterns = patterns('devilry_qualifiesforexam',
url('^rest/', include('devilry_qualifiesforexam.rest.urls')),
- url('^wizard/(?P<periodid>\d+)/$',
- login_required(csrf_protect(ensure_csrf_cookie(AppView.as_view()))),
? ^^^
+ url('^$', login_required(csrf_protect(ensure_csrf_cookie(AppView.as_view()))),
? ^^^^^^^^^
name='devilry_qualifiesforexam_ui'),
url('^i18n.js$', javascript_catalog, kwargs={'packages': i18n_packages},
name='devilry_qualifiesforexam_i18n')
)
|
1619c955c75f91b9d61c3195704f17fc88ef9e04
|
aybu/manager/utils/pshell.py
|
aybu/manager/utils/pshell.py
|
from sqlalchemy import engine_from_config
import aybu.manager.models
def setup(env):
settings = env['request'].registry.settings
env['models'] = aybu.manager.models
env['engine'] = engine_from_config(settings, 'sqlalchemy.')
env['request'].set_db_engine = env['engine']
aybu.core.models.Base.metadata.bind = env['engine']
|
from sqlalchemy import engine_from_config
import aybu.manager.models
def setup(env):
settings = env['request'].registry.settings
env['models'] = aybu.manager.models
env['engine'] = engine_from_config(settings, 'sqlalchemy.')
env['request'].set_db_engine = env['engine']
aybu.manager.models.Base.metadata.bind = env['engine']
aybu.manager.models.Environment.initialize(settings)
env['session'] = env['request'].db_session
|
Initialize session and environment in shell
|
Initialize session and environment in shell
|
Python
|
apache-2.0
|
asidev/aybu-manager
|
from sqlalchemy import engine_from_config
import aybu.manager.models
def setup(env):
settings = env['request'].registry.settings
env['models'] = aybu.manager.models
env['engine'] = engine_from_config(settings, 'sqlalchemy.')
env['request'].set_db_engine = env['engine']
- aybu.core.models.Base.metadata.bind = env['engine']
+ aybu.manager.models.Base.metadata.bind = env['engine']
+ aybu.manager.models.Environment.initialize(settings)
+ env['session'] = env['request'].db_session
+
|
Initialize session and environment in shell
|
## Code Before:
from sqlalchemy import engine_from_config
import aybu.manager.models
def setup(env):
settings = env['request'].registry.settings
env['models'] = aybu.manager.models
env['engine'] = engine_from_config(settings, 'sqlalchemy.')
env['request'].set_db_engine = env['engine']
aybu.core.models.Base.metadata.bind = env['engine']
## Instruction:
Initialize session and environment in shell
## Code After:
from sqlalchemy import engine_from_config
import aybu.manager.models
def setup(env):
settings = env['request'].registry.settings
env['models'] = aybu.manager.models
env['engine'] = engine_from_config(settings, 'sqlalchemy.')
env['request'].set_db_engine = env['engine']
aybu.manager.models.Base.metadata.bind = env['engine']
aybu.manager.models.Environment.initialize(settings)
env['session'] = env['request'].db_session
|
from sqlalchemy import engine_from_config
import aybu.manager.models
def setup(env):
settings = env['request'].registry.settings
env['models'] = aybu.manager.models
env['engine'] = engine_from_config(settings, 'sqlalchemy.')
env['request'].set_db_engine = env['engine']
- aybu.core.models.Base.metadata.bind = env['engine']
? ^^ -
+ aybu.manager.models.Base.metadata.bind = env['engine']
? ^^^^^^
+ aybu.manager.models.Environment.initialize(settings)
+ env['session'] = env['request'].db_session
+
|
c4278b404b313c4fa5fad67a5703b7368d1c4428
|
fileapi/tests/test_qunit.py
|
fileapi/tests/test_qunit.py
|
import os
from django.conf import settings
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test.utils import override_settings
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.ui import WebDriverWait
@override_settings(STATICFILES_DIRS=(os.path.join(os.path.dirname(__file__), 'static'), ))
class QunitTests(StaticLiveServerTestCase):
"""Iteractive tests with selenium."""
@classmethod
def setUpClass(cls):
cls.browser = webdriver.PhantomJS()
super().setUpClass()
@classmethod
def tearDownClass(cls):
cls.browser.quit()
super().tearDownClass()
def test_qunit(self):
"""Load the QUnit tests and check for failures."""
self.browser.get(self.live_server_url + settings.STATIC_URL + 'index.html')
results = WebDriverWait(self.browser, 5).until(
expected_conditions.visibility_of_element_located((By.ID, 'qunit-testresult')))
total = int(results.find_element_by_class_name('total').text)
failed = int(results.find_element_by_class_name('failed').text)
self.assertTrue(total and not failed, results.text)
|
import os
from django.conf import settings
from django.contrib.staticfiles import finders, storage
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test.utils import override_settings
from django.utils.functional import empty
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.ui import WebDriverWait
@override_settings(STATICFILES_DIRS=(os.path.join(os.path.dirname(__file__), 'static'), ))
class QunitTests(StaticLiveServerTestCase):
"""Iteractive tests with selenium."""
@classmethod
def setUpClass(cls):
cls.browser = webdriver.PhantomJS()
super().setUpClass()
@classmethod
def tearDownClass(cls):
cls.browser.quit()
super().tearDownClass()
def setUp(self):
# Clear the cache versions of the staticfiles finders and storage
# See https://code.djangoproject.com/ticket/24197
storage.staticfiles_storage._wrapped = empty
finders.get_finder.cache_clear()
def test_qunit(self):
"""Load the QUnit tests and check for failures."""
self.browser.get(self.live_server_url + settings.STATIC_URL + 'index.html')
results = WebDriverWait(self.browser, 5).until(
expected_conditions.visibility_of_element_located((By.ID, 'qunit-testresult')))
total = int(results.find_element_by_class_name('total').text)
failed = int(results.find_element_by_class_name('failed').text)
self.assertTrue(total and not failed, results.text)
|
Clear global state/caching handled by Django so the test passes when run in the full suite.
|
Clear global state/caching handled by Django so the test passes when run in the full suite.
|
Python
|
bsd-2-clause
|
mlavin/fileapi,mlavin/fileapi,mlavin/fileapi
|
import os
from django.conf import settings
+ from django.contrib.staticfiles import finders, storage
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test.utils import override_settings
+ from django.utils.functional import empty
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.ui import WebDriverWait
@override_settings(STATICFILES_DIRS=(os.path.join(os.path.dirname(__file__), 'static'), ))
class QunitTests(StaticLiveServerTestCase):
"""Iteractive tests with selenium."""
@classmethod
def setUpClass(cls):
cls.browser = webdriver.PhantomJS()
super().setUpClass()
@classmethod
def tearDownClass(cls):
cls.browser.quit()
super().tearDownClass()
+ def setUp(self):
+ # Clear the cache versions of the staticfiles finders and storage
+ # See https://code.djangoproject.com/ticket/24197
+ storage.staticfiles_storage._wrapped = empty
+ finders.get_finder.cache_clear()
+
def test_qunit(self):
"""Load the QUnit tests and check for failures."""
self.browser.get(self.live_server_url + settings.STATIC_URL + 'index.html')
results = WebDriverWait(self.browser, 5).until(
expected_conditions.visibility_of_element_located((By.ID, 'qunit-testresult')))
total = int(results.find_element_by_class_name('total').text)
failed = int(results.find_element_by_class_name('failed').text)
self.assertTrue(total and not failed, results.text)
|
Clear global state/caching handled by Django so the test passes when run in the full suite.
|
## Code Before:
import os
from django.conf import settings
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test.utils import override_settings
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.ui import WebDriverWait
@override_settings(STATICFILES_DIRS=(os.path.join(os.path.dirname(__file__), 'static'), ))
class QunitTests(StaticLiveServerTestCase):
"""Iteractive tests with selenium."""
@classmethod
def setUpClass(cls):
cls.browser = webdriver.PhantomJS()
super().setUpClass()
@classmethod
def tearDownClass(cls):
cls.browser.quit()
super().tearDownClass()
def test_qunit(self):
"""Load the QUnit tests and check for failures."""
self.browser.get(self.live_server_url + settings.STATIC_URL + 'index.html')
results = WebDriverWait(self.browser, 5).until(
expected_conditions.visibility_of_element_located((By.ID, 'qunit-testresult')))
total = int(results.find_element_by_class_name('total').text)
failed = int(results.find_element_by_class_name('failed').text)
self.assertTrue(total and not failed, results.text)
## Instruction:
Clear global state/caching handled by Django so the test passes when run in the full suite.
## Code After:
import os
from django.conf import settings
from django.contrib.staticfiles import finders, storage
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test.utils import override_settings
from django.utils.functional import empty
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.ui import WebDriverWait
@override_settings(STATICFILES_DIRS=(os.path.join(os.path.dirname(__file__), 'static'), ))
class QunitTests(StaticLiveServerTestCase):
"""Iteractive tests with selenium."""
@classmethod
def setUpClass(cls):
cls.browser = webdriver.PhantomJS()
super().setUpClass()
@classmethod
def tearDownClass(cls):
cls.browser.quit()
super().tearDownClass()
def setUp(self):
# Clear the cache versions of the staticfiles finders and storage
# See https://code.djangoproject.com/ticket/24197
storage.staticfiles_storage._wrapped = empty
finders.get_finder.cache_clear()
def test_qunit(self):
"""Load the QUnit tests and check for failures."""
self.browser.get(self.live_server_url + settings.STATIC_URL + 'index.html')
results = WebDriverWait(self.browser, 5).until(
expected_conditions.visibility_of_element_located((By.ID, 'qunit-testresult')))
total = int(results.find_element_by_class_name('total').text)
failed = int(results.find_element_by_class_name('failed').text)
self.assertTrue(total and not failed, results.text)
|
import os
from django.conf import settings
+ from django.contrib.staticfiles import finders, storage
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test.utils import override_settings
+ from django.utils.functional import empty
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.ui import WebDriverWait
@override_settings(STATICFILES_DIRS=(os.path.join(os.path.dirname(__file__), 'static'), ))
class QunitTests(StaticLiveServerTestCase):
"""Iteractive tests with selenium."""
@classmethod
def setUpClass(cls):
cls.browser = webdriver.PhantomJS()
super().setUpClass()
@classmethod
def tearDownClass(cls):
cls.browser.quit()
super().tearDownClass()
+ def setUp(self):
+ # Clear the cache versions of the staticfiles finders and storage
+ # See https://code.djangoproject.com/ticket/24197
+ storage.staticfiles_storage._wrapped = empty
+ finders.get_finder.cache_clear()
+
def test_qunit(self):
"""Load the QUnit tests and check for failures."""
self.browser.get(self.live_server_url + settings.STATIC_URL + 'index.html')
results = WebDriverWait(self.browser, 5).until(
expected_conditions.visibility_of_element_located((By.ID, 'qunit-testresult')))
total = int(results.find_element_by_class_name('total').text)
failed = int(results.find_element_by_class_name('failed').text)
self.assertTrue(total and not failed, results.text)
|
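The reset works because Django keeps two module-level caches: the lazily wrapped `staticfiles_storage` object and the `lru_cache` on `finders.get_finder`. Once another test has filled them under different `STATICFILES_DIRS`, the `override_settings` above is otherwise invisible. If several live-server test classes need the same reset, it can be factored into a mixin; the class name below is invented and it relies on the `storage`, `finders` and `empty` imports shown in the record:

```python
class StaticfilesResetMixin:
    """Hypothetical helper: drop cached staticfiles state before each test."""

    def setUp(self):
        storage.staticfiles_storage._wrapped = empty
        finders.get_finder.cache_clear()
        super().setUp()
```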
71cf0ce2348b46841f2f37c2c8934726832e5094
|
takeyourmeds/groups/groups_admin/tests.py
|
takeyourmeds/groups/groups_admin/tests.py
|
from takeyourmeds.utils.test import SuperuserTestCase
class SmokeTest(SuperuserTestCase):
def test_index(self):
self.assertGET(200, 'groups:admin:index', login=True)
def test_view(self):
self.assertGET(
200,
'groups:admin:view',
self.user.profile.group_id,
login=True,
)
def test_create_access_tokens(self):
group = self.user.profile.group
self.assertEqual(group.access_tokens.count(), 0)
self.assertPOST(
302,
{'num_tokens': 10},
'groups:admin:create-access-tokens',
group.pk,
login=True,
)
self.assertEqual(group.access_tokens.count(), 10)
|
from takeyourmeds.utils.test import SuperuserTestCase
class SmokeTest(SuperuserTestCase):
def test_index(self):
self.assertGET(200, 'groups:admin:index', login=True)
def test_view(self):
self.assertGET(
200,
'groups:admin:view',
self.user.profile.group_id,
login=True,
)
def test_access_tokens_csv(self):
self.assertGET(
200,
'groups:admin:access-tokens-csv',
self.user.profile.group_id,
login=True,
)
def test_create_access_tokens(self):
group = self.user.profile.group
self.assertEqual(group.access_tokens.count(), 0)
self.assertPOST(
302,
{'num_tokens': 10},
'groups:admin:create-access-tokens',
group.pk,
login=True,
)
self.assertEqual(group.access_tokens.count(), 10)
|
Test access token csv download
|
Test access token csv download
Signed-off-by: Chris Lamb <[email protected]>
|
Python
|
mit
|
takeyourmeds/takeyourmeds-web,takeyourmeds/takeyourmeds-web,takeyourmeds/takeyourmeds-web,takeyourmeds/takeyourmeds-web
|
from takeyourmeds.utils.test import SuperuserTestCase
class SmokeTest(SuperuserTestCase):
def test_index(self):
self.assertGET(200, 'groups:admin:index', login=True)
def test_view(self):
self.assertGET(
200,
'groups:admin:view',
+ self.user.profile.group_id,
+ login=True,
+ )
+
+ def test_access_tokens_csv(self):
+ self.assertGET(
+ 200,
+ 'groups:admin:access-tokens-csv',
self.user.profile.group_id,
login=True,
)
def test_create_access_tokens(self):
group = self.user.profile.group
self.assertEqual(group.access_tokens.count(), 0)
self.assertPOST(
302,
{'num_tokens': 10},
'groups:admin:create-access-tokens',
group.pk,
login=True,
)
self.assertEqual(group.access_tokens.count(), 10)
|
Test access token csv download
|
## Code Before:
from takeyourmeds.utils.test import SuperuserTestCase
class SmokeTest(SuperuserTestCase):
def test_index(self):
self.assertGET(200, 'groups:admin:index', login=True)
def test_view(self):
self.assertGET(
200,
'groups:admin:view',
self.user.profile.group_id,
login=True,
)
def test_create_access_tokens(self):
group = self.user.profile.group
self.assertEqual(group.access_tokens.count(), 0)
self.assertPOST(
302,
{'num_tokens': 10},
'groups:admin:create-access-tokens',
group.pk,
login=True,
)
self.assertEqual(group.access_tokens.count(), 10)
## Instruction:
Test access token csv download
## Code After:
from takeyourmeds.utils.test import SuperuserTestCase
class SmokeTest(SuperuserTestCase):
def test_index(self):
self.assertGET(200, 'groups:admin:index', login=True)
def test_view(self):
self.assertGET(
200,
'groups:admin:view',
self.user.profile.group_id,
login=True,
)
def test_access_tokens_csv(self):
self.assertGET(
200,
'groups:admin:access-tokens-csv',
self.user.profile.group_id,
login=True,
)
def test_create_access_tokens(self):
group = self.user.profile.group
self.assertEqual(group.access_tokens.count(), 0)
self.assertPOST(
302,
{'num_tokens': 10},
'groups:admin:create-access-tokens',
group.pk,
login=True,
)
self.assertEqual(group.access_tokens.count(), 10)
|
from takeyourmeds.utils.test import SuperuserTestCase
class SmokeTest(SuperuserTestCase):
def test_index(self):
self.assertGET(200, 'groups:admin:index', login=True)
def test_view(self):
self.assertGET(
200,
'groups:admin:view',
+ self.user.profile.group_id,
+ login=True,
+ )
+
+ def test_access_tokens_csv(self):
+ self.assertGET(
+ 200,
+ 'groups:admin:access-tokens-csv',
self.user.profile.group_id,
login=True,
)
def test_create_access_tokens(self):
group = self.user.profile.group
self.assertEqual(group.access_tokens.count(), 0)
self.assertPOST(
302,
{'num_tokens': 10},
'groups:admin:create-access-tokens',
group.pk,
login=True,
)
self.assertEqual(group.access_tokens.count(), 10)
|
6d43df828cb34c8949c8f87c256bde2e6ccb7d3c
|
atamatracker/moviefile.py
|
atamatracker/moviefile.py
|
import cv2
class Movie(object):
"""Movie file object.
"""
def __init__(self, file_path):
self.__capture = cv2.VideoCapture(file_path)
def __del__(self):
self.__capture.release()
def load_image(self, time_sec):
"""Load image at the desired time.
Retruns None if no image could load.
"""
self.__capture.set(cv2.cv.CV_CAP_PROP_POS_MSEC, time_sec * 1000)
f, image = self.__capture.read()
return image
|
import cv2
class Movie(object):
"""Movie file object.
Public properties:
fps (read-only) -- [float] frames per second
width (read-only) -- [int] frame dimension
height (read-only) -- [int] frame dimension
"""
def __init__(self, file_path):
capture = cv2.VideoCapture(file_path)
self.__capture = capture
self.__fps = capture.get(cv2.cv.CV_CAP_PROP_FPS)
self.__width = int(capture.get(cv2.cv.CV_CAP_PROP_FRAME_WIDTH))
self.__height = int(capture.get(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT))
def __del__(self):
self.__capture.release()
@property
def fps(self):
"""frames per second
"""
return self.__fps
@property
def width(self):
"""frame dimension
"""
return self.__width
@property
def height(self):
"""frame dimension
"""
return self.__height
def load_image(self, time_sec):
"""Load image at the desired time.
Retruns None if no image could load.
"""
self.__capture.set(cv2.cv.CV_CAP_PROP_POS_MSEC, time_sec * 1000)
f, image = self.__capture.read()
return image
|
Add some useful read-only properties to Movie class
|
Add some useful read-only properties to Movie class
|
Python
|
mit
|
ptsg/AtamaTracker
|
import cv2
class Movie(object):
"""Movie file object.
+
+ Public properties:
+ fps (read-only) -- [float] frames per second
+ width (read-only) -- [int] frame dimension
+ height (read-only) -- [int] frame dimension
"""
def __init__(self, file_path):
- self.__capture = cv2.VideoCapture(file_path)
+ capture = cv2.VideoCapture(file_path)
+
+ self.__capture = capture
+ self.__fps = capture.get(cv2.cv.CV_CAP_PROP_FPS)
+ self.__width = int(capture.get(cv2.cv.CV_CAP_PROP_FRAME_WIDTH))
+ self.__height = int(capture.get(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT))
def __del__(self):
self.__capture.release()
+
+ @property
+ def fps(self):
+ """frames per second
+ """
+ return self.__fps
+
+ @property
+ def width(self):
+ """frame dimension
+ """
+ return self.__width
+
+ @property
+ def height(self):
+ """frame dimension
+ """
+ return self.__height
def load_image(self, time_sec):
"""Load image at the desired time.
Retruns None if no image could load.
"""
self.__capture.set(cv2.cv.CV_CAP_PROP_POS_MSEC, time_sec * 1000)
f, image = self.__capture.read()
return image
|
Add some useful read-only properties to Movie class
|
## Code Before:
import cv2
class Movie(object):
"""Movie file object.
"""
def __init__(self, file_path):
self.__capture = cv2.VideoCapture(file_path)
def __del__(self):
self.__capture.release()
def load_image(self, time_sec):
"""Load image at the desired time.
Retruns None if no image could load.
"""
self.__capture.set(cv2.cv.CV_CAP_PROP_POS_MSEC, time_sec * 1000)
f, image = self.__capture.read()
return image
## Instruction:
Add some useful read-only properties to Movie class
## Code After:
import cv2
class Movie(object):
"""Movie file object.
Public properties:
fps (read-only) -- [float] frames per second
width (read-only) -- [int] frame dimension
height (read-only) -- [int] frame dimension
"""
def __init__(self, file_path):
capture = cv2.VideoCapture(file_path)
self.__capture = capture
self.__fps = capture.get(cv2.cv.CV_CAP_PROP_FPS)
self.__width = int(capture.get(cv2.cv.CV_CAP_PROP_FRAME_WIDTH))
self.__height = int(capture.get(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT))
def __del__(self):
self.__capture.release()
@property
def fps(self):
"""frames per second
"""
return self.__fps
@property
def width(self):
"""frame dimension
"""
return self.__width
@property
def height(self):
"""frame dimension
"""
return self.__height
def load_image(self, time_sec):
"""Load image at the desired time.
Retruns None if no image could load.
"""
self.__capture.set(cv2.cv.CV_CAP_PROP_POS_MSEC, time_sec * 1000)
f, image = self.__capture.read()
return image
|
import cv2
class Movie(object):
"""Movie file object.
+
+ Public properties:
+ fps (read-only) -- [float] frames per second
+ width (read-only) -- [int] frame dimension
+ height (read-only) -- [int] frame dimension
"""
def __init__(self, file_path):
- self.__capture = cv2.VideoCapture(file_path)
? -------
+ capture = cv2.VideoCapture(file_path)
+
+ self.__capture = capture
+ self.__fps = capture.get(cv2.cv.CV_CAP_PROP_FPS)
+ self.__width = int(capture.get(cv2.cv.CV_CAP_PROP_FRAME_WIDTH))
+ self.__height = int(capture.get(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT))
def __del__(self):
self.__capture.release()
+
+ @property
+ def fps(self):
+ """frames per second
+ """
+ return self.__fps
+
+ @property
+ def width(self):
+ """frame dimension
+ """
+ return self.__width
+
+ @property
+ def height(self):
+ """frame dimension
+ """
+ return self.__height
def load_image(self, time_sec):
"""Load image at the desired time.
Retruns None if no image could load.
"""
self.__capture.set(cv2.cv.CV_CAP_PROP_POS_MSEC, time_sec * 1000)
f, image = self.__capture.read()
return image
|
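With the capture properties read once in `__init__`, callers can size buffers or convert timestamps to frame counts without touching OpenCV themselves. A minimal usage sketch follows; the file name is hypothetical and the import path is inferred from the module path in the record:

```python
import cv2
from atamatracker.moviefile import Movie

movie = Movie('sample.mov')            # hypothetical clip
print(movie.fps, movie.width, movie.height)

frame = movie.load_image(2.0)          # BGR ndarray at t = 2 s, or None
if frame is not None:
    cv2.imwrite('frame_2s.png', frame)
```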
d0a907872749f1bb54d6e8e160ea170059289623
|
source/custom/combo.py
|
source/custom/combo.py
|
import wx
from wx.combo import OwnerDrawnComboBox
class ComboBox(OwnerDrawnComboBox):
def __init__(self, parent, win_id, value=wx.EmptyString, pos=wx.DefaultPosition,
size=wx.DefaultSize, choices=[], style=0, validator=wx.DefaultValidator,
name=wx.ComboBoxNameStr):
OwnerDrawnComboBox.__init__(self, parent, win_id, value, pos, size, choices,
style, validator, name)
self.Default = self.GetLabel()
self.Priority = []
## Resets ComboBox to defaults
def Reset(self):
if not self.Count:
self.SetValue(self.Default)
return self.Value == self.Default
return False
|
import wx
from wx.combo import OwnerDrawnComboBox
class ComboBox(OwnerDrawnComboBox):
def __init__(self, parent, win_id=wx.ID_ANY, value=wx.EmptyString, pos=wx.DefaultPosition,
size=wx.DefaultSize, choices=[], style=0, validator=wx.DefaultValidator,
name=wx.ComboBoxNameStr):
OwnerDrawnComboBox.__init__(self, parent, win_id, value, pos, size, choices,
style, validator, name)
self.Default = self.GetLabel()
self.Priority = []
## Resets ComboBox to defaults
def Reset(self):
if not self.Count:
self.SetValue(self.Default)
return self.Value == self.Default
return False
|
Set ComboBox class default ID to wx.ID_ANY
|
Set ComboBox class default ID to wx.ID_ANY
|
Python
|
mit
|
AntumDeluge/desktop_recorder,AntumDeluge/desktop_recorder
|
import wx
from wx.combo import OwnerDrawnComboBox
class ComboBox(OwnerDrawnComboBox):
- def __init__(self, parent, win_id, value=wx.EmptyString, pos=wx.DefaultPosition,
+ def __init__(self, parent, win_id=wx.ID_ANY, value=wx.EmptyString, pos=wx.DefaultPosition,
size=wx.DefaultSize, choices=[], style=0, validator=wx.DefaultValidator,
name=wx.ComboBoxNameStr):
OwnerDrawnComboBox.__init__(self, parent, win_id, value, pos, size, choices,
style, validator, name)
self.Default = self.GetLabel()
self.Priority = []
## Resets ComboBox to defaults
def Reset(self):
if not self.Count:
self.SetValue(self.Default)
return self.Value == self.Default
return False
|
Set ComboBox class default ID to wx.ID_ANY
|
## Code Before:
import wx
from wx.combo import OwnerDrawnComboBox
class ComboBox(OwnerDrawnComboBox):
def __init__(self, parent, win_id, value=wx.EmptyString, pos=wx.DefaultPosition,
size=wx.DefaultSize, choices=[], style=0, validator=wx.DefaultValidator,
name=wx.ComboBoxNameStr):
OwnerDrawnComboBox.__init__(self, parent, win_id, value, pos, size, choices,
style, validator, name)
self.Default = self.GetLabel()
self.Priority = []
## Resets ComboBox to defaults
def Reset(self):
if not self.Count:
self.SetValue(self.Default)
return self.Value == self.Default
return False
## Instruction:
Set ComboBox class default ID to wx.ID_ANY
## Code After:
import wx
from wx.combo import OwnerDrawnComboBox
class ComboBox(OwnerDrawnComboBox):
def __init__(self, parent, win_id=wx.ID_ANY, value=wx.EmptyString, pos=wx.DefaultPosition,
size=wx.DefaultSize, choices=[], style=0, validator=wx.DefaultValidator,
name=wx.ComboBoxNameStr):
OwnerDrawnComboBox.__init__(self, parent, win_id, value, pos, size, choices,
style, validator, name)
self.Default = self.GetLabel()
self.Priority = []
## Resets ComboBox to defaults
def Reset(self):
if not self.Count:
self.SetValue(self.Default)
return self.Value == self.Default
return False
|
import wx
from wx.combo import OwnerDrawnComboBox
class ComboBox(OwnerDrawnComboBox):
- def __init__(self, parent, win_id, value=wx.EmptyString, pos=wx.DefaultPosition,
+ def __init__(self, parent, win_id=wx.ID_ANY, value=wx.EmptyString, pos=wx.DefaultPosition,
? ++++++++++
size=wx.DefaultSize, choices=[], style=0, validator=wx.DefaultValidator,
name=wx.ComboBoxNameStr):
OwnerDrawnComboBox.__init__(self, parent, win_id, value, pos, size, choices,
style, validator, name)
self.Default = self.GetLabel()
self.Priority = []
## Resets ComboBox to defaults
def Reset(self):
if not self.Count:
self.SetValue(self.Default)
return self.Value == self.Default
return False
|
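Because only the keyword default changed, existing call sites keep working and new ones can simply omit the window id. A hypothetical usage sketch (the import path is inferred from the file path in the record):

```python
import wx
from custom.combo import ComboBox  # path assumed from source/custom/combo.py

app = wx.App(False)
frame = wx.Frame(None)
combo = ComboBox(frame, choices=['low', 'normal', 'high'])  # no explicit id needed
combo.Reset()
```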
6c12f97bfed8b8a4749f75e1a508caf0ea310423
|
docker/update-production.py
|
docker/update-production.py
|
import argparse
import subprocess
import json
import sys
parser = argparse.ArgumentParser()
args = parser.parse_args()
def info(msg):
sys.stdout.write('* {}\n'.format(msg))
sys.stdout.flush()
info('Determining current production details...')
output = subprocess.check_output(['tutum', 'service', 'inspect', 'lb.muzhack-staging']).decode(
'utf-8')
data = json.loads(output)
linked_service = data['linked_to_service'][0]['name']
info('Currently linked service is \'{}\''.format(linked_service))
if linked_service == 'muzhack-green':
link_to = 'muzhack-blue'
else:
assert linked_service == 'muzhack-blue'
link_to = 'muzhack-green'
info('Redeploying service \'{}\'...'.format(link_to))
subprocess.check_call(['tutum', 'service', 'redeploy', '--sync', link_to], stdout=subprocess.PIPE)
info('Linking to service \'{}\'...'.format(link_to))
subprocess.check_call(['tutum', 'service', 'set', '--link-service', '{0}:{0}'.format(link_to),
'--sync', 'lb'], stdout=subprocess.PIPE)
info('Successfully switched production service to {}'.format(link_to))
|
import argparse
import subprocess
import json
import sys
parser = argparse.ArgumentParser()
args = parser.parse_args()
def info(msg):
sys.stdout.write('* {}\n'.format(msg))
sys.stdout.flush()
info('Determining current production details...')
output = subprocess.check_output(['tutum', 'service', 'inspect', 'lb.muzhack-staging']).decode(
'utf-8')
data = json.loads(output)
linked_service = data['linked_to_service'][0]['name']
info('Currently linked service is \'{}\''.format(linked_service))
if linked_service == 'muzhack-green':
link_to = 'muzhack-blue'
else:
assert linked_service == 'muzhack-blue'
link_to = 'muzhack-green'
info('Redeploying service \'{}\'...'.format(link_to))
subprocess.check_call(['tutum', 'service', 'redeploy', '--sync', link_to], stdout=subprocess.PIPE)
info('Linking to service \'{}\'...'.format(link_to))
subprocess.check_call(['tutum', 'service', 'set', '--link-service', '{0}:{0}'.format(link_to),
'--sync', 'lb.muzhack-staging'], stdout=subprocess.PIPE)
info('Successfully switched production service to {}'.format(link_to))
|
Make sure to update correct load balancer
|
Make sure to update correct load balancer
|
Python
|
mit
|
muzhack/musitechhub,muzhack/musitechhub,muzhack/muzhack,muzhack/muzhack,muzhack/musitechhub,muzhack/muzhack,muzhack/musitechhub,muzhack/muzhack
|
import argparse
import subprocess
import json
import sys
parser = argparse.ArgumentParser()
args = parser.parse_args()
def info(msg):
sys.stdout.write('* {}\n'.format(msg))
sys.stdout.flush()
info('Determining current production details...')
output = subprocess.check_output(['tutum', 'service', 'inspect', 'lb.muzhack-staging']).decode(
'utf-8')
data = json.loads(output)
linked_service = data['linked_to_service'][0]['name']
info('Currently linked service is \'{}\''.format(linked_service))
if linked_service == 'muzhack-green':
link_to = 'muzhack-blue'
else:
assert linked_service == 'muzhack-blue'
link_to = 'muzhack-green'
info('Redeploying service \'{}\'...'.format(link_to))
subprocess.check_call(['tutum', 'service', 'redeploy', '--sync', link_to], stdout=subprocess.PIPE)
info('Linking to service \'{}\'...'.format(link_to))
subprocess.check_call(['tutum', 'service', 'set', '--link-service', '{0}:{0}'.format(link_to),
- '--sync', 'lb'], stdout=subprocess.PIPE)
+ '--sync', 'lb.muzhack-staging'], stdout=subprocess.PIPE)
info('Successfully switched production service to {}'.format(link_to))
|
Make sure to update correct load balancer
|
## Code Before:
import argparse
import subprocess
import json
import sys
parser = argparse.ArgumentParser()
args = parser.parse_args()
def info(msg):
sys.stdout.write('* {}\n'.format(msg))
sys.stdout.flush()
info('Determining current production details...')
output = subprocess.check_output(['tutum', 'service', 'inspect', 'lb.muzhack-staging']).decode(
'utf-8')
data = json.loads(output)
linked_service = data['linked_to_service'][0]['name']
info('Currently linked service is \'{}\''.format(linked_service))
if linked_service == 'muzhack-green':
link_to = 'muzhack-blue'
else:
assert linked_service == 'muzhack-blue'
link_to = 'muzhack-green'
info('Redeploying service \'{}\'...'.format(link_to))
subprocess.check_call(['tutum', 'service', 'redeploy', '--sync', link_to], stdout=subprocess.PIPE)
info('Linking to service \'{}\'...'.format(link_to))
subprocess.check_call(['tutum', 'service', 'set', '--link-service', '{0}:{0}'.format(link_to),
'--sync', 'lb'], stdout=subprocess.PIPE)
info('Successfully switched production service to {}'.format(link_to))
## Instruction:
Make sure to update correct load balancer
## Code After:
import argparse
import subprocess
import json
import sys
parser = argparse.ArgumentParser()
args = parser.parse_args()
def info(msg):
sys.stdout.write('* {}\n'.format(msg))
sys.stdout.flush()
info('Determining current production details...')
output = subprocess.check_output(['tutum', 'service', 'inspect', 'lb.muzhack-staging']).decode(
'utf-8')
data = json.loads(output)
linked_service = data['linked_to_service'][0]['name']
info('Currently linked service is \'{}\''.format(linked_service))
if linked_service == 'muzhack-green':
link_to = 'muzhack-blue'
else:
assert linked_service == 'muzhack-blue'
link_to = 'muzhack-green'
info('Redeploying service \'{}\'...'.format(link_to))
subprocess.check_call(['tutum', 'service', 'redeploy', '--sync', link_to], stdout=subprocess.PIPE)
info('Linking to service \'{}\'...'.format(link_to))
subprocess.check_call(['tutum', 'service', 'set', '--link-service', '{0}:{0}'.format(link_to),
'--sync', 'lb.muzhack-staging'], stdout=subprocess.PIPE)
info('Successfully switched production service to {}'.format(link_to))
|
import argparse
import subprocess
import json
import sys
parser = argparse.ArgumentParser()
args = parser.parse_args()
def info(msg):
sys.stdout.write('* {}\n'.format(msg))
sys.stdout.flush()
info('Determining current production details...')
output = subprocess.check_output(['tutum', 'service', 'inspect', 'lb.muzhack-staging']).decode(
'utf-8')
data = json.loads(output)
linked_service = data['linked_to_service'][0]['name']
info('Currently linked service is \'{}\''.format(linked_service))
if linked_service == 'muzhack-green':
link_to = 'muzhack-blue'
else:
assert linked_service == 'muzhack-blue'
link_to = 'muzhack-green'
info('Redeploying service \'{}\'...'.format(link_to))
subprocess.check_call(['tutum', 'service', 'redeploy', '--sync', link_to], stdout=subprocess.PIPE)
info('Linking to service \'{}\'...'.format(link_to))
subprocess.check_call(['tutum', 'service', 'set', '--link-service', '{0}:{0}'.format(link_to),
- '--sync', 'lb'], stdout=subprocess.PIPE)
+ '--sync', 'lb.muzhack-staging'], stdout=subprocess.PIPE)
? ++++++++++++++++
info('Successfully switched production service to {}'.format(link_to))
|
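The script is a blue/green switch: inspect which colour the load balancer currently points at, redeploy the idle one, then relink. The fix makes the relink target the same `lb.muzhack-staging` service that was inspected. The toggle itself is small enough to pull out into a helper; the function name below is invented:

```python
def next_service(currently_linked):
    # Flip between the two deployment colours used by the script above.
    if currently_linked == 'muzhack-green':
        return 'muzhack-blue'
    assert currently_linked == 'muzhack-blue'
    return 'muzhack-green'
```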
1f76df1fe6b77850f8741b2f52b2509ce204f93f
|
stats-to-datadog.py
|
stats-to-datadog.py
|
import urllib2
import json
import sys
from statsd import statsd
statsd.connect('localhost', 8125)
topology = sys.argv[1]
state = urllib2.urlopen(
"http://localhost:9000/api/status?toporoot={}&topic={}".format(
sys.argv[2], sys.argv[3]
)
).read()
data = json.loads(state)
amount = 0
for looplord in data:
if looplord['amount'] is not None:
amount += looplord['amount']
statsd.histogram(
'razor.kafkamon.total_delta',
amount, tags = [ "topology:{}".format(topology) ]
)
|
import urllib2
import json
import sys
from statsd import statsd
statsd.connect('localhost', 8125)
topology = sys.argv[1]
toporoot = sys.argv[2]
topic = sys.argv[3]
state = urllib2.urlopen(
"http://localhost:9000/api/status?toporoot={}&topic={}".format(
toporoot, topic
)
).read()
data = json.loads(state)
amount = 0
for looplord in data:
if looplord['amount'] is not None:
statsd.histogram(
'razor.kafkamon.topology.partition',
amount,
tags = [
"topic:{}".format(sys),
"topology:{}".format(topology),
"partition:{}".format(looplord['partition'])
]
)
amount += looplord['amount']
statsd.histogram(
'razor.kafkamon.total_delta',
amount, tags = [ "topology:{}".format(topology) ]
)
|
Add stats for each partition.
|
Add stats for each partition.
|
Python
|
mit
|
evertrue/capillary,evertrue/capillary,evertrue/capillary,keenlabs/capillary,evertrue/capillary,keenlabs/capillary,keenlabs/capillary
|
import urllib2
import json
import sys
from statsd import statsd
statsd.connect('localhost', 8125)
topology = sys.argv[1]
+ toporoot = sys.argv[2]
+ topic = sys.argv[3]
state = urllib2.urlopen(
"http://localhost:9000/api/status?toporoot={}&topic={}".format(
- sys.argv[2], sys.argv[3]
+ toporoot, topic
)
).read()
data = json.loads(state)
amount = 0
for looplord in data:
if looplord['amount'] is not None:
+ statsd.histogram(
+ 'razor.kafkamon.topology.partition',
+ amount,
+ tags = [
+ "topic:{}".format(sys),
+ "topology:{}".format(topology),
+ "partition:{}".format(looplord['partition'])
+ ]
+ )
amount += looplord['amount']
statsd.histogram(
'razor.kafkamon.total_delta',
amount, tags = [ "topology:{}".format(topology) ]
)
|
Add stats for each partition.
|
## Code Before:
import urllib2
import json
import sys
from statsd import statsd
statsd.connect('localhost', 8125)
topology = sys.argv[1]
state = urllib2.urlopen(
"http://localhost:9000/api/status?toporoot={}&topic={}".format(
sys.argv[2], sys.argv[3]
)
).read()
data = json.loads(state)
amount = 0
for looplord in data:
if looplord['amount'] is not None:
amount += looplord['amount']
statsd.histogram(
'razor.kafkamon.total_delta',
amount, tags = [ "topology:{}".format(topology) ]
)
## Instruction:
Add stats for each partition.
## Code After:
import urllib2
import json
import sys
from statsd import statsd
statsd.connect('localhost', 8125)
topology = sys.argv[1]
toporoot = sys.argv[2]
topic = sys.argv[3]
state = urllib2.urlopen(
"http://localhost:9000/api/status?toporoot={}&topic={}".format(
toporoot, topic
)
).read()
data = json.loads(state)
amount = 0
for looplord in data:
if looplord['amount'] is not None:
statsd.histogram(
'razor.kafkamon.topology.partition',
amount,
tags = [
"topic:{}".format(sys),
"topology:{}".format(topology),
"partition:{}".format(looplord['partition'])
]
)
amount += looplord['amount']
statsd.histogram(
'razor.kafkamon.total_delta',
amount, tags = [ "topology:{}".format(topology) ]
)
|
import urllib2
import json
import sys
from statsd import statsd
statsd.connect('localhost', 8125)
topology = sys.argv[1]
+ toporoot = sys.argv[2]
+ topic = sys.argv[3]
state = urllib2.urlopen(
"http://localhost:9000/api/status?toporoot={}&topic={}".format(
- sys.argv[2], sys.argv[3]
+ toporoot, topic
)
).read()
data = json.loads(state)
amount = 0
for looplord in data:
if looplord['amount'] is not None:
+ statsd.histogram(
+ 'razor.kafkamon.topology.partition',
+ amount,
+ tags = [
+ "topic:{}".format(sys),
+ "topology:{}".format(topology),
+ "partition:{}".format(looplord['partition'])
+ ]
+ )
amount += looplord['amount']
statsd.histogram(
'razor.kafkamon.total_delta',
amount, tags = [ "topology:{}".format(topology) ]
)
|
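Two details of the new per-partition histogram look unintended, although the record is kept as committed: the topic tag is formatted from the `sys` module rather than the `topic` variable introduced in the same change, and the value recorded is the running total before the current partition is added. A hedged sketch of what was probably meant, reusing the script's own names:

```python
for looplord in data:
    if looplord['amount'] is not None:
        statsd.histogram(
            'razor.kafkamon.topology.partition',
            looplord['amount'],            # this partition's delta, not the running sum
            tags=[
                "topic:{}".format(topic),
                "topology:{}".format(topology),
                "partition:{}".format(looplord['partition']),
            ],
        )
        amount += looplord['amount']
```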
4c04979de66cf5d0858ff00002ef40df196ccd05
|
serfnode/build/handler/handler.py
|
serfnode/build/handler/handler.py
|
import os
from serf_master import SerfHandlerProxy
from base_handler import BaseHandler
try:
from my_handler import MyHandler
except ImportError:
print "Could not import user's handler."
print "Defaulting to dummy handler."
MyHandler = BaseHandler
if __name__ == '__main__':
handler = SerfHandlerProxy()
role = os.environ.get('ROLE') or 'no_role'
handler.register(role, MyHandler())
handler.run()
|
import os
from serf_master import SerfHandlerProxy
from base_handler import BaseHandler
try:
from my_handler import MyHandler
except ImportError:
MyHandler = BaseHandler
if __name__ == '__main__':
handler = SerfHandlerProxy()
role = os.environ.get('ROLE') or 'no_role'
handler.register(role, MyHandler())
handler.run()
|
Remove prints that interfere with json output
|
Remove prints that interfere with json output
|
Python
|
mit
|
waltermoreira/serfnode,waltermoreira/serfnode,waltermoreira/serfnode
|
import os
from serf_master import SerfHandlerProxy
from base_handler import BaseHandler
try:
from my_handler import MyHandler
except ImportError:
- print "Could not import user's handler."
- print "Defaulting to dummy handler."
MyHandler = BaseHandler
if __name__ == '__main__':
handler = SerfHandlerProxy()
role = os.environ.get('ROLE') or 'no_role'
handler.register(role, MyHandler())
handler.run()
|
Remove prints that interfere with json output
|
## Code Before:
import os
from serf_master import SerfHandlerProxy
from base_handler import BaseHandler
try:
from my_handler import MyHandler
except ImportError:
print "Could not import user's handler."
print "Defaulting to dummy handler."
MyHandler = BaseHandler
if __name__ == '__main__':
handler = SerfHandlerProxy()
role = os.environ.get('ROLE') or 'no_role'
handler.register(role, MyHandler())
handler.run()
## Instruction:
Remove prints that interfere with json output
## Code After:
import os
from serf_master import SerfHandlerProxy
from base_handler import BaseHandler
try:
from my_handler import MyHandler
except ImportError:
MyHandler = BaseHandler
if __name__ == '__main__':
handler = SerfHandlerProxy()
role = os.environ.get('ROLE') or 'no_role'
handler.register(role, MyHandler())
handler.run()
|
import os
from serf_master import SerfHandlerProxy
from base_handler import BaseHandler
try:
from my_handler import MyHandler
except ImportError:
- print "Could not import user's handler."
- print "Defaulting to dummy handler."
MyHandler = BaseHandler
if __name__ == '__main__':
handler = SerfHandlerProxy()
role = os.environ.get('ROLE') or 'no_role'
handler.register(role, MyHandler())
handler.run()
|
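Serf captures a handler's stdout as the handler's output, so the removed prints were being mixed into the JSON the handler is expected to emit. If the diagnostic is still useful, writing it to stderr keeps it out of the payload; a sketch:

```python
import sys

try:
    from my_handler import MyHandler
except ImportError:
    sys.stderr.write("Could not import user's handler; using dummy handler.\n")
    MyHandler = BaseHandler
```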
d5a1bfbff18cf129550c2c423beb8db9302c0736
|
tests/redisdl_test.py
|
tests/redisdl_test.py
|
import redisdl
import unittest
import json
import os.path
class RedisdlTest(unittest.TestCase):
def test_roundtrip(self):
path = os.path.join(os.path.dirname(__file__), 'fixtures', 'dump.json')
with open(path) as f:
dump = f.read()
redisdl.loads(dump)
redump = redisdl.dumps()
expected = json.loads(dump)
actual = json.loads(redump)
self.assertEqual(expected, actual)
|
import redisdl
import unittest
import json
import os.path
class RedisdlTest(unittest.TestCase):
def setUp(self):
import redis
self.r = redis.Redis()
for key in self.r.keys('*'):
self.r.delete(key)
def test_roundtrip(self):
path = os.path.join(os.path.dirname(__file__), 'fixtures', 'dump.json')
with open(path) as f:
dump = f.read()
redisdl.loads(dump)
redump = redisdl.dumps()
expected = json.loads(dump)
actual = json.loads(redump)
self.assertEqual(expected, actual)
|
Clear redis data store before running tests
|
Clear redis data store before running tests
|
Python
|
bsd-2-clause
|
hyunchel/redis-dump-load,p/redis-dump-load,hyunchel/redis-dump-load,p/redis-dump-load
|
import redisdl
import unittest
import json
import os.path
class RedisdlTest(unittest.TestCase):
+ def setUp(self):
+ import redis
+ self.r = redis.Redis()
+ for key in self.r.keys('*'):
+ self.r.delete(key)
+
def test_roundtrip(self):
path = os.path.join(os.path.dirname(__file__), 'fixtures', 'dump.json')
with open(path) as f:
dump = f.read()
redisdl.loads(dump)
redump = redisdl.dumps()
expected = json.loads(dump)
actual = json.loads(redump)
self.assertEqual(expected, actual)
|
Clear redis data store before running tests
|
## Code Before:
import redisdl
import unittest
import json
import os.path
class RedisdlTest(unittest.TestCase):
def test_roundtrip(self):
path = os.path.join(os.path.dirname(__file__), 'fixtures', 'dump.json')
with open(path) as f:
dump = f.read()
redisdl.loads(dump)
redump = redisdl.dumps()
expected = json.loads(dump)
actual = json.loads(redump)
self.assertEqual(expected, actual)
## Instruction:
Clear redis data store before running tests
## Code After:
import redisdl
import unittest
import json
import os.path
class RedisdlTest(unittest.TestCase):
def setUp(self):
import redis
self.r = redis.Redis()
for key in self.r.keys('*'):
self.r.delete(key)
def test_roundtrip(self):
path = os.path.join(os.path.dirname(__file__), 'fixtures', 'dump.json')
with open(path) as f:
dump = f.read()
redisdl.loads(dump)
redump = redisdl.dumps()
expected = json.loads(dump)
actual = json.loads(redump)
self.assertEqual(expected, actual)
|
import redisdl
import unittest
import json
import os.path
class RedisdlTest(unittest.TestCase):
+ def setUp(self):
+ import redis
+ self.r = redis.Redis()
+ for key in self.r.keys('*'):
+ self.r.delete(key)
+
def test_roundtrip(self):
path = os.path.join(os.path.dirname(__file__), 'fixtures', 'dump.json')
with open(path) as f:
dump = f.read()
redisdl.loads(dump)
redump = redisdl.dumps()
expected = json.loads(dump)
actual = json.loads(redump)
self.assertEqual(expected, actual)
|
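Deleting the result of `keys('*')` one key at a time does the job, but it issues a round trip per key and clears whatever database the default client happens to point at. A terser equivalent, assuming wiping the whole current database is acceptable for the test run:

```python
def setUp(self):
    import redis
    self.r = redis.Redis()
    self.r.flushdb()  # same effect as deleting every key, in one call
```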
375c55a085dce451146a10b66b3c2d54a9919ed4
|
pipelines/toast_example_dist.py
|
pipelines/toast_example_dist.py
|
import toast
# Split COMM_WORLD into groups of 4 processes each
cm = toast.Comm(world=MPI.COMM_WORLD, groupsize=4)
# Create the distributed data object
dd = toast.Data(comm=cm)
# Each process group appends some observations.
# For this example, each observation is going to have the same
# number of samples, and the same list of detectors. We just
# use the base TOD class, which contains the data directly as
# numpy arrays.
obs_samples = 100
obs_dets = ['detA', 'detB', 'detC']
for i in range(10):
tod = TOD(mpicomm=cm.comm_group, detectors=obs_dets, samples=obs_samples)
ob = {}
ob['id'] = '{}'.format(i)
ob['tod'] = tod
ob['intervals'] = None
ob['baselines'] = None
ob['noise'] = None
dd.obs.append(ob)
# Now at the end we have 4 process groups, each of which is assigned
# 10 observations. Each of these observations has 3 detectors and 100
# samples. So the Data object contains a total of 40 observations and
# 12000 samples.
|
import mpi4py.MPI as MPI
import toast
# Split COMM_WORLD into groups of 4 processes each
cm = toast.Comm(world=MPI.COMM_WORLD, groupsize=4)
# Create the distributed data object
dd = toast.Data(comm=cm)
# Each process group appends some observations.
# For this example, each observation is going to have the same
# number of samples, and the same list of detectors. We just
# use the base TOD class, which contains the data directly as
# numpy arrays.
obs_samples = 100
obs_dets = ['detA', 'detB', 'detC']
for i in range(10):
tod = TOD(mpicomm=cm.comm_group, detectors=obs_dets, samples=obs_samples)
ob = {}
ob['id'] = '{}'.format(i)
ob['tod'] = tod
ob['intervals'] = None
ob['baselines'] = None
ob['noise'] = None
dd.obs.append(ob)
# Now at the end we have 4 process groups, each of which is assigned
# 10 observations. Each of these observations has 3 detectors and 100
# samples. So the Data object contains a total of 40 observations and
# 12000 samples.
|
Fix typo, even though this example is not used for anything.
|
Fix typo, even though this example is not used for anything.
|
Python
|
bsd-2-clause
|
tskisner/pytoast,tskisner/pytoast
|
+
+ import mpi4py.MPI as MPI
import toast
# Split COMM_WORLD into groups of 4 processes each
cm = toast.Comm(world=MPI.COMM_WORLD, groupsize=4)
# Create the distributed data object
dd = toast.Data(comm=cm)
# Each process group appends some observations.
# For this example, each observation is going to have the same
# number of samples, and the same list of detectors. We just
# use the base TOD class, which contains the data directly as
# numpy arrays.
obs_samples = 100
obs_dets = ['detA', 'detB', 'detC']
for i in range(10):
tod = TOD(mpicomm=cm.comm_group, detectors=obs_dets, samples=obs_samples)
ob = {}
ob['id'] = '{}'.format(i)
ob['tod'] = tod
ob['intervals'] = None
ob['baselines'] = None
ob['noise'] = None
dd.obs.append(ob)
# Now at the end we have 4 process groups, each of which is assigned
# 10 observations. Each of these observations has 3 detectors and 100
# samples. So the Data object contains a total of 40 observations and
# 12000 samples.
|
Fix typo, even though this example is not used for anything.
|
## Code Before:
import toast
# Split COMM_WORLD into groups of 4 processes each
cm = toast.Comm(world=MPI.COMM_WORLD, groupsize=4)
# Create the distributed data object
dd = toast.Data(comm=cm)
# Each process group appends some observations.
# For this example, each observation is going to have the same
# number of samples, and the same list of detectors. We just
# use the base TOD class, which contains the data directly as
# numpy arrays.
obs_samples = 100
obs_dets = ['detA', 'detB', 'detC']
for i in range(10):
tod = TOD(mpicomm=cm.comm_group, detectors=obs_dets, samples=obs_samples)
ob = {}
ob['id'] = '{}'.format(i)
ob['tod'] = tod
ob['intervals'] = None
ob['baselines'] = None
ob['noise'] = None
dd.obs.append(ob)
# Now at the end we have 4 process groups, each of which is assigned
# 10 observations. Each of these observations has 3 detectors and 100
# samples. So the Data object contains a total of 40 observations and
# 12000 samples.
## Instruction:
Fix typo, even though this example is not used for anything.
## Code After:
import mpi4py.MPI as MPI
import toast
# Split COMM_WORLD into groups of 4 processes each
cm = toast.Comm(world=MPI.COMM_WORLD, groupsize=4)
# Create the distributed data object
dd = toast.Data(comm=cm)
# Each process group appends some observations.
# For this example, each observation is going to have the same
# number of samples, and the same list of detectors. We just
# use the base TOD class, which contains the data directly as
# numpy arrays.
obs_samples = 100
obs_dets = ['detA', 'detB', 'detC']
for i in range(10):
tod = TOD(mpicomm=cm.comm_group, detectors=obs_dets, samples=obs_samples)
ob = {}
ob['id'] = '{}'.format(i)
ob['tod'] = tod
ob['intervals'] = None
ob['baselines'] = None
ob['noise'] = None
dd.obs.append(ob)
# Now at the end we have 4 process groups, each of which is assigned
# 10 observations. Each of these observations has 3 detectors and 100
# samples. So the Data object contains a total of 40 observations and
# 12000 samples.
|
+
+ import mpi4py.MPI as MPI
import toast
# Split COMM_WORLD into groups of 4 processes each
cm = toast.Comm(world=MPI.COMM_WORLD, groupsize=4)
# Create the distributed data object
dd = toast.Data(comm=cm)
# Each process group appends some observations.
# For this example, each observation is going to have the same
# number of samples, and the same list of detectors. We just
# use the base TOD class, which contains the data directly as
# numpy arrays.
obs_samples = 100
obs_dets = ['detA', 'detB', 'detC']
for i in range(10):
tod = TOD(mpicomm=cm.comm_group, detectors=obs_dets, samples=obs_samples)
ob = {}
ob['id'] = '{}'.format(i)
ob['tod'] = tod
ob['intervals'] = None
ob['baselines'] = None
ob['noise'] = None
dd.obs.append(ob)
# Now at the end we have 4 process groups, each of which is assigned
# 10 observations. Each of these observations has 3 detectors and 100
# samples. So the Data object contains a total of 40 observations and
# 12000 samples.
|
760a952851664232b11ad95acb8884dda59308a4
|
sysrev/widgets.py
|
sysrev/widgets.py
|
from django.forms import widgets
from django.utils.safestring import *
# See static/js/querywidget.js
class QueryWidget(widgets.Textarea):
def __init__(self, attrs=None):
default_attrs = {'class': "queryWidget"}
if attrs:
default_attrs.update(attrs)
super(QueryWidget, self).__init__(default_attrs)
def script(self):
return mark_safe("<script src='/static/js/querywidget.js' type='text/javascript' defer></script>")
def render(self, name, value, attrs=None):
textAreaHtml = super(QueryWidget, self).render(name, value, attrs)
return self.script() + textAreaHtml
|
from django.forms import widgets
from django.utils.safestring import *
# See static/js/querywidget.js
class QueryWidget(widgets.Textarea):
def __init__(self, attrs=None):
default_attrs = {'class': "queryWidget"}
if attrs:
default_attrs.update(attrs)
super(QueryWidget, self).__init__(default_attrs)
def script(self):
return mark_safe("<noscript>Please enable javascript to use the query editor. Without it enabled, you can only "
"use the advanced editor</noscript>"
"<script src='/static/js/querywidget.js' type='text/javascript' defer></script>")
def render(self, name, value, attrs=None):
textAreaHtml = super(QueryWidget, self).render(name, value, attrs)
return self.script() + textAreaHtml
|
Add <noscript> message to indicate that only the advanced query editor is available.
|
Add <noscript> message to indicate that only the advanced query editor is available.
|
Python
|
mit
|
iliawnek/SystematicReview,iliawnek/SystematicReview,iliawnek/SystematicReview,iliawnek/SystematicReview
|
from django.forms import widgets
from django.utils.safestring import *
# See static/js/querywidget.js
class QueryWidget(widgets.Textarea):
def __init__(self, attrs=None):
default_attrs = {'class': "queryWidget"}
if attrs:
default_attrs.update(attrs)
super(QueryWidget, self).__init__(default_attrs)
def script(self):
+ return mark_safe("<noscript>Please enable javascript to use the query editor. Without it enabled, you can only "
+ "use the advanced editor</noscript>"
- return mark_safe("<script src='/static/js/querywidget.js' type='text/javascript' defer></script>")
+ "<script src='/static/js/querywidget.js' type='text/javascript' defer></script>")
def render(self, name, value, attrs=None):
textAreaHtml = super(QueryWidget, self).render(name, value, attrs)
return self.script() + textAreaHtml
|
Add <noscript> message to indicate that only the advanced query editor is available.
|
## Code Before:
from django.forms import widgets
from django.utils.safestring import *
# See static/js/querywidget.js
class QueryWidget(widgets.Textarea):
def __init__(self, attrs=None):
default_attrs = {'class': "queryWidget"}
if attrs:
default_attrs.update(attrs)
super(QueryWidget, self).__init__(default_attrs)
def script(self):
return mark_safe("<script src='/static/js/querywidget.js' type='text/javascript' defer></script>")
def render(self, name, value, attrs=None):
textAreaHtml = super(QueryWidget, self).render(name, value, attrs)
return self.script() + textAreaHtml
## Instruction:
Add <noscript> message to indicate that only the advanced query editor is available.
## Code After:
from django.forms import widgets
from django.utils.safestring import *
# See static/js/querywidget.js
class QueryWidget(widgets.Textarea):
def __init__(self, attrs=None):
default_attrs = {'class': "queryWidget"}
if attrs:
default_attrs.update(attrs)
super(QueryWidget, self).__init__(default_attrs)
def script(self):
return mark_safe("<noscript>Please enable javascript to use the query editor. Without it enabled, you can only "
"use the advanced editor</noscript>"
"<script src='/static/js/querywidget.js' type='text/javascript' defer></script>")
def render(self, name, value, attrs=None):
textAreaHtml = super(QueryWidget, self).render(name, value, attrs)
return self.script() + textAreaHtml
|
from django.forms import widgets
from django.utils.safestring import *
# See static/js/querywidget.js
class QueryWidget(widgets.Textarea):
def __init__(self, attrs=None):
default_attrs = {'class': "queryWidget"}
if attrs:
default_attrs.update(attrs)
super(QueryWidget, self).__init__(default_attrs)
def script(self):
+ return mark_safe("<noscript>Please enable javascript to use the query editor. Without it enabled, you can only "
+ "use the advanced editor</noscript>"
- return mark_safe("<script src='/static/js/querywidget.js' type='text/javascript' defer></script>")
? ------ ^^^^^^^^^^
+ "<script src='/static/js/querywidget.js' type='text/javascript' defer></script>")
? ^^^^^^^^^^^^^^^^
def render(self, name, value, attrs=None):
textAreaHtml = super(QueryWidget, self).render(name, value, attrs)
return self.script() + textAreaHtml
|
cda417454578cb8efe315850b06b047239c7796d
|
Commands/Leave.py
|
Commands/Leave.py
|
from IRCMessage import IRCMessage
from IRCResponse import IRCResponse, ResponseType
from CommandInterface import CommandInterface
import GlobalVars
class Leave(CommandInterface):
triggers = ['leave', 'gtfo']
help = "leave/gtfo - makes the bot leave the current channel"
def execute(self, message):
"""
@type message: IRCMessage
"""
if message.User.Name not in GlobalVars.admins:
return IRCResponse(ResponseType.Say, 'Only my admins can tell me to %s' % message.Command, message.ReplyTo)
if len(message.ParameterList) > 0:
return IRCResponse(ResponseType.Raw, 'PART %s :%s' % (message.ReplyTo, message.Parameters), '')
else:
return IRCResponse(ResponseType.Raw, 'PART %s :toodles!' % message.ReplyTo, '')
|
from IRCMessage import IRCMessage
from IRCResponse import IRCResponse, ResponseType
from CommandInterface import CommandInterface
import GlobalVars
class Leave(CommandInterface):
triggers = ['leave', 'gtfo']
help = "leave/gtfo - makes the bot leave the current channel"
def execute(self, message):
"""
@type message: IRCMessage
"""
if message.User.Name not in GlobalVars.admins:
if message.Command == triggers[1]:
return IRCResponse(ResponseType.Say, 'Wow, rude? Only my admins can tell me to {}'.format(message.Command), message.ReplyTo)
else:
return IRCResponse(ResponseType.Say, 'Only my admins can tell me to {}'.format(message.Command), message.ReplyTo)
if len(message.ParameterList) > 0:
return IRCResponse(ResponseType.Raw, 'PART {} :{}'.format(message.ReplyTo, message.Parameters), '')
else:
return IRCResponse(ResponseType.Raw, 'PART {} :toodles!'.format(message.ReplyTo), '')
|
Update % to .format, add response to gtfo command
|
Update % to .format, add response to gtfo command
|
Python
|
mit
|
MatthewCox/PyMoronBot,DesertBot/DesertBot
|
from IRCMessage import IRCMessage
from IRCResponse import IRCResponse, ResponseType
from CommandInterface import CommandInterface
import GlobalVars
class Leave(CommandInterface):
triggers = ['leave', 'gtfo']
help = "leave/gtfo - makes the bot leave the current channel"
def execute(self, message):
"""
@type message: IRCMessage
"""
if message.User.Name not in GlobalVars.admins:
+ if message.Command == triggers[1]:
+ return IRCResponse(ResponseType.Say, 'Wow, rude? Only my admins can tell me to {}'.format(message.Command), message.ReplyTo)
+ else:
- return IRCResponse(ResponseType.Say, 'Only my admins can tell me to %s' % message.Command, message.ReplyTo)
+ return IRCResponse(ResponseType.Say, 'Only my admins can tell me to {}'.format(message.Command), message.ReplyTo)
if len(message.ParameterList) > 0:
- return IRCResponse(ResponseType.Raw, 'PART %s :%s' % (message.ReplyTo, message.Parameters), '')
+ return IRCResponse(ResponseType.Raw, 'PART {} :{}'.format(message.ReplyTo, message.Parameters), '')
else:
- return IRCResponse(ResponseType.Raw, 'PART %s :toodles!' % message.ReplyTo, '')
+ return IRCResponse(ResponseType.Raw, 'PART {} :toodles!'.format(message.ReplyTo), '')
|
Update % to .format, add response to gtfo command
|
## Code Before:
from IRCMessage import IRCMessage
from IRCResponse import IRCResponse, ResponseType
from CommandInterface import CommandInterface
import GlobalVars
class Leave(CommandInterface):
triggers = ['leave', 'gtfo']
help = "leave/gtfo - makes the bot leave the current channel"
def execute(self, message):
"""
@type message: IRCMessage
"""
if message.User.Name not in GlobalVars.admins:
return IRCResponse(ResponseType.Say, 'Only my admins can tell me to %s' % message.Command, message.ReplyTo)
if len(message.ParameterList) > 0:
return IRCResponse(ResponseType.Raw, 'PART %s :%s' % (message.ReplyTo, message.Parameters), '')
else:
return IRCResponse(ResponseType.Raw, 'PART %s :toodles!' % message.ReplyTo, '')
## Instruction:
Update % to .format, add response to gtfo command
## Code After:
from IRCMessage import IRCMessage
from IRCResponse import IRCResponse, ResponseType
from CommandInterface import CommandInterface
import GlobalVars
class Leave(CommandInterface):
triggers = ['leave', 'gtfo']
help = "leave/gtfo - makes the bot leave the current channel"
def execute(self, message):
"""
@type message: IRCMessage
"""
if message.User.Name not in GlobalVars.admins:
if message.Command == triggers[1]:
return IRCResponse(ResponseType.Say, 'Wow, rude? Only my admins can tell me to {}'.format(message.Command), message.ReplyTo)
else:
return IRCResponse(ResponseType.Say, 'Only my admins can tell me to {}'.format(message.Command), message.ReplyTo)
if len(message.ParameterList) > 0:
return IRCResponse(ResponseType.Raw, 'PART {} :{}'.format(message.ReplyTo, message.Parameters), '')
else:
return IRCResponse(ResponseType.Raw, 'PART {} :toodles!'.format(message.ReplyTo), '')
|
from IRCMessage import IRCMessage
from IRCResponse import IRCResponse, ResponseType
from CommandInterface import CommandInterface
import GlobalVars
class Leave(CommandInterface):
triggers = ['leave', 'gtfo']
help = "leave/gtfo - makes the bot leave the current channel"
def execute(self, message):
"""
@type message: IRCMessage
"""
if message.User.Name not in GlobalVars.admins:
+ if message.Command == triggers[1]:
+ return IRCResponse(ResponseType.Say, 'Wow, rude? Only my admins can tell me to {}'.format(message.Command), message.ReplyTo)
+ else:
- return IRCResponse(ResponseType.Say, 'Only my admins can tell me to %s' % message.Command, message.ReplyTo)
? ^^ ^^^
+ return IRCResponse(ResponseType.Say, 'Only my admins can tell me to {}'.format(message.Command), message.ReplyTo)
? ++++ ^^ ^^^^^^^^ +
if len(message.ParameterList) > 0:
- return IRCResponse(ResponseType.Raw, 'PART %s :%s' % (message.ReplyTo, message.Parameters), '')
? ^^ ^^ ^^^
+ return IRCResponse(ResponseType.Raw, 'PART {} :{}'.format(message.ReplyTo, message.Parameters), '')
? ^^ ^^ ^^^^^^^
else:
- return IRCResponse(ResponseType.Raw, 'PART %s :toodles!' % message.ReplyTo, '')
? ^^ ^^^
+ return IRCResponse(ResponseType.Raw, 'PART {} :toodles!'.format(message.ReplyTo), '')
? ^^ ^^^^^^^^ +
|
7e080edea2139c5cce907f4d752320943b044ac7
|
game.py
|
game.py
|
people = '123456'
room = 'abcdef'
# murder configuration
# who was where
# who is the murderer
# current configuration
# who was where
# player location
|
import random
people = '123456'
room = 'abcdef'
# murder configuration
# who was where
# who is the murderer
# current configuration
# who was where
# player location
murder_config_people = list(people)
random.shuffle(murder_config_people)
murder_location = random.choice(room)
murderer = people[room.find(murder_location)]
current_config_people = list(people)
random.shuffle(current_config_people)
current_location = random.choice(room)
print( current_config_people)
print( current_location)
|
Add random people and rooms
|
Add random people and rooms
|
Python
|
mit
|
tomviner/dojo-adventure-game
|
+ import random
people = '123456'
room = 'abcdef'
# murder configuration
# who was where
# who is the murderer
# current configuration
# who was where
# player location
+ murder_config_people = list(people)
+ random.shuffle(murder_config_people)
+ murder_location = random.choice(room)
+ murderer = people[room.find(murder_location)]
+
+ current_config_people = list(people)
+ random.shuffle(current_config_people)
+ current_location = random.choice(room)
+
+ print( current_config_people)
+ print( current_location)
+
|
Add random people and rooms
|
## Code Before:
people = '123456'
room = 'abcdef'
# murder configuration
# who was where
# who is the murderer
# current configuration
# who was where
# player location
## Instruction:
Add random people and rooms
## Code After:
import random
people = '123456'
room = 'abcdef'
# murder configuration
# who was where
# who is the murderer
# current configuration
# who was where
# player location
murder_config_people = list(people)
random.shuffle(murder_config_people)
murder_location = random.choice(room)
murderer = people[room.find(murder_location)]
current_config_people = list(people)
random.shuffle(current_config_people)
current_location = random.choice(room)
print( current_config_people)
print( current_location)
|
+ import random
people = '123456'
room = 'abcdef'
# murder configuration
# who was where
# who is the murderer
# current configuration
# who was where
# player location
+ murder_config_people = list(people)
+ random.shuffle(murder_config_people)
+ murder_location = random.choice(room)
+ murderer = people[room.find(murder_location)]
+
+
+ current_config_people = list(people)
+ random.shuffle(current_config_people)
+ current_location = random.choice(room)
+
+ print( current_config_people)
+ print( current_location)
|
a3811c7ba8ac59853002e392d29ab4b3800bf096
|
src/test/testlexer.py
|
src/test/testlexer.py
|
from cStringIO import StringIO
from nose.tools import *
from parse import EeyoreLexer
def _lex( string ):
return list( EeyoreLexer.Lexer( StringIO( string ) ) )
def _assert_token( token, text, tp, line = None, col = None ):
assert_equal( token.getText(), text )
assert_equal( token.getType(), tp )
if line is not None:
assert_equal( token.getLine(), line )
if col is not None:
assert_equal( token.getColumn(), col )
def test_hello_world():
tokens = _lex( """print( "Hello, world!" )""" )
_assert_token( tokens[0], "print", EeyoreLexer.SYMBOL, 1, 1 )
_assert_token( tokens[1], "(", EeyoreLexer.LPAREN, 1, 6 )
_assert_token( tokens[2], "Hello, world!", EeyoreLexer.STRING, 1, 8 )
_assert_token( tokens[3], ")", EeyoreLexer.RPAREN, 1, 24 )
assert_equal( len( tokens ), 4 )
|
from cStringIO import StringIO
from nose.tools import *
from parse import EeyoreLexer
def _lex( string ):
return list( EeyoreLexer.Lexer( StringIO( string ) ) )
def _assert_token( token, text, tp, line = None, col = None ):
assert_equal( token.getText(), text )
assert_equal( token.getType(), tp )
if line is not None:
assert_equal( token.getLine(), line )
if col is not None:
assert_equal( token.getColumn(), col )
def test_hello_world():
tokens = _lex( """print( "Hello, world!" )""" )
_assert_token( tokens[0], "print", EeyoreLexer.SYMBOL, 1, 1 )
_assert_token( tokens[1], "(", EeyoreLexer.LPAREN, 1, 6 )
_assert_token( tokens[2], "Hello, world!", EeyoreLexer.STRING, 1, 8 )
_assert_token( tokens[3], ")", EeyoreLexer.RPAREN, 1, 24 )
assert_equal( len( tokens ), 4 )
def test_import():
tokens = _lex( """
import a
print()
""" )
_assert_token( tokens[0], "import", EeyoreLexer.SYMBOL, 2, 1 )
_assert_token( tokens[1], "a", EeyoreLexer.SYMBOL, 2, 8 )
_assert_token( tokens[2], "print", EeyoreLexer.SYMBOL, 4, 1 )
_assert_token( tokens[3], "(", EeyoreLexer.LPAREN, 4, 6 )
_assert_token( tokens[4], ")", EeyoreLexer.RPAREN, 4, 7 )
assert_equal( len( tokens ), 5 )
|
Add a test for lexing an import statement.
|
Add a test for lexing an import statement.
|
Python
|
mit
|
andybalaam/pepper,andybalaam/pepper,andybalaam/pepper,andybalaam/pepper,andybalaam/pepper
|
from cStringIO import StringIO
from nose.tools import *
from parse import EeyoreLexer
def _lex( string ):
return list( EeyoreLexer.Lexer( StringIO( string ) ) )
def _assert_token( token, text, tp, line = None, col = None ):
assert_equal( token.getText(), text )
assert_equal( token.getType(), tp )
if line is not None:
assert_equal( token.getLine(), line )
if col is not None:
assert_equal( token.getColumn(), col )
def test_hello_world():
tokens = _lex( """print( "Hello, world!" )""" )
_assert_token( tokens[0], "print", EeyoreLexer.SYMBOL, 1, 1 )
_assert_token( tokens[1], "(", EeyoreLexer.LPAREN, 1, 6 )
_assert_token( tokens[2], "Hello, world!", EeyoreLexer.STRING, 1, 8 )
_assert_token( tokens[3], ")", EeyoreLexer.RPAREN, 1, 24 )
assert_equal( len( tokens ), 4 )
+ def test_import():
+ tokens = _lex( """
+ import a
+
+ print()
+ """ )
+
+ _assert_token( tokens[0], "import", EeyoreLexer.SYMBOL, 2, 1 )
+ _assert_token( tokens[1], "a", EeyoreLexer.SYMBOL, 2, 8 )
+ _assert_token( tokens[2], "print", EeyoreLexer.SYMBOL, 4, 1 )
+ _assert_token( tokens[3], "(", EeyoreLexer.LPAREN, 4, 6 )
+ _assert_token( tokens[4], ")", EeyoreLexer.RPAREN, 4, 7 )
+
+ assert_equal( len( tokens ), 5 )
+
+
+
|
Add a test for lexing an import statement.
|
## Code Before:
from cStringIO import StringIO
from nose.tools import *
from parse import EeyoreLexer
def _lex( string ):
return list( EeyoreLexer.Lexer( StringIO( string ) ) )
def _assert_token( token, text, tp, line = None, col = None ):
assert_equal( token.getText(), text )
assert_equal( token.getType(), tp )
if line is not None:
assert_equal( token.getLine(), line )
if col is not None:
assert_equal( token.getColumn(), col )
def test_hello_world():
tokens = _lex( """print( "Hello, world!" )""" )
_assert_token( tokens[0], "print", EeyoreLexer.SYMBOL, 1, 1 )
_assert_token( tokens[1], "(", EeyoreLexer.LPAREN, 1, 6 )
_assert_token( tokens[2], "Hello, world!", EeyoreLexer.STRING, 1, 8 )
_assert_token( tokens[3], ")", EeyoreLexer.RPAREN, 1, 24 )
assert_equal( len( tokens ), 4 )
## Instruction:
Add a test for lexing an import statement.
## Code After:
from cStringIO import StringIO
from nose.tools import *
from parse import EeyoreLexer
def _lex( string ):
return list( EeyoreLexer.Lexer( StringIO( string ) ) )
def _assert_token( token, text, tp, line = None, col = None ):
assert_equal( token.getText(), text )
assert_equal( token.getType(), tp )
if line is not None:
assert_equal( token.getLine(), line )
if col is not None:
assert_equal( token.getColumn(), col )
def test_hello_world():
tokens = _lex( """print( "Hello, world!" )""" )
_assert_token( tokens[0], "print", EeyoreLexer.SYMBOL, 1, 1 )
_assert_token( tokens[1], "(", EeyoreLexer.LPAREN, 1, 6 )
_assert_token( tokens[2], "Hello, world!", EeyoreLexer.STRING, 1, 8 )
_assert_token( tokens[3], ")", EeyoreLexer.RPAREN, 1, 24 )
assert_equal( len( tokens ), 4 )
def test_import():
tokens = _lex( """
import a
print()
""" )
_assert_token( tokens[0], "import", EeyoreLexer.SYMBOL, 2, 1 )
_assert_token( tokens[1], "a", EeyoreLexer.SYMBOL, 2, 8 )
_assert_token( tokens[2], "print", EeyoreLexer.SYMBOL, 4, 1 )
_assert_token( tokens[3], "(", EeyoreLexer.LPAREN, 4, 6 )
_assert_token( tokens[4], ")", EeyoreLexer.RPAREN, 4, 7 )
assert_equal( len( tokens ), 5 )
|
from cStringIO import StringIO
from nose.tools import *
from parse import EeyoreLexer
def _lex( string ):
return list( EeyoreLexer.Lexer( StringIO( string ) ) )
def _assert_token( token, text, tp, line = None, col = None ):
assert_equal( token.getText(), text )
assert_equal( token.getType(), tp )
if line is not None:
assert_equal( token.getLine(), line )
if col is not None:
assert_equal( token.getColumn(), col )
def test_hello_world():
tokens = _lex( """print( "Hello, world!" )""" )
_assert_token( tokens[0], "print", EeyoreLexer.SYMBOL, 1, 1 )
_assert_token( tokens[1], "(", EeyoreLexer.LPAREN, 1, 6 )
_assert_token( tokens[2], "Hello, world!", EeyoreLexer.STRING, 1, 8 )
_assert_token( tokens[3], ")", EeyoreLexer.RPAREN, 1, 24 )
assert_equal( len( tokens ), 4 )
+
+ def test_import():
+ tokens = _lex( """
+ import a
+
+ print()
+ """ )
+
+ _assert_token( tokens[0], "import", EeyoreLexer.SYMBOL, 2, 1 )
+ _assert_token( tokens[1], "a", EeyoreLexer.SYMBOL, 2, 8 )
+ _assert_token( tokens[2], "print", EeyoreLexer.SYMBOL, 4, 1 )
+ _assert_token( tokens[3], "(", EeyoreLexer.LPAREN, 4, 6 )
+ _assert_token( tokens[4], ")", EeyoreLexer.RPAREN, 4, 7 )
+
+ assert_equal( len( tokens ), 5 )
+
+
|
3f2d27f63c1cfe2cc4616a4314420fa23daca487
|
django_lightweight_queue/task.py
|
django_lightweight_queue/task.py
|
from .job import Job
from .utils import get_backend
from . import app_settings
class task(object):
def __init__(self, queue='default', timeout=None, sigkill_on_stop=False):
self.queue = queue
self.timeout = timeout
self.sigkill_on_stop = sigkill_on_stop
app_settings.WORKERS.setdefault(self.queue, 1)
def __call__(self, fn):
return TaskWrapper(fn, self.queue, self.timeout, self.sigkill_on_stop)
class TaskWrapper(object):
def __init__(self, fn, queue, timeout, sigkill_on_stop):
self.fn = fn
self.queue = queue
self.timeout = timeout
self.sigkill_on_stop = sigkill_on_stop
self.path = '%s.%s' % (fn.__module__, fn.__name__)
def __repr__(self):
return "<TaskWrapper: %s>" % self.path
def __call__(self, *args, **kwargs):
# Allow us to override which queue at the last moment
queue = kwargs.pop('django_lightweight_queue_queue', self.queue)
job = Job(self.path, args, kwargs, self.timeout, self.sigkill_on_stop)
job.validate()
get_backend().enqueue(job, queue)
|
from .job import Job
from .utils import get_backend
from . import app_settings
class task(object):
def __init__(self, queue='default', timeout=None, sigkill_on_stop=False):
self.queue = queue
self.timeout = timeout
self.sigkill_on_stop = sigkill_on_stop
app_settings.WORKERS.setdefault(self.queue, 1)
def __call__(self, fn):
return TaskWrapper(fn, self.queue, self.timeout, self.sigkill_on_stop)
class TaskWrapper(object):
def __init__(self, fn, queue, timeout, sigkill_on_stop):
self.fn = fn
self.queue = queue
self.timeout = timeout
self.sigkill_on_stop = sigkill_on_stop
self.path = '%s.%s' % (fn.__module__, fn.__name__)
def __repr__(self):
return "<TaskWrapper: %s>" % self.path
def __call__(self, *args, **kwargs):
# Allow us to override the default values dynamically
queue = kwargs.pop('django_lightweight_queue_queue', self.queue)
timeout = kwargs.pop('django_lightweight_queue_timeout', self.timeout)
sigkill_on_stop = kwargs.pop(
'django_lightweight_queue_sigkill_on_stop',
self.sigkill_on_stop,
)
job = Job(self.path, args, kwargs, timeout, sigkill_on_stop)
job.validate()
get_backend().enqueue(job, queue)
|
Allow overriding timeout and sigkill_on_stop too.
|
Allow overriding timeout and sigkill_on_stop too.
Signed-off-by: Chris Lamb <[email protected]>
|
Python
|
bsd-3-clause
|
thread/django-lightweight-queue,prophile/django-lightweight-queue,prophile/django-lightweight-queue,thread/django-lightweight-queue,lamby/django-lightweight-queue
|
from .job import Job
from .utils import get_backend
from . import app_settings
class task(object):
def __init__(self, queue='default', timeout=None, sigkill_on_stop=False):
self.queue = queue
self.timeout = timeout
self.sigkill_on_stop = sigkill_on_stop
app_settings.WORKERS.setdefault(self.queue, 1)
def __call__(self, fn):
return TaskWrapper(fn, self.queue, self.timeout, self.sigkill_on_stop)
class TaskWrapper(object):
def __init__(self, fn, queue, timeout, sigkill_on_stop):
self.fn = fn
self.queue = queue
self.timeout = timeout
self.sigkill_on_stop = sigkill_on_stop
self.path = '%s.%s' % (fn.__module__, fn.__name__)
def __repr__(self):
return "<TaskWrapper: %s>" % self.path
def __call__(self, *args, **kwargs):
- # Allow us to override which queue at the last moment
+ # Allow us to override the default values dynamically
queue = kwargs.pop('django_lightweight_queue_queue', self.queue)
+ timeout = kwargs.pop('django_lightweight_queue_timeout', self.timeout)
+ sigkill_on_stop = kwargs.pop(
+ 'django_lightweight_queue_sigkill_on_stop',
+ self.sigkill_on_stop,
+ )
- job = Job(self.path, args, kwargs, self.timeout, self.sigkill_on_stop)
+ job = Job(self.path, args, kwargs, timeout, sigkill_on_stop)
job.validate()
get_backend().enqueue(job, queue)
|
Allow overriding timeout and sigkill_on_stop too.
|
## Code Before:
from .job import Job
from .utils import get_backend
from . import app_settings
class task(object):
def __init__(self, queue='default', timeout=None, sigkill_on_stop=False):
self.queue = queue
self.timeout = timeout
self.sigkill_on_stop = sigkill_on_stop
app_settings.WORKERS.setdefault(self.queue, 1)
def __call__(self, fn):
return TaskWrapper(fn, self.queue, self.timeout, self.sigkill_on_stop)
class TaskWrapper(object):
def __init__(self, fn, queue, timeout, sigkill_on_stop):
self.fn = fn
self.queue = queue
self.timeout = timeout
self.sigkill_on_stop = sigkill_on_stop
self.path = '%s.%s' % (fn.__module__, fn.__name__)
def __repr__(self):
return "<TaskWrapper: %s>" % self.path
def __call__(self, *args, **kwargs):
# Allow us to override which queue at the last moment
queue = kwargs.pop('django_lightweight_queue_queue', self.queue)
job = Job(self.path, args, kwargs, self.timeout, self.sigkill_on_stop)
job.validate()
get_backend().enqueue(job, queue)
## Instruction:
Allow overriding timeout and sigkill_on_stop too.
## Code After:
from .job import Job
from .utils import get_backend
from . import app_settings
class task(object):
def __init__(self, queue='default', timeout=None, sigkill_on_stop=False):
self.queue = queue
self.timeout = timeout
self.sigkill_on_stop = sigkill_on_stop
app_settings.WORKERS.setdefault(self.queue, 1)
def __call__(self, fn):
return TaskWrapper(fn, self.queue, self.timeout, self.sigkill_on_stop)
class TaskWrapper(object):
def __init__(self, fn, queue, timeout, sigkill_on_stop):
self.fn = fn
self.queue = queue
self.timeout = timeout
self.sigkill_on_stop = sigkill_on_stop
self.path = '%s.%s' % (fn.__module__, fn.__name__)
def __repr__(self):
return "<TaskWrapper: %s>" % self.path
def __call__(self, *args, **kwargs):
# Allow us to override the default values dynamically
queue = kwargs.pop('django_lightweight_queue_queue', self.queue)
timeout = kwargs.pop('django_lightweight_queue_timeout', self.timeout)
sigkill_on_stop = kwargs.pop(
'django_lightweight_queue_sigkill_on_stop',
self.sigkill_on_stop,
)
job = Job(self.path, args, kwargs, timeout, sigkill_on_stop)
job.validate()
get_backend().enqueue(job, queue)
|
from .job import Job
from .utils import get_backend
from . import app_settings
class task(object):
def __init__(self, queue='default', timeout=None, sigkill_on_stop=False):
self.queue = queue
self.timeout = timeout
self.sigkill_on_stop = sigkill_on_stop
app_settings.WORKERS.setdefault(self.queue, 1)
def __call__(self, fn):
return TaskWrapper(fn, self.queue, self.timeout, self.sigkill_on_stop)
class TaskWrapper(object):
def __init__(self, fn, queue, timeout, sigkill_on_stop):
self.fn = fn
self.queue = queue
self.timeout = timeout
self.sigkill_on_stop = sigkill_on_stop
self.path = '%s.%s' % (fn.__module__, fn.__name__)
def __repr__(self):
return "<TaskWrapper: %s>" % self.path
def __call__(self, *args, **kwargs):
- # Allow us to override which queue at the last moment
+ # Allow us to override the default values dynamically
queue = kwargs.pop('django_lightweight_queue_queue', self.queue)
+ timeout = kwargs.pop('django_lightweight_queue_timeout', self.timeout)
+ sigkill_on_stop = kwargs.pop(
+ 'django_lightweight_queue_sigkill_on_stop',
+ self.sigkill_on_stop,
+ )
- job = Job(self.path, args, kwargs, self.timeout, self.sigkill_on_stop)
? ----- -----
+ job = Job(self.path, args, kwargs, timeout, sigkill_on_stop)
job.validate()
get_backend().enqueue(job, queue)
|
723a102d6272e7ba4b9df405b7c1493c34ac5b77
|
masters/master.chromium.fyi/master_site_config.py
|
masters/master.chromium.fyi/master_site_config.py
|
"""ActiveMaster definition."""
from config_bootstrap import Master
class ChromiumFYI(Master.Master1):
project_name = 'Chromium FYI'
master_port = 8011
slave_port = 8111
master_port_alt = 8211
buildbot_url = 'http://build.chromium.org/p/chromium.fyi/'
reboot_on_step_timeout = True
pubsub_service_account_file = 'service-account-pubsub.json'
pubsub_topic_url = 'projects/luci-milo/topics/public-buildbot'
name = 'chromium.fyi'
|
"""ActiveMaster definition."""
from config_bootstrap import Master
class ChromiumFYI(Master.Master1):
project_name = 'Chromium FYI'
master_port = 8011
slave_port = 8111
master_port_alt = 8211
buildbot_url = 'http://build.chromium.org/p/chromium.fyi/'
reboot_on_step_timeout = True
|
Revert pubsub roll on FYI
|
Revert pubsub roll on FYI
BUG=
TBR=estaab
Review URL: https://codereview.chromium.org/1688503002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@298680 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
eunchong/build,eunchong/build,eunchong/build,eunchong/build
|
"""ActiveMaster definition."""
from config_bootstrap import Master
class ChromiumFYI(Master.Master1):
project_name = 'Chromium FYI'
master_port = 8011
slave_port = 8111
master_port_alt = 8211
buildbot_url = 'http://build.chromium.org/p/chromium.fyi/'
reboot_on_step_timeout = True
- pubsub_service_account_file = 'service-account-pubsub.json'
- pubsub_topic_url = 'projects/luci-milo/topics/public-buildbot'
- name = 'chromium.fyi'
|
Revert pubsub roll on FYI
|
## Code Before:
"""ActiveMaster definition."""
from config_bootstrap import Master
class ChromiumFYI(Master.Master1):
project_name = 'Chromium FYI'
master_port = 8011
slave_port = 8111
master_port_alt = 8211
buildbot_url = 'http://build.chromium.org/p/chromium.fyi/'
reboot_on_step_timeout = True
pubsub_service_account_file = 'service-account-pubsub.json'
pubsub_topic_url = 'projects/luci-milo/topics/public-buildbot'
name = 'chromium.fyi'
## Instruction:
Revert pubsub roll on FYI
## Code After:
"""ActiveMaster definition."""
from config_bootstrap import Master
class ChromiumFYI(Master.Master1):
project_name = 'Chromium FYI'
master_port = 8011
slave_port = 8111
master_port_alt = 8211
buildbot_url = 'http://build.chromium.org/p/chromium.fyi/'
reboot_on_step_timeout = True
|
"""ActiveMaster definition."""
from config_bootstrap import Master
class ChromiumFYI(Master.Master1):
project_name = 'Chromium FYI'
master_port = 8011
slave_port = 8111
master_port_alt = 8211
buildbot_url = 'http://build.chromium.org/p/chromium.fyi/'
reboot_on_step_timeout = True
- pubsub_service_account_file = 'service-account-pubsub.json'
- pubsub_topic_url = 'projects/luci-milo/topics/public-buildbot'
- name = 'chromium.fyi'
|
04e243aafbd08008556d83d73fbbf22e5398aab4
|
telostats/stations/models.py
|
telostats/stations/models.py
|
from django.db import models
from django.utils import timezone
class Station(models.Model):
id = models.IntegerField(unique=True, primary_key=True)
name = models.CharField(u'name', max_length=100)
longitude = models.FloatField(u'longitude')
latitude = models.FloatField(u'latitude')
class Status(models.Model):
station = models.ForeignKey(Station)
timestamp = models.DateTimeField(default=timezone.now)
actual_timestamp = models.DateTimeField(default=timezone.now)
bikes = models.IntegerField(u'available bikes')
docks = models.IntegerField(u'available docks')
|
from django.db import models
from django.utils import timezone
class Station(models.Model):
id = models.IntegerField(unique=True, primary_key=True)
name = models.CharField(u'name', max_length=100)
longitude = models.FloatField(u'longitude')
latitude = models.FloatField(u'latitude')
def __unicode__(self):
return self.name
class Status(models.Model):
station = models.ForeignKey(Station)
timestamp = models.DateTimeField(default=timezone.now)
actual_timestamp = models.DateTimeField(default=timezone.now)
bikes = models.IntegerField(u'available bikes')
docks = models.IntegerField(u'available docks')
def __unicode__(self):
return u'{}: {}/{} ({})'.format(
self.station,
self.bikes, self.docks,
self.timestamp)
|
Add unicode methods to Station/Status
|
Add unicode methods to Station/Status
|
Python
|
bsd-3-clause
|
idan/telostats,idan/telostats,idan/telostats
|
from django.db import models
from django.utils import timezone
class Station(models.Model):
id = models.IntegerField(unique=True, primary_key=True)
name = models.CharField(u'name', max_length=100)
longitude = models.FloatField(u'longitude')
latitude = models.FloatField(u'latitude')
+ def __unicode__(self):
+ return self.name
+
class Status(models.Model):
station = models.ForeignKey(Station)
timestamp = models.DateTimeField(default=timezone.now)
actual_timestamp = models.DateTimeField(default=timezone.now)
bikes = models.IntegerField(u'available bikes')
docks = models.IntegerField(u'available docks')
+ def __unicode__(self):
+ return u'{}: {}/{} ({})'.format(
+ self.station,
+ self.bikes, self.docks,
+ self.timestamp)
+
|
Add unicode methods to Station/Status
|
## Code Before:
from django.db import models
from django.utils import timezone
class Station(models.Model):
id = models.IntegerField(unique=True, primary_key=True)
name = models.CharField(u'name', max_length=100)
longitude = models.FloatField(u'longitude')
latitude = models.FloatField(u'latitude')
class Status(models.Model):
station = models.ForeignKey(Station)
timestamp = models.DateTimeField(default=timezone.now)
actual_timestamp = models.DateTimeField(default=timezone.now)
bikes = models.IntegerField(u'available bikes')
docks = models.IntegerField(u'available docks')
## Instruction:
Add unicode methods to Station/Status
## Code After:
from django.db import models
from django.utils import timezone
class Station(models.Model):
id = models.IntegerField(unique=True, primary_key=True)
name = models.CharField(u'name', max_length=100)
longitude = models.FloatField(u'longitude')
latitude = models.FloatField(u'latitude')
def __unicode__(self):
return self.name
class Status(models.Model):
station = models.ForeignKey(Station)
timestamp = models.DateTimeField(default=timezone.now)
actual_timestamp = models.DateTimeField(default=timezone.now)
bikes = models.IntegerField(u'available bikes')
docks = models.IntegerField(u'available docks')
def __unicode__(self):
return u'{}: {}/{} ({})'.format(
self.station,
self.bikes, self.docks,
self.timestamp)
|
from django.db import models
from django.utils import timezone
class Station(models.Model):
id = models.IntegerField(unique=True, primary_key=True)
name = models.CharField(u'name', max_length=100)
longitude = models.FloatField(u'longitude')
latitude = models.FloatField(u'latitude')
+ def __unicode__(self):
+ return self.name
+
class Status(models.Model):
station = models.ForeignKey(Station)
timestamp = models.DateTimeField(default=timezone.now)
actual_timestamp = models.DateTimeField(default=timezone.now)
bikes = models.IntegerField(u'available bikes')
docks = models.IntegerField(u'available docks')
+
+ def __unicode__(self):
+ return u'{}: {}/{} ({})'.format(
+ self.station,
+ self.bikes, self.docks,
+ self.timestamp)
|
f915000cf88a80beadc725ab10e48d2b14d1be23
|
enlighten/counter.py
|
enlighten/counter.py
|
import sys
from enlighten._counter import Counter as _Counter
from enlighten._counter import SubCounter # pylint: disable=unused-import # noqa: F401
from enlighten._manager import Manager
# Counter is defined here to avoid circular dependencies
class Counter(_Counter): # pylint: disable=missing-docstring
__doc__ = _Counter.__doc__
def __init__(self, **kwargs):
manager = kwargs.get('manager', None)
if manager is None:
manager = Manager(stream=kwargs.get('stream', sys.stdout),
counter_class=self.__class__, set_scroll=False)
manager.counters[self] = 1
kwargs['manager'] = manager
super(Counter, self).__init__(**kwargs)
|
import sys
from enlighten._counter import Counter as _Counter
from enlighten._counter import SubCounter # pylint: disable=unused-import # noqa: F401
from enlighten._manager import get_manager
# Counter is defined here to avoid circular dependencies
class Counter(_Counter): # pylint: disable=missing-docstring
__doc__ = _Counter.__doc__
def __init__(self, **kwargs):
manager = kwargs.get('manager', None)
if manager is None:
manager = get_manager(stream=kwargs.get('stream', sys.stdout),
counter_class=self.__class__, set_scroll=False)
manager.counters[self] = 1
kwargs['manager'] = manager
super(Counter, self).__init__(**kwargs)
|
Use get_manager() for Counter direct
|
Use get_manager() for Counter direct
|
Python
|
mpl-2.0
|
Rockhopper-Technologies/enlighten
|
import sys
from enlighten._counter import Counter as _Counter
from enlighten._counter import SubCounter # pylint: disable=unused-import # noqa: F401
- from enlighten._manager import Manager
+ from enlighten._manager import get_manager
# Counter is defined here to avoid circular dependencies
class Counter(_Counter): # pylint: disable=missing-docstring
__doc__ = _Counter.__doc__
def __init__(self, **kwargs):
manager = kwargs.get('manager', None)
if manager is None:
- manager = Manager(stream=kwargs.get('stream', sys.stdout),
+ manager = get_manager(stream=kwargs.get('stream', sys.stdout),
- counter_class=self.__class__, set_scroll=False)
+ counter_class=self.__class__, set_scroll=False)
manager.counters[self] = 1
kwargs['manager'] = manager
super(Counter, self).__init__(**kwargs)
|
Use get_manager() for Counter direct
|
## Code Before:
import sys
from enlighten._counter import Counter as _Counter
from enlighten._counter import SubCounter # pylint: disable=unused-import # noqa: F401
from enlighten._manager import Manager
# Counter is defined here to avoid circular dependencies
class Counter(_Counter): # pylint: disable=missing-docstring
__doc__ = _Counter.__doc__
def __init__(self, **kwargs):
manager = kwargs.get('manager', None)
if manager is None:
manager = Manager(stream=kwargs.get('stream', sys.stdout),
counter_class=self.__class__, set_scroll=False)
manager.counters[self] = 1
kwargs['manager'] = manager
super(Counter, self).__init__(**kwargs)
## Instruction:
Use get_manager() for Counter direct
## Code After:
import sys
from enlighten._counter import Counter as _Counter
from enlighten._counter import SubCounter # pylint: disable=unused-import # noqa: F401
from enlighten._manager import get_manager
# Counter is defined here to avoid circular dependencies
class Counter(_Counter): # pylint: disable=missing-docstring
__doc__ = _Counter.__doc__
def __init__(self, **kwargs):
manager = kwargs.get('manager', None)
if manager is None:
manager = get_manager(stream=kwargs.get('stream', sys.stdout),
counter_class=self.__class__, set_scroll=False)
manager.counters[self] = 1
kwargs['manager'] = manager
super(Counter, self).__init__(**kwargs)
|
import sys
from enlighten._counter import Counter as _Counter
from enlighten._counter import SubCounter # pylint: disable=unused-import # noqa: F401
- from enlighten._manager import Manager
? ^
+ from enlighten._manager import get_manager
? ^^^^^
# Counter is defined here to avoid circular dependencies
class Counter(_Counter): # pylint: disable=missing-docstring
__doc__ = _Counter.__doc__
def __init__(self, **kwargs):
manager = kwargs.get('manager', None)
if manager is None:
- manager = Manager(stream=kwargs.get('stream', sys.stdout),
? ^
+ manager = get_manager(stream=kwargs.get('stream', sys.stdout),
? ^^^^^
- counter_class=self.__class__, set_scroll=False)
+ counter_class=self.__class__, set_scroll=False)
? ++++
manager.counters[self] = 1
kwargs['manager'] = manager
super(Counter, self).__init__(**kwargs)
|
4e9c0cb3cd0d74ce008f0279bc6e9ec353c03fee
|
senlin_dashboard/api/utils.py
|
senlin_dashboard/api/utils.py
|
import itertools
def update_pagination(entities, request_size, page_size, marker,
sort_dir, sort_key, reversed_order):
has_prev_data = False
has_more_data = False
entities = list(itertools.islice(entities, request_size))
# first and middle page condition
if len(entities) > page_size:
entities.pop(-1)
has_more_data = True
# middle page condition
if marker is not None:
has_prev_data = True
# first page condition when reached via prev back
elif reversed_order and marker is not None:
has_more_data = True
# last page condition
elif marker is not None:
has_prev_data = True
# restore the original ordering here
if reversed_order:
entities = sorted(entities, key=lambda entity:
(getattr(entity, sort_key)),
reverse=(sort_dir == sort_dir))
return entities, has_more_data, has_prev_data
|
import itertools
def update_pagination(entities, request_size, page_size, marker,
sort_dir, sort_key, reversed_order):
has_prev_data = False
has_more_data = False
entities = list(itertools.islice(entities, request_size))
# first and middle page condition
if len(entities) > page_size:
entities.pop(-1)
has_more_data = True
# middle page condition
if marker is not None:
has_prev_data = True
# first page condition when reached via prev back
elif reversed_order and marker is not None:
has_more_data = True
# last page condition
elif marker is not None:
has_prev_data = True
# restore the original ordering here
if reversed_order:
entities.reverse()
return entities, has_more_data, has_prev_data
|
Use entities.reverse() rather than sorted(.., reverse=True)
|
Use entities.reverse() rather than sorted(.., reverse=True)
Change-Id: I33ee5b078e3d27a45bd159be0f0b241c20792f92
|
Python
|
apache-2.0
|
openstack/senlin-dashboard,stackforge/senlin-dashboard,stackforge/senlin-dashboard,openstack/senlin-dashboard,stackforge/senlin-dashboard,openstack/senlin-dashboard,openstack/senlin-dashboard
|
import itertools
def update_pagination(entities, request_size, page_size, marker,
sort_dir, sort_key, reversed_order):
has_prev_data = False
has_more_data = False
entities = list(itertools.islice(entities, request_size))
# first and middle page condition
if len(entities) > page_size:
entities.pop(-1)
has_more_data = True
# middle page condition
if marker is not None:
has_prev_data = True
# first page condition when reached via prev back
elif reversed_order and marker is not None:
has_more_data = True
# last page condition
elif marker is not None:
has_prev_data = True
# restore the original ordering here
if reversed_order:
+ entities.reverse()
- entities = sorted(entities, key=lambda entity:
- (getattr(entity, sort_key)),
- reverse=(sort_dir == sort_dir))
return entities, has_more_data, has_prev_data
|
Use entities.reverse() rather than sorted(.., reverse=True)
|
## Code Before:
import itertools
def update_pagination(entities, request_size, page_size, marker,
sort_dir, sort_key, reversed_order):
has_prev_data = False
has_more_data = False
entities = list(itertools.islice(entities, request_size))
# first and middle page condition
if len(entities) > page_size:
entities.pop(-1)
has_more_data = True
# middle page condition
if marker is not None:
has_prev_data = True
# first page condition when reached via prev back
elif reversed_order and marker is not None:
has_more_data = True
# last page condition
elif marker is not None:
has_prev_data = True
# restore the original ordering here
if reversed_order:
entities = sorted(entities, key=lambda entity:
(getattr(entity, sort_key)),
reverse=(sort_dir == sort_dir))
return entities, has_more_data, has_prev_data
## Instruction:
Use entities.reverse() rather than sorted(.., reverse=True)
## Code After:
import itertools
def update_pagination(entities, request_size, page_size, marker,
sort_dir, sort_key, reversed_order):
has_prev_data = False
has_more_data = False
entities = list(itertools.islice(entities, request_size))
# first and middle page condition
if len(entities) > page_size:
entities.pop(-1)
has_more_data = True
# middle page condition
if marker is not None:
has_prev_data = True
# first page condition when reached via prev back
elif reversed_order and marker is not None:
has_more_data = True
# last page condition
elif marker is not None:
has_prev_data = True
# restore the original ordering here
if reversed_order:
entities.reverse()
return entities, has_more_data, has_prev_data
|
import itertools
def update_pagination(entities, request_size, page_size, marker,
sort_dir, sort_key, reversed_order):
has_prev_data = False
has_more_data = False
entities = list(itertools.islice(entities, request_size))
# first and middle page condition
if len(entities) > page_size:
entities.pop(-1)
has_more_data = True
# middle page condition
if marker is not None:
has_prev_data = True
# first page condition when reached via prev back
elif reversed_order and marker is not None:
has_more_data = True
# last page condition
elif marker is not None:
has_prev_data = True
# restore the original ordering here
if reversed_order:
+ entities.reverse()
- entities = sorted(entities, key=lambda entity:
- (getattr(entity, sort_key)),
- reverse=(sort_dir == sort_dir))
return entities, has_more_data, has_prev_data
|
f22fa6d0c1b7e3bde95554f87af7254c2c381c41
|
django_app_lti/urls.py
|
django_app_lti/urls.py
|
from django.urls import path
from .views import LTILaunchView, LTIToolConfigView, logout_view, logged_out_view
urlpatterns = [
path('', LTILaunchView.as_view(), name='index'),
path('launch', LTILaunchView.as_view(), name='launch'),
path('config', LTIToolConfigView.as_view(), name='config'),
path('logout', logout_view, name="logout"),
path('logged-out', logged_out_view, name="logged-out"),
]
|
from django.urls import path
from .views import LTILaunchView, LTIToolConfigView, logout_view, logged_out_view
app_name = 'lti'
urlpatterns = [
path('', LTILaunchView.as_view(), name='index'),
path('launch', LTILaunchView.as_view(), name='launch'),
path('config', LTIToolConfigView.as_view(), name='config'),
path('logout', logout_view, name="logout"),
path('logged-out', logged_out_view, name="logged-out"),
]
|
Add app_name to url module
|
Add app_name to url module
|
Python
|
bsd-3-clause
|
Harvard-ATG/django-app-lti
|
from django.urls import path
from .views import LTILaunchView, LTIToolConfigView, logout_view, logged_out_view
+ app_name = 'lti'
urlpatterns = [
path('', LTILaunchView.as_view(), name='index'),
path('launch', LTILaunchView.as_view(), name='launch'),
path('config', LTIToolConfigView.as_view(), name='config'),
path('logout', logout_view, name="logout"),
path('logged-out', logged_out_view, name="logged-out"),
]
|
Add app_name to url module
|
## Code Before:
from django.urls import path
from .views import LTILaunchView, LTIToolConfigView, logout_view, logged_out_view
urlpatterns = [
path('', LTILaunchView.as_view(), name='index'),
path('launch', LTILaunchView.as_view(), name='launch'),
path('config', LTIToolConfigView.as_view(), name='config'),
path('logout', logout_view, name="logout"),
path('logged-out', logged_out_view, name="logged-out"),
]
## Instruction:
Add app_name to url module
## Code After:
from django.urls import path
from .views import LTILaunchView, LTIToolConfigView, logout_view, logged_out_view
app_name = 'lti'
urlpatterns = [
path('', LTILaunchView.as_view(), name='index'),
path('launch', LTILaunchView.as_view(), name='launch'),
path('config', LTIToolConfigView.as_view(), name='config'),
path('logout', logout_view, name="logout"),
path('logged-out', logged_out_view, name="logged-out"),
]
|
from django.urls import path
from .views import LTILaunchView, LTIToolConfigView, logout_view, logged_out_view
+ app_name = 'lti'
urlpatterns = [
path('', LTILaunchView.as_view(), name='index'),
path('launch', LTILaunchView.as_view(), name='launch'),
path('config', LTIToolConfigView.as_view(), name='config'),
path('logout', logout_view, name="logout"),
path('logged-out', logged_out_view, name="logged-out"),
]
|
fc636b5ae0db4190e0499c6bec58d40ad0d0afe4
|
cms/management/commands/subcommands/moderator.py
|
cms/management/commands/subcommands/moderator.py
|
from cms.management.commands.subcommands.base import SubcommandsCommand
from cms.models import CMSPlugin
from cms.models.pagemodel import Page
from django.core.management.base import NoArgsCommand
class ModeratorOnCommand(NoArgsCommand):
help = 'Turn moderation on, run AFTER upgrading to 2.4'
def handle_noargs(self, **options):
"""
Ensure that the public pages look the same as their draft versions.
This is done by checking the content of the public pages, and reverting
the draft version to look the same.
The second stage is to go through the draft pages and publish the ones
marked as published.
The end result should be that the public pages and their draft versions
have the same plugins listed. If both versions exist and have content,
the public page has precedence. Otherwise, the draft version is used.
"""
for page in Page.objects.public():
if CMSPlugin.objects.filter(placeholder__page=page).count():
page.revert()
for page in Page.objects.drafts().filter(published=True):
page.publish()
class ModeratorCommand(SubcommandsCommand):
help = 'Moderator utilities'
subcommands = {
'on': ModeratorOnCommand,
}
|
from cms.management.commands.subcommands.base import SubcommandsCommand
from cms.models import CMSPlugin
from cms.models.pagemodel import Page
from django.core.management.base import NoArgsCommand
class ModeratorOnCommand(NoArgsCommand):
help = 'Turn moderation on, run AFTER upgrading to 2.4'
def handle_noargs(self, **options):
"""
Ensure that the public pages look the same as their draft versions.
This is done by checking the content of the public pages, and reverting
the draft version to look the same.
The second stage is to go through the draft pages and publish the ones
marked as published.
The end result should be that the public pages and their draft versions
have the same plugins listed. If both versions exist and have content,
the public page has precedence. Otherwise, the draft version is used.
"""
for page in Page.objects.public():
if CMSPlugin.objects.filter(placeholder__page=page).count():
page.publisher_draft.revert()
for page in Page.objects.drafts().filter(published=True):
page.publish()
class ModeratorCommand(SubcommandsCommand):
help = 'Moderator utilities'
subcommands = {
'on': ModeratorOnCommand,
}
|
Apply revert to the draft page.
|
Apply revert to the draft page.
|
Python
|
bsd-3-clause
|
keimlink/django-cms,mkoistinen/django-cms,foobacca/django-cms,FinalAngel/django-cms,nimbis/django-cms,Jaccorot/django-cms,memnonila/django-cms,nostalgiaz/django-cms,jproffitt/django-cms,adaptivelogic/django-cms,Livefyre/django-cms,sznekol/django-cms,MagicSolutions/django-cms,benzkji/django-cms,iddqd1/django-cms,SinnerSchraderMobileMirrors/django-cms,isotoma/django-cms,vxsx/django-cms,SofiaReis/django-cms,intgr/django-cms,cyberintruder/django-cms,sznekol/django-cms,ScholzVolkmer/django-cms,wyg3958/django-cms,SmithsonianEnterprises/django-cms,dhorelik/django-cms,jrief/django-cms,petecummings/django-cms,timgraham/django-cms,jeffreylu9/django-cms,chmberl/django-cms,vad/django-cms,foobacca/django-cms,intgr/django-cms,ScholzVolkmer/django-cms,frnhr/django-cms,ojii/django-cms,rsalmaso/django-cms,intip/django-cms,ojii/django-cms,sephii/django-cms,benzkji/django-cms,josjevv/django-cms,bittner/django-cms,rsalmaso/django-cms,selecsosi/django-cms,jproffitt/django-cms,Vegasvikk/django-cms,youprofit/django-cms,frnhr/django-cms,irudayarajisawa/django-cms,kk9599/django-cms,foobacca/django-cms,takeshineshiro/django-cms,FinalAngel/django-cms,Jaccorot/django-cms,AlexProfi/django-cms,SachaMPS/django-cms,wuzhihui1123/django-cms,stefanfoulis/django-cms,jrclaramunt/django-cms,chkir/django-cms,vstoykov/django-cms,andyzsf/django-cms,SofiaReis/django-cms,jsma/django-cms,owers19856/django-cms,frnhr/django-cms,11craft/django-cms,benzkji/django-cms,Vegasvikk/django-cms,yakky/django-cms,nimbis/django-cms,stefanfoulis/django-cms,intgr/django-cms,rscnt/django-cms,pancentric/django-cms,iddqd1/django-cms,qnub/django-cms,robmagee/django-cms,astagi/django-cms,vxsx/django-cms,intip/django-cms,selecsosi/django-cms,jeffreylu9/django-cms,josjevv/django-cms,astagi/django-cms,DylannCordel/django-cms,rscnt/django-cms,360youlun/django-cms,mkoistinen/django-cms,intip/django-cms,Livefyre/django-cms,isotoma/django-cms,360youlun/django-cms,divio/django-cms,rryan/django-cms,memnonila/django-cms,liuyisiyisi/django-cms,SinnerSchraderMobileMirrors/django-cms,iddqd1/django-cms,datakortet/django-cms,sznekol/django-cms,frnhr/django-cms,jrief/django-cms,evildmp/django-cms,owers19856/django-cms,Livefyre/django-cms,evildmp/django-cms,isotoma/django-cms,robmagee/django-cms,SachaMPS/django-cms,leture/django-cms,czpython/django-cms,jproffitt/django-cms,pixbuffer/django-cms,youprofit/django-cms,yakky/django-cms,bittner/django-cms,czpython/django-cms,webu/django-cms,datakortet/django-cms,SachaMPS/django-cms,nostalgiaz/django-cms,astagi/django-cms,vad/django-cms,cyberintruder/django-cms,adaptivelogic/django-cms,jeffreylu9/django-cms,webu/django-cms,petecummings/django-cms,jrief/django-cms,360youlun/django-cms,rryan/django-cms,pancentric/django-cms,AlexProfi/django-cms,divio/django-cms,leture/django-cms,jproffitt/django-cms,stefanw/django-cms,qnub/django-cms,ScholzVolkmer/django-cms,adaptivelogic/django-cms,wuzhihui1123/django-cms,pancentric/django-cms,sephii/django-cms,timgraham/django-cms,datakortet/django-cms,dhorelik/django-cms,yakky/django-cms,donce/django-cms,keimlink/django-cms,andyzsf/django-cms,saintbird/django-cms,chmberl/django-cms,wuzhihui1123/django-cms,FinalAngel/django-cms,wyg3958/django-cms,farhaadila/django-cms,vstoykov/django-cms,wyg3958/django-cms,Livefyre/django-cms,leture/django-cms,youprofit/django-cms,stefanfoulis/django-cms,saintbird/django-cms,irudayarajisawa/django-cms,rsalmaso/django-cms,selecsosi/django-cms,evildmp/django-cms,stefanw/django-cms,netzkolchose/django-cms,wuzhihui1123/django-cms,sephii/django-cms,intip/django-cms,
pixbuffer/django-cms,cyberintruder/django-cms,SmithsonianEnterprises/django-cms,benzkji/django-cms,stefanw/django-cms,chkir/django-cms,11craft/django-cms,jeffreylu9/django-cms,jsma/django-cms,vad/django-cms,stefanfoulis/django-cms,netzkolchose/django-cms,qnub/django-cms,rscnt/django-cms,divio/django-cms,MagicSolutions/django-cms,datakortet/django-cms,11craft/django-cms,chkir/django-cms,bittner/django-cms,webu/django-cms,czpython/django-cms,FinalAngel/django-cms,timgraham/django-cms,nostalgiaz/django-cms,11craft/django-cms,AlexProfi/django-cms,jsma/django-cms,czpython/django-cms,SmithsonianEnterprises/django-cms,chmberl/django-cms,philippze/django-cms,jrclaramunt/django-cms,robmagee/django-cms,vad/django-cms,rsalmaso/django-cms,netzkolchose/django-cms,jrief/django-cms,jsma/django-cms,nimbis/django-cms,kk9599/django-cms,takeshineshiro/django-cms,nimbis/django-cms,SinnerSchraderMobileMirrors/django-cms,pixbuffer/django-cms,intgr/django-cms,farhaadila/django-cms,vxsx/django-cms,andyzsf/django-cms,netzkolchose/django-cms,liuyisiyisi/django-cms,josjevv/django-cms,bittner/django-cms,saintbird/django-cms,owers19856/django-cms,donce/django-cms,Jaccorot/django-cms,memnonila/django-cms,sephii/django-cms,nostalgiaz/django-cms,philippze/django-cms,yakky/django-cms,stefanw/django-cms,farhaadila/django-cms,SofiaReis/django-cms,vxsx/django-cms,petecummings/django-cms,takeshineshiro/django-cms,jrclaramunt/django-cms,rryan/django-cms,MagicSolutions/django-cms,liuyisiyisi/django-cms,ojii/django-cms,mkoistinen/django-cms,isotoma/django-cms,mkoistinen/django-cms,andyzsf/django-cms,keimlink/django-cms,rryan/django-cms,donce/django-cms,divio/django-cms,dhorelik/django-cms,irudayarajisawa/django-cms,DylannCordel/django-cms,kk9599/django-cms,foobacca/django-cms,vstoykov/django-cms,evildmp/django-cms,philippze/django-cms,Vegasvikk/django-cms,selecsosi/django-cms,DylannCordel/django-cms
|
from cms.management.commands.subcommands.base import SubcommandsCommand
from cms.models import CMSPlugin
from cms.models.pagemodel import Page
from django.core.management.base import NoArgsCommand
class ModeratorOnCommand(NoArgsCommand):
help = 'Turn moderation on, run AFTER upgrading to 2.4'
def handle_noargs(self, **options):
"""
Ensure that the public pages look the same as their draft versions.
This is done by checking the content of the public pages, and reverting
the draft version to look the same.
The second stage is to go through the draft pages and publish the ones
marked as published.
The end result should be that the public pages and their draft versions
have the same plugins listed. If both versions exist and have content,
the public page has precedence. Otherwise, the draft version is used.
"""
for page in Page.objects.public():
if CMSPlugin.objects.filter(placeholder__page=page).count():
- page.revert()
+ page.publisher_draft.revert()
for page in Page.objects.drafts().filter(published=True):
page.publish()
class ModeratorCommand(SubcommandsCommand):
help = 'Moderator utilities'
subcommands = {
'on': ModeratorOnCommand,
}
|
Apply revert to the draft page.
|
## Code Before:
from cms.management.commands.subcommands.base import SubcommandsCommand
from cms.models import CMSPlugin
from cms.models.pagemodel import Page
from django.core.management.base import NoArgsCommand
class ModeratorOnCommand(NoArgsCommand):
help = 'Turn moderation on, run AFTER upgrading to 2.4'
def handle_noargs(self, **options):
"""
Ensure that the public pages look the same as their draft versions.
This is done by checking the content of the public pages, and reverting
the draft version to look the same.
The second stage is to go through the draft pages and publish the ones
marked as published.
The end result should be that the public pages and their draft versions
have the same plugins listed. If both versions exist and have content,
the public page has precedence. Otherwise, the draft version is used.
"""
for page in Page.objects.public():
if CMSPlugin.objects.filter(placeholder__page=page).count():
page.revert()
for page in Page.objects.drafts().filter(published=True):
page.publish()
class ModeratorCommand(SubcommandsCommand):
help = 'Moderator utilities'
subcommands = {
'on': ModeratorOnCommand,
}
## Instruction:
Apply revert to the draft page.
## Code After:
from cms.management.commands.subcommands.base import SubcommandsCommand
from cms.models import CMSPlugin
from cms.models.pagemodel import Page
from django.core.management.base import NoArgsCommand
class ModeratorOnCommand(NoArgsCommand):
help = 'Turn moderation on, run AFTER upgrading to 2.4'
def handle_noargs(self, **options):
"""
Ensure that the public pages look the same as their draft versions.
This is done by checking the content of the public pages, and reverting
the draft version to look the same.
The second stage is to go through the draft pages and publish the ones
marked as published.
The end result should be that the public pages and their draft versions
have the same plugins listed. If both versions exist and have content,
the public page has precedence. Otherwise, the draft version is used.
"""
for page in Page.objects.public():
if CMSPlugin.objects.filter(placeholder__page=page).count():
page.publisher_draft.revert()
for page in Page.objects.drafts().filter(published=True):
page.publish()
class ModeratorCommand(SubcommandsCommand):
help = 'Moderator utilities'
subcommands = {
'on': ModeratorOnCommand,
}
|
from cms.management.commands.subcommands.base import SubcommandsCommand
from cms.models import CMSPlugin
from cms.models.pagemodel import Page
from django.core.management.base import NoArgsCommand
class ModeratorOnCommand(NoArgsCommand):
help = 'Turn moderation on, run AFTER upgrading to 2.4'
def handle_noargs(self, **options):
"""
Ensure that the public pages look the same as their draft versions.
This is done by checking the content of the public pages, and reverting
the draft version to look the same.
The second stage is to go through the draft pages and publish the ones
marked as published.
The end result should be that the public pages and their draft versions
have the same plugins listed. If both versions exist and have content,
the public page has precedence. Otherwise, the draft version is used.
"""
for page in Page.objects.public():
if CMSPlugin.objects.filter(placeholder__page=page).count():
- page.revert()
+ page.publisher_draft.revert()
? ++++++++++++++++
for page in Page.objects.drafts().filter(published=True):
page.publish()
class ModeratorCommand(SubcommandsCommand):
help = 'Moderator utilities'
subcommands = {
'on': ModeratorOnCommand,
}
|
e252962f9a6cc1ed6cd2ccdd72c4151708be7233
|
tests/cases/resources/tests/preview.py
|
tests/cases/resources/tests/preview.py
|
import json
from django.test import TestCase
class PreviewResourceTestCase(TestCase):
def test_get(self):
response = self.client.get('/api/data/preview/',
HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertEqual(json.loads(response.content), {
'_links': {
'self': {
'href': 'http://testserver/api/data/preview/?limit=20&page=1',
},
'base': {
'href': 'http://testserver/api/data/preview/',
}
},
'keys': [],
'object_count': 0,
'object_name': 'employee',
'object_name_plural': 'employees',
'objects': [],
'page_num': 1,
'num_pages': 1,
'limit': 20,
})
|
import json
from django.contrib.auth.models import User
from django.test import TestCase
class PreviewResourceTestCase(TestCase):
def test_get(self):
response = self.client.get('/api/data/preview/',
HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertEqual(json.loads(response.content), {
'_links': {
'self': {
'href': 'http://testserver/api/data/preview/?limit=20&page=1',
},
'base': {
'href': 'http://testserver/api/data/preview/',
}
},
'keys': [],
'object_count': 0,
'object_name': 'employee',
'object_name_plural': 'employees',
'objects': [],
'page_num': 1,
'num_pages': 1,
'limit': 20,
})
def test_get_with_user(self):
self.user = User.objects.create_user(username='test', password='test')
self.client.login(username='test', password='test')
response = self.client.get('/api/data/preview/',
HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertEqual(json.loads(response.content), {
'_links': {
'self': {
'href': 'http://testserver/api/data/preview/?limit=20&page=1',
},
'base': {
'href': 'http://testserver/api/data/preview/',
}
},
'keys': [],
'object_count': 0,
'object_name': 'employee',
'object_name_plural': 'employees',
'objects': [],
'page_num': 1,
'num_pages': 1,
'limit': 20,
})
|
Add test to recreate error in this bug
|
Add test to recreate error in this bug
|
Python
|
bsd-2-clause
|
rv816/serrano_night,rv816/serrano_night,chop-dbhi/serrano,chop-dbhi/serrano
|
import json
+ from django.contrib.auth.models import User
from django.test import TestCase
class PreviewResourceTestCase(TestCase):
def test_get(self):
response = self.client.get('/api/data/preview/',
HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertEqual(json.loads(response.content), {
'_links': {
'self': {
'href': 'http://testserver/api/data/preview/?limit=20&page=1',
},
'base': {
'href': 'http://testserver/api/data/preview/',
}
},
'keys': [],
'object_count': 0,
'object_name': 'employee',
'object_name_plural': 'employees',
'objects': [],
'page_num': 1,
'num_pages': 1,
'limit': 20,
})
+ def test_get_with_user(self):
+ self.user = User.objects.create_user(username='test', password='test')
+ self.client.login(username='test', password='test')
+
+ response = self.client.get('/api/data/preview/',
+ HTTP_ACCEPT='application/json')
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(response['Content-Type'], 'application/json')
+ self.assertEqual(json.loads(response.content), {
+ '_links': {
+ 'self': {
+ 'href': 'http://testserver/api/data/preview/?limit=20&page=1',
+ },
+ 'base': {
+ 'href': 'http://testserver/api/data/preview/',
+ }
+ },
+ 'keys': [],
+ 'object_count': 0,
+ 'object_name': 'employee',
+ 'object_name_plural': 'employees',
+ 'objects': [],
+ 'page_num': 1,
+ 'num_pages': 1,
+ 'limit': 20,
+ })
+
|
Add test to recreate error in this bug
|
## Code Before:
import json
from django.test import TestCase
class PreviewResourceTestCase(TestCase):
def test_get(self):
response = self.client.get('/api/data/preview/',
HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertEqual(json.loads(response.content), {
'_links': {
'self': {
'href': 'http://testserver/api/data/preview/?limit=20&page=1',
},
'base': {
'href': 'http://testserver/api/data/preview/',
}
},
'keys': [],
'object_count': 0,
'object_name': 'employee',
'object_name_plural': 'employees',
'objects': [],
'page_num': 1,
'num_pages': 1,
'limit': 20,
})
## Instruction:
Add test to recreate error in this bug
## Code After:
import json
from django.contrib.auth.models import User
from django.test import TestCase
class PreviewResourceTestCase(TestCase):
def test_get(self):
response = self.client.get('/api/data/preview/',
HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertEqual(json.loads(response.content), {
'_links': {
'self': {
'href': 'http://testserver/api/data/preview/?limit=20&page=1',
},
'base': {
'href': 'http://testserver/api/data/preview/',
}
},
'keys': [],
'object_count': 0,
'object_name': 'employee',
'object_name_plural': 'employees',
'objects': [],
'page_num': 1,
'num_pages': 1,
'limit': 20,
})
def test_get_with_user(self):
self.user = User.objects.create_user(username='test', password='test')
self.client.login(username='test', password='test')
response = self.client.get('/api/data/preview/',
HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertEqual(json.loads(response.content), {
'_links': {
'self': {
'href': 'http://testserver/api/data/preview/?limit=20&page=1',
},
'base': {
'href': 'http://testserver/api/data/preview/',
}
},
'keys': [],
'object_count': 0,
'object_name': 'employee',
'object_name_plural': 'employees',
'objects': [],
'page_num': 1,
'num_pages': 1,
'limit': 20,
})
|
import json
+ from django.contrib.auth.models import User
from django.test import TestCase
class PreviewResourceTestCase(TestCase):
def test_get(self):
response = self.client.get('/api/data/preview/',
HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertEqual(json.loads(response.content), {
'_links': {
'self': {
'href': 'http://testserver/api/data/preview/?limit=20&page=1',
},
'base': {
'href': 'http://testserver/api/data/preview/',
}
},
'keys': [],
'object_count': 0,
'object_name': 'employee',
'object_name_plural': 'employees',
'objects': [],
'page_num': 1,
'num_pages': 1,
'limit': 20,
})
+
+ def test_get_with_user(self):
+ self.user = User.objects.create_user(username='test', password='test')
+ self.client.login(username='test', password='test')
+
+ response = self.client.get('/api/data/preview/',
+ HTTP_ACCEPT='application/json')
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(response['Content-Type'], 'application/json')
+ self.assertEqual(json.loads(response.content), {
+ '_links': {
+ 'self': {
+ 'href': 'http://testserver/api/data/preview/?limit=20&page=1',
+ },
+ 'base': {
+ 'href': 'http://testserver/api/data/preview/',
+ }
+ },
+ 'keys': [],
+ 'object_count': 0,
+ 'object_name': 'employee',
+ 'object_name_plural': 'employees',
+ 'objects': [],
+ 'page_num': 1,
+ 'num_pages': 1,
+ 'limit': 20,
+ })
|
5c5e49797358e7020d409adf74209c0647050465
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(name='fuzzywuzzy',
version='0.2',
description='Fuzzy string matching in python',
author='Adam Cohen',
author_email='[email protected]',
url='https://github.com/seatgeek/fuzzywuzzy/',
packages=['fuzzywuzzy'])
|
from distutils.core import setup
setup(name='fuzzywuzzy',
version='0.2',
description='Fuzzy string matching in python',
author='Adam Cohen',
author_email='[email protected]',
url='https://github.com/seatgeek/fuzzywuzzy/',
packages=['fuzzywuzzy'],
classifiers=(
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3'
)
)
|
Add classifiers for python versions
|
Add classifiers for python versions
|
Python
|
mit
|
jayhetee/fuzzywuzzy,salilnavgire/fuzzywuzzy,beni55/fuzzywuzzy,beni55/fuzzywuzzy,blakejennings/fuzzywuzzy,shalecraig/fuzzywuzzy,pombredanne/fuzzywuzzy,salilnavgire/fuzzywuzzy,pombredanne/fuzzywuzzy,aeeilllmrx/fuzzywuzzy,medecau/fuzzywuzzy,zhahaoyu/fuzzywuzzy,zhahaoyu/fuzzywuzzy,jayhetee/fuzzywuzzy,shalecraig/fuzzywuzzy,aeeilllmrx/fuzzywuzzy,blakejennings/fuzzywuzzy
|
from distutils.core import setup
setup(name='fuzzywuzzy',
version='0.2',
description='Fuzzy string matching in python',
author='Adam Cohen',
author_email='[email protected]',
url='https://github.com/seatgeek/fuzzywuzzy/',
- packages=['fuzzywuzzy'])
+ packages=['fuzzywuzzy'],
+ classifiers=(
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2.6',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.3'
+ )
+ )
|
Add classifiers for python versions
|
## Code Before:
from distutils.core import setup
setup(name='fuzzywuzzy',
version='0.2',
description='Fuzzy string matching in python',
author='Adam Cohen',
author_email='[email protected]',
url='https://github.com/seatgeek/fuzzywuzzy/',
packages=['fuzzywuzzy'])
## Instruction:
Add classifiers for python versions
## Code After:
from distutils.core import setup
setup(name='fuzzywuzzy',
version='0.2',
description='Fuzzy string matching in python',
author='Adam Cohen',
author_email='[email protected]',
url='https://github.com/seatgeek/fuzzywuzzy/',
packages=['fuzzywuzzy'],
classifiers=(
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3'
)
)
|
from distutils.core import setup
setup(name='fuzzywuzzy',
version='0.2',
description='Fuzzy string matching in python',
author='Adam Cohen',
author_email='[email protected]',
url='https://github.com/seatgeek/fuzzywuzzy/',
- packages=['fuzzywuzzy'])
? ^
+ packages=['fuzzywuzzy'],
? ^
+ classifiers=(
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2.6',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.3'
+ )
+ )
|
573d3d8b652527e0293321e09474f7a6e5b243f4
|
tests/test_dispatch.py
|
tests/test_dispatch.py
|
import accordian
import pytest
def test_unknown_event(loop):
"""
An exception should be thrown when trying to register a
handler for an unknown event.
"""
dispatch = accordian.Dispatch(loop=loop)
with pytest.raises(ValueError):
dispatch.on("unknown")
def test_clean_stop(loop):
dispatch = accordian.Dispatch(loop=loop)
loop.run_until_complete(dispatch.start())
loop.run_until_complete(dispatch.stop())
|
import pytest
def test_start_idempotent(loop, dispatch):
loop.run_until_complete(dispatch.start())
assert dispatch.running
loop.run_until_complete(dispatch.start())
assert dispatch.running
def test_stop_idempotent(loop, dispatch):
loop.run_until_complete(dispatch.start())
assert dispatch.running
loop.run_until_complete(dispatch.stop())
assert not dispatch.running
loop.run_until_complete(dispatch.stop())
assert not dispatch.running
def test_clean_stop(loop, dispatch):
""" Stop ensures the main dispatch loop shuts down gracefully """
loop.run_until_complete(dispatch.start())
loop.run_until_complete(dispatch.stop())
def test_unknown_event(dispatch):
"""
An exception should be thrown when trying to register a
handler for an unknown event.
"""
with pytest.raises(ValueError):
dispatch.on("unknown")
def test_register(dispatch):
event = "my-event"
params = ["x", "y", "z"]
dispatch.register(event, params)
assert "my-event" in dispatch._handlers
def test_register_twice(dispatch):
event = "my-event"
params = ["x", "y", "z"]
dispatch.register(event, params)
with pytest.raises(ValueError):
dispatch.register(event, params)
def test_register_running(dispatch, loop):
event = "my-event"
params = ["x", "y", "z"]
loop.run_until_complete(dispatch.start())
with pytest.raises(RuntimeError):
dispatch.register(event, params)
loop.run_until_complete(dispatch.stop())
def test_unregister_unknown(dispatch):
assert "unknown-event" not in dispatch._handlers
dispatch.unregister("unknown-event")
def test_unregister_running(dispatch, loop):
event = "my-event"
params = ["x", "y", "z"]
dispatch.register(event, params)
loop.run_until_complete(dispatch.start())
with pytest.raises(RuntimeError):
dispatch.unregister(event)
loop.run_until_complete(dispatch.stop())
def test_single_handler(dispatch, loop):
event = "my-event"
params = {"x": 4, "y": 5, "z": 6}
dispatch.register(event, params.keys())
called = False
@dispatch.on(event)
async def handle(x, y):
nonlocal called
called = True
for task in [
dispatch.start(),
dispatch.trigger(event, params),
dispatch.stop()
]:
loop.run_until_complete(task)
assert called
|
Test dispatch (un)register, basic handler
|
Test dispatch (un)register, basic handler
|
Python
|
mit
|
numberoverzero/accordian
|
- import accordian
import pytest
+ def test_start_idempotent(loop, dispatch):
+ loop.run_until_complete(dispatch.start())
+ assert dispatch.running
+
+ loop.run_until_complete(dispatch.start())
+ assert dispatch.running
+
+
+ def test_stop_idempotent(loop, dispatch):
+ loop.run_until_complete(dispatch.start())
+ assert dispatch.running
+
+ loop.run_until_complete(dispatch.stop())
+ assert not dispatch.running
+
+ loop.run_until_complete(dispatch.stop())
+ assert not dispatch.running
+
+
+ def test_clean_stop(loop, dispatch):
+ """ Stop ensures the main dispatch loop shuts down gracefully """
+ loop.run_until_complete(dispatch.start())
+ loop.run_until_complete(dispatch.stop())
+
+
- def test_unknown_event(loop):
+ def test_unknown_event(dispatch):
"""
An exception should be thrown when trying to register a
handler for an unknown event.
"""
- dispatch = accordian.Dispatch(loop=loop)
with pytest.raises(ValueError):
dispatch.on("unknown")
- def test_clean_stop(loop):
- dispatch = accordian.Dispatch(loop=loop)
+ def test_register(dispatch):
+ event = "my-event"
+ params = ["x", "y", "z"]
+ dispatch.register(event, params)
+ assert "my-event" in dispatch._handlers
+
+
+ def test_register_twice(dispatch):
+ event = "my-event"
+ params = ["x", "y", "z"]
+ dispatch.register(event, params)
+
+ with pytest.raises(ValueError):
+ dispatch.register(event, params)
+
+
+ def test_register_running(dispatch, loop):
+ event = "my-event"
+ params = ["x", "y", "z"]
loop.run_until_complete(dispatch.start())
+
+ with pytest.raises(RuntimeError):
+ dispatch.register(event, params)
loop.run_until_complete(dispatch.stop())
+
+ def test_unregister_unknown(dispatch):
+ assert "unknown-event" not in dispatch._handlers
+ dispatch.unregister("unknown-event")
+
+
+ def test_unregister_running(dispatch, loop):
+ event = "my-event"
+ params = ["x", "y", "z"]
+ dispatch.register(event, params)
+
+ loop.run_until_complete(dispatch.start())
+ with pytest.raises(RuntimeError):
+ dispatch.unregister(event)
+ loop.run_until_complete(dispatch.stop())
+
+
+ def test_single_handler(dispatch, loop):
+ event = "my-event"
+ params = {"x": 4, "y": 5, "z": 6}
+ dispatch.register(event, params.keys())
+
+ called = False
+
+ @dispatch.on(event)
+ async def handle(x, y):
+ nonlocal called
+ called = True
+
+ for task in [
+ dispatch.start(),
+ dispatch.trigger(event, params),
+ dispatch.stop()
+ ]:
+ loop.run_until_complete(task)
+ assert called
+
|
Test dispatch (un)register, basic handler
|
## Code Before:
import accordian
import pytest
def test_unknown_event(loop):
"""
An exception should be thrown when trying to register a
handler for an unknown event.
"""
dispatch = accordian.Dispatch(loop=loop)
with pytest.raises(ValueError):
dispatch.on("unknown")
def test_clean_stop(loop):
dispatch = accordian.Dispatch(loop=loop)
loop.run_until_complete(dispatch.start())
loop.run_until_complete(dispatch.stop())
## Instruction:
Test dispatch (un)register, basic handler
## Code After:
import pytest
def test_start_idempotent(loop, dispatch):
loop.run_until_complete(dispatch.start())
assert dispatch.running
loop.run_until_complete(dispatch.start())
assert dispatch.running
def test_stop_idempotent(loop, dispatch):
loop.run_until_complete(dispatch.start())
assert dispatch.running
loop.run_until_complete(dispatch.stop())
assert not dispatch.running
loop.run_until_complete(dispatch.stop())
assert not dispatch.running
def test_clean_stop(loop, dispatch):
""" Stop ensures the main dispatch loop shuts down gracefully """
loop.run_until_complete(dispatch.start())
loop.run_until_complete(dispatch.stop())
def test_unknown_event(dispatch):
"""
An exception should be thrown when trying to register a
handler for an unknown event.
"""
with pytest.raises(ValueError):
dispatch.on("unknown")
def test_register(dispatch):
event = "my-event"
params = ["x", "y", "z"]
dispatch.register(event, params)
assert "my-event" in dispatch._handlers
def test_register_twice(dispatch):
event = "my-event"
params = ["x", "y", "z"]
dispatch.register(event, params)
with pytest.raises(ValueError):
dispatch.register(event, params)
def test_register_running(dispatch, loop):
event = "my-event"
params = ["x", "y", "z"]
loop.run_until_complete(dispatch.start())
with pytest.raises(RuntimeError):
dispatch.register(event, params)
loop.run_until_complete(dispatch.stop())
def test_unregister_unknown(dispatch):
assert "unknown-event" not in dispatch._handlers
dispatch.unregister("unknown-event")
def test_unregister_running(dispatch, loop):
event = "my-event"
params = ["x", "y", "z"]
dispatch.register(event, params)
loop.run_until_complete(dispatch.start())
with pytest.raises(RuntimeError):
dispatch.unregister(event)
loop.run_until_complete(dispatch.stop())
def test_single_handler(dispatch, loop):
event = "my-event"
params = {"x": 4, "y": 5, "z": 6}
dispatch.register(event, params.keys())
called = False
@dispatch.on(event)
async def handle(x, y):
nonlocal called
called = True
for task in [
dispatch.start(),
dispatch.trigger(event, params),
dispatch.stop()
]:
loop.run_until_complete(task)
assert called
|
- import accordian
import pytest
+ def test_start_idempotent(loop, dispatch):
+ loop.run_until_complete(dispatch.start())
+ assert dispatch.running
+
+ loop.run_until_complete(dispatch.start())
+ assert dispatch.running
+
+
+ def test_stop_idempotent(loop, dispatch):
+ loop.run_until_complete(dispatch.start())
+ assert dispatch.running
+
+ loop.run_until_complete(dispatch.stop())
+ assert not dispatch.running
+
+ loop.run_until_complete(dispatch.stop())
+ assert not dispatch.running
+
+
+ def test_clean_stop(loop, dispatch):
+ """ Stop ensures the main dispatch loop shuts down gracefully """
+ loop.run_until_complete(dispatch.start())
+ loop.run_until_complete(dispatch.stop())
+
+
- def test_unknown_event(loop):
? ^^^
+ def test_unknown_event(dispatch):
? ^^^ ++++
"""
An exception should be thrown when trying to register a
handler for an unknown event.
"""
- dispatch = accordian.Dispatch(loop=loop)
with pytest.raises(ValueError):
dispatch.on("unknown")
- def test_clean_stop(loop):
- dispatch = accordian.Dispatch(loop=loop)
+ def test_register(dispatch):
+ event = "my-event"
+ params = ["x", "y", "z"]
+ dispatch.register(event, params)
+ assert "my-event" in dispatch._handlers
+
+
+ def test_register_twice(dispatch):
+ event = "my-event"
+ params = ["x", "y", "z"]
+ dispatch.register(event, params)
+
+ with pytest.raises(ValueError):
+ dispatch.register(event, params)
+
+
+ def test_register_running(dispatch, loop):
+ event = "my-event"
+ params = ["x", "y", "z"]
loop.run_until_complete(dispatch.start())
+
+ with pytest.raises(RuntimeError):
+ dispatch.register(event, params)
loop.run_until_complete(dispatch.stop())
+
+
+ def test_unregister_unknown(dispatch):
+ assert "unknown-event" not in dispatch._handlers
+ dispatch.unregister("unknown-event")
+
+
+ def test_unregister_running(dispatch, loop):
+ event = "my-event"
+ params = ["x", "y", "z"]
+ dispatch.register(event, params)
+
+ loop.run_until_complete(dispatch.start())
+ with pytest.raises(RuntimeError):
+ dispatch.unregister(event)
+ loop.run_until_complete(dispatch.stop())
+
+
+ def test_single_handler(dispatch, loop):
+ event = "my-event"
+ params = {"x": 4, "y": 5, "z": 6}
+ dispatch.register(event, params.keys())
+
+ called = False
+
+ @dispatch.on(event)
+ async def handle(x, y):
+ nonlocal called
+ called = True
+
+ for task in [
+ dispatch.start(),
+ dispatch.trigger(event, params),
+ dispatch.stop()
+ ]:
+ loop.run_until_complete(task)
+ assert called
|
397eb3ee376acec005a8d7b5a4c2b2e0193a938d
|
tests/test_bookmarks.py
|
tests/test_bookmarks.py
|
import bookmarks
import unittest
class FlaskrTestCase(unittest.TestCase):
def setUp(self):
self.app = bookmarks.app.test_client()
# with bookmarks.app.app_context():
bookmarks.database.init_db()
def tearDown(self):
# with bookmarks.app.app_context():
bookmarks.database.db_session.remove()
bookmarks.database.Base.metadata.drop_all(
bind=bookmarks.database.engine)
def test_empty_db(self):
rv = self.app.get('/')
assert b'There aren\'t any bookmarks yet.' in rv.data
def register(self, username, name, email, password):
return self.app.post('/register_user/', data=dict(
username=username,
name=name,
email=email,
password=password,
confirm=password
), follow_redirects=True)
def login(self, username, password):
return self.app.post('/login', data=dict(
username=username,
password=password,
confirm=password
), follow_redirects=True)
def logout(self):
return self.app.get('/logout', follow_redirects=True)
def test_register(self):
username = 'byanofsky'
name = 'Brandon Yanofsky'
email = '[email protected]'
password = 'Brandon123'
rv = self.register(username, name, email, password)
# print(rv.data)
assert (b'Successfully registered ' in rv.data)
if __name__ == '__main__':
unittest.main()
|
import bookmarks
import unittest
class FlaskrTestCase(unittest.TestCase):
def setUp(self):
self.app = bookmarks.app.test_client()
# with bookmarks.app.app_context():
bookmarks.database.init_db()
def tearDown(self):
# with bookmarks.app.app_context():
bookmarks.database.db_session.remove()
bookmarks.database.Base.metadata.drop_all(
bind=bookmarks.database.engine)
def test_empty_db(self):
rv = self.app.get('/')
assert b'There aren\'t any bookmarks yet.' in rv.data
def register(self, username, name, email, password, confirm=None):
return self.app.post('/register_user/', data=dict(
username=username,
name=name,
email=email,
password=password,
confirm=confirm
), follow_redirects=True)
def login(self, username, password):
return self.app.post('/login', data=dict(
username=username,
password=password,
confirm=password
), follow_redirects=True)
def logout(self):
return self.app.get('/logout', follow_redirects=True)
def test_register(self):
username = 'byanofsky'
name = 'Brandon Yanofsky'
email = '[email protected]'
password = 'Brandon123'
rv = self.register(username, name, email, password)
# print(rv.data)
assert (b'Successfully registered ' in rv.data)
if __name__ == '__main__':
unittest.main()
|
Add param for confirm field on register test func
|
Add param for confirm field on register test func
|
Python
|
apache-2.0
|
byanofsky/bookmarks,byanofsky/bookmarks,byanofsky/bookmarks
|
import bookmarks
import unittest
class FlaskrTestCase(unittest.TestCase):
def setUp(self):
self.app = bookmarks.app.test_client()
# with bookmarks.app.app_context():
bookmarks.database.init_db()
def tearDown(self):
# with bookmarks.app.app_context():
bookmarks.database.db_session.remove()
bookmarks.database.Base.metadata.drop_all(
bind=bookmarks.database.engine)
def test_empty_db(self):
rv = self.app.get('/')
assert b'There aren\'t any bookmarks yet.' in rv.data
- def register(self, username, name, email, password):
+ def register(self, username, name, email, password, confirm=None):
return self.app.post('/register_user/', data=dict(
username=username,
name=name,
email=email,
password=password,
- confirm=password
+ confirm=confirm
), follow_redirects=True)
def login(self, username, password):
return self.app.post('/login', data=dict(
username=username,
password=password,
confirm=password
), follow_redirects=True)
def logout(self):
return self.app.get('/logout', follow_redirects=True)
def test_register(self):
username = 'byanofsky'
name = 'Brandon Yanofsky'
email = '[email protected]'
password = 'Brandon123'
rv = self.register(username, name, email, password)
# print(rv.data)
assert (b'Successfully registered ' in rv.data)
if __name__ == '__main__':
unittest.main()
|
Add param for confirm field on register test func
|
## Code Before:
import bookmarks
import unittest
class FlaskrTestCase(unittest.TestCase):
def setUp(self):
self.app = bookmarks.app.test_client()
# with bookmarks.app.app_context():
bookmarks.database.init_db()
def tearDown(self):
# with bookmarks.app.app_context():
bookmarks.database.db_session.remove()
bookmarks.database.Base.metadata.drop_all(
bind=bookmarks.database.engine)
def test_empty_db(self):
rv = self.app.get('/')
assert b'There aren\'t any bookmarks yet.' in rv.data
def register(self, username, name, email, password):
return self.app.post('/register_user/', data=dict(
username=username,
name=name,
email=email,
password=password,
confirm=password
), follow_redirects=True)
def login(self, username, password):
return self.app.post('/login', data=dict(
username=username,
password=password,
confirm=password
), follow_redirects=True)
def logout(self):
return self.app.get('/logout', follow_redirects=True)
def test_register(self):
username = 'byanofsky'
name = 'Brandon Yanofsky'
email = '[email protected]'
password = 'Brandon123'
rv = self.register(username, name, email, password)
# print(rv.data)
assert (b'Successfully registered ' in rv.data)
if __name__ == '__main__':
unittest.main()
## Instruction:
Add param for confirm field on register test func
## Code After:
import bookmarks
import unittest
class FlaskrTestCase(unittest.TestCase):
def setUp(self):
self.app = bookmarks.app.test_client()
# with bookmarks.app.app_context():
bookmarks.database.init_db()
def tearDown(self):
# with bookmarks.app.app_context():
bookmarks.database.db_session.remove()
bookmarks.database.Base.metadata.drop_all(
bind=bookmarks.database.engine)
def test_empty_db(self):
rv = self.app.get('/')
assert b'There aren\'t any bookmarks yet.' in rv.data
def register(self, username, name, email, password, confirm=None):
return self.app.post('/register_user/', data=dict(
username=username,
name=name,
email=email,
password=password,
confirm=confirm
), follow_redirects=True)
def login(self, username, password):
return self.app.post('/login', data=dict(
username=username,
password=password,
confirm=password
), follow_redirects=True)
def logout(self):
return self.app.get('/logout', follow_redirects=True)
def test_register(self):
username = 'byanofsky'
name = 'Brandon Yanofsky'
email = '[email protected]'
password = 'Brandon123'
rv = self.register(username, name, email, password)
# print(rv.data)
assert (b'Successfully registered ' in rv.data)
if __name__ == '__main__':
unittest.main()
|
import bookmarks
import unittest
class FlaskrTestCase(unittest.TestCase):
def setUp(self):
self.app = bookmarks.app.test_client()
# with bookmarks.app.app_context():
bookmarks.database.init_db()
def tearDown(self):
# with bookmarks.app.app_context():
bookmarks.database.db_session.remove()
bookmarks.database.Base.metadata.drop_all(
bind=bookmarks.database.engine)
def test_empty_db(self):
rv = self.app.get('/')
assert b'There aren\'t any bookmarks yet.' in rv.data
- def register(self, username, name, email, password):
+ def register(self, username, name, email, password, confirm=None):
? ++++++++++++++
return self.app.post('/register_user/', data=dict(
username=username,
name=name,
email=email,
password=password,
- confirm=password
? ^^^^^ ^
+ confirm=confirm
? ^ +++ ^
), follow_redirects=True)
def login(self, username, password):
return self.app.post('/login', data=dict(
username=username,
password=password,
confirm=password
), follow_redirects=True)
def logout(self):
return self.app.get('/logout', follow_redirects=True)
def test_register(self):
username = 'byanofsky'
name = 'Brandon Yanofsky'
email = '[email protected]'
password = 'Brandon123'
rv = self.register(username, name, email, password)
# print(rv.data)
assert (b'Successfully registered ' in rv.data)
if __name__ == '__main__':
unittest.main()
|
23f734419ac3814e09ef3763fb666a3620ac1c01
|
scripts/osfstorage/correct_moved_node_settings.py
|
scripts/osfstorage/correct_moved_node_settings.py
|
import sys
import logging
from scripts import utils as script_utils
from framework.transactions.context import TokuTransaction
from website.app import init_app
from website.addons.osfstorage import model
logger = logging.getLogger(__name__)
def do_migration():
for node_settings in model.OsfStorageNodeSettings.find():
for child in iter_children(node_settings.root_node):
if child.node_settings != node_settings:
logger.info('Update node_settings for {!r} in project {!r}'.format(child, node_settings.owner,))
child.node_settings = node_settings
child.save()
def iter_children(file_node):
to_go = [file_node]
while to_go:
for child in to_go.pop(0).children:
if child.is_folder:
to_go.append(child)
yield child
def main(dry=True):
init_app(set_backends=True, routes=False) # Sets the storage backends on all models
with TokuTransaction():
do_migration()
if dry:
raise Exception('Abort Transaction - Dry Run')
if __name__ == '__main__':
dry = 'dry' in sys.argv
if not dry:
script_utils.add_file_logger(logger, __file__)
main(dry=dry)
|
import sys
import logging
from scripts import utils as script_utils
from framework.transactions.context import TokuTransaction
from website.app import init_app
from website.addons.osfstorage import model
logger = logging.getLogger(__name__)
def do_migration():
count = 0
errored = 0
for node_settings in model.OsfStorageNodeSettings.find():
for child in iter_children(node_settings.root_node):
if child.node_settings != node_settings:
logger.info('Update node_settings for {!r} in project {!r}'.format(child, node_settings.owner))
child.node_settings = node_settings
try:
child.save()
except Exception as err:
errored += 1
logger.error('Error occurred while updating {!r}'.format(child))
logger.exception(err)
logger.error('Skipping...')
else:
count += 1
logger.info('Updated: {} file nodes'.format(count))
logger.info('Errored: {} file nodes'.format(errored))
def iter_children(file_node):
to_go = [file_node]
while to_go:
for child in to_go.pop(0).children:
if child.is_folder:
to_go.append(child)
yield child
def main(dry=True):
init_app(set_backends=True, routes=False) # Sets the storage backends on all models
with TokuTransaction():
do_migration()
if dry:
raise Exception('Abort Transaction - Dry Run')
if __name__ == '__main__':
dry = 'dry' in sys.argv
if not dry:
script_utils.add_file_logger(logger, __file__)
main(dry=dry)
|
Add count and allow errors to pass for now
|
Add count and allow errors to pass for now
[skip ci]
|
Python
|
apache-2.0
|
pattisdr/osf.io,abought/osf.io,DanielSBrown/osf.io,samanehsan/osf.io,billyhunt/osf.io,brandonPurvis/osf.io,samchrisinger/osf.io,brandonPurvis/osf.io,mattclark/osf.io,emetsger/osf.io,brianjgeiger/osf.io,mfraezz/osf.io,jmcarp/osf.io,acshi/osf.io,crcresearch/osf.io,sbt9uc/osf.io,mluke93/osf.io,haoyuchen1992/osf.io,acshi/osf.io,laurenrevere/osf.io,HarryRybacki/osf.io,felliott/osf.io,KAsante95/osf.io,dplorimer/osf,GageGaskins/osf.io,njantrania/osf.io,leb2dg/osf.io,RomanZWang/osf.io,cosenal/osf.io,RomanZWang/osf.io,zachjanicki/osf.io,HalcyonChimera/osf.io,mluo613/osf.io,leb2dg/osf.io,SSJohns/osf.io,brandonPurvis/osf.io,crcresearch/osf.io,caseyrollins/osf.io,samanehsan/osf.io,mluo613/osf.io,caneruguz/osf.io,erinspace/osf.io,billyhunt/osf.io,amyshi188/osf.io,sbt9uc/osf.io,danielneis/osf.io,haoyuchen1992/osf.io,cslzchen/osf.io,caneruguz/osf.io,sloria/osf.io,MerlinZhang/osf.io,samchrisinger/osf.io,CenterForOpenScience/osf.io,caseyrollins/osf.io,mluo613/osf.io,ZobairAlijan/osf.io,mfraezz/osf.io,laurenrevere/osf.io,SSJohns/osf.io,ticklemepierce/osf.io,adlius/osf.io,monikagrabowska/osf.io,kch8qx/osf.io,monikagrabowska/osf.io,njantrania/osf.io,acshi/osf.io,aaxelb/osf.io,KAsante95/osf.io,ZobairAlijan/osf.io,doublebits/osf.io,Ghalko/osf.io,amyshi188/osf.io,caseyrygt/osf.io,felliott/osf.io,Nesiehr/osf.io,MerlinZhang/osf.io,DanielSBrown/osf.io,jnayak1/osf.io,wearpants/osf.io,adlius/osf.io,cwisecarver/osf.io,doublebits/osf.io,TomHeatwole/osf.io,ckc6cz/osf.io,KAsante95/osf.io,RomanZWang/osf.io,caneruguz/osf.io,ckc6cz/osf.io,dplorimer/osf,Johnetordoff/osf.io,hmoco/osf.io,jnayak1/osf.io,caseyrollins/osf.io,petermalcolm/osf.io,brandonPurvis/osf.io,caseyrygt/osf.io,GageGaskins/osf.io,baylee-d/osf.io,Nesiehr/osf.io,ZobairAlijan/osf.io,cwisecarver/osf.io,caseyrygt/osf.io,emetsger/osf.io,cslzchen/osf.io,lyndsysimon/osf.io,reinaH/osf.io,abought/osf.io,billyhunt/osf.io,HarryRybacki/osf.io,emetsger/osf.io,GageGaskins/osf.io,monikagrabowska/osf.io,emetsger/osf.io,HalcyonChimera/osf.io,samchrisinger/osf.io,cldershem/osf.io,CenterForOpenScience/osf.io,saradbowman/osf.io,asanfilippo7/osf.io,cosenal/osf.io,leb2dg/osf.io,kch8qx/osf.io,danielneis/osf.io,icereval/osf.io,chennan47/osf.io,kch8qx/osf.io,MerlinZhang/osf.io,mattclark/osf.io,chrisseto/osf.io,Nesiehr/osf.io,zamattiac/osf.io,chrisseto/osf.io,ckc6cz/osf.io,jolene-esposito/osf.io,ticklemepierce/osf.io,saradbowman/osf.io,rdhyee/osf.io,MerlinZhang/osf.io,alexschiller/osf.io,samanehsan/osf.io,TomHeatwole/osf.io,arpitar/osf.io,petermalcolm/osf.io,zamattiac/osf.io,rdhyee/osf.io,caneruguz/osf.io,samchrisinger/osf.io,bdyetton/prettychart,acshi/osf.io,cslzchen/osf.io,kch8qx/osf.io,arpitar/osf.io,TomHeatwole/osf.io,DanielSBrown/osf.io,baylee-d/osf.io,zachjanicki/osf.io,felliott/osf.io,mattclark/osf.io,ckc6cz/osf.io,felliott/osf.io,RomanZWang/osf.io,bdyetton/prettychart,reinaH/osf.io,baylee-d/osf.io,alexschiller/osf.io,haoyuchen1992/osf.io,Johnetordoff/osf.io,brianjgeiger/osf.io,amyshi188/osf.io,leb2dg/osf.io,billyhunt/osf.io,aaxelb/osf.io,erinspace/osf.io,brianjgeiger/osf.io,acshi/osf.io,jnayak1/osf.io,amyshi188/osf.io,pattisdr/osf.io,petermalcolm/osf.io,icereval/osf.io,HarryRybacki/osf.io,CenterForOpenScience/osf.io,jmcarp/osf.io,alexschiller/osf.io,kch8qx/osf.io,rdhyee/osf.io,hmoco/osf.io,chennan47/osf.io,KAsante95/osf.io,Johnetordoff/osf.io,adlius/osf.io,KAsante95/osf.io,SSJohns/osf.io,cslzchen/osf.io,hmoco/osf.io,Ghalko/osf.io,erinspace/osf.io,HalcyonChimera/osf.io,njantrania/osf.io,icereval/osf.io,laurenrevere/osf.io,HalcyonChimera/osf.io,ticklemepierce/osf.io,jolene-esposit
o/osf.io,abought/osf.io,lyndsysimon/osf.io,jmcarp/osf.io,asanfilippo7/osf.io,zachjanicki/osf.io,caseyrygt/osf.io,lyndsysimon/osf.io,zamattiac/osf.io,GageGaskins/osf.io,lyndsysimon/osf.io,kwierman/osf.io,petermalcolm/osf.io,doublebits/osf.io,SSJohns/osf.io,asanfilippo7/osf.io,doublebits/osf.io,TomBaxter/osf.io,doublebits/osf.io,TomBaxter/osf.io,rdhyee/osf.io,chrisseto/osf.io,monikagrabowska/osf.io,sloria/osf.io,dplorimer/osf,arpitar/osf.io,binoculars/osf.io,danielneis/osf.io,dplorimer/osf,mluke93/osf.io,hmoco/osf.io,mluo613/osf.io,ticklemepierce/osf.io,jolene-esposito/osf.io,HarryRybacki/osf.io,chrisseto/osf.io,haoyuchen1992/osf.io,bdyetton/prettychart,RomanZWang/osf.io,mluke93/osf.io,brianjgeiger/osf.io,binoculars/osf.io,wearpants/osf.io,reinaH/osf.io,mfraezz/osf.io,kwierman/osf.io,Ghalko/osf.io,binoculars/osf.io,wearpants/osf.io,kwierman/osf.io,samanehsan/osf.io,TomBaxter/osf.io,danielneis/osf.io,jmcarp/osf.io,asanfilippo7/osf.io,mluo613/osf.io,cosenal/osf.io,arpitar/osf.io,reinaH/osf.io,mfraezz/osf.io,abought/osf.io,mluke93/osf.io,cwisecarver/osf.io,wearpants/osf.io,njantrania/osf.io,sbt9uc/osf.io,sloria/osf.io,zachjanicki/osf.io,brandonPurvis/osf.io,Nesiehr/osf.io,cldershem/osf.io,pattisdr/osf.io,jolene-esposito/osf.io,sbt9uc/osf.io,alexschiller/osf.io,cwisecarver/osf.io,monikagrabowska/osf.io,cldershem/osf.io,zamattiac/osf.io,cldershem/osf.io,jnayak1/osf.io,cosenal/osf.io,Ghalko/osf.io,aaxelb/osf.io,TomHeatwole/osf.io,alexschiller/osf.io,CenterForOpenScience/osf.io,ZobairAlijan/osf.io,Johnetordoff/osf.io,bdyetton/prettychart,adlius/osf.io,DanielSBrown/osf.io,billyhunt/osf.io,kwierman/osf.io,aaxelb/osf.io,GageGaskins/osf.io,crcresearch/osf.io,chennan47/osf.io
|
import sys
import logging
from scripts import utils as script_utils
from framework.transactions.context import TokuTransaction
from website.app import init_app
from website.addons.osfstorage import model
logger = logging.getLogger(__name__)
def do_migration():
+ count = 0
+ errored = 0
for node_settings in model.OsfStorageNodeSettings.find():
for child in iter_children(node_settings.root_node):
if child.node_settings != node_settings:
- logger.info('Update node_settings for {!r} in project {!r}'.format(child, node_settings.owner,))
+ logger.info('Update node_settings for {!r} in project {!r}'.format(child, node_settings.owner))
child.node_settings = node_settings
+ try:
- child.save()
+ child.save()
+ except Exception as err:
+ errored += 1
+ logger.error('Error occurred while updating {!r}'.format(child))
+ logger.exception(err)
+ logger.error('Skipping...')
+ else:
+ count += 1
+ logger.info('Updated: {} file nodes'.format(count))
+ logger.info('Errored: {} file nodes'.format(errored))
def iter_children(file_node):
to_go = [file_node]
while to_go:
for child in to_go.pop(0).children:
if child.is_folder:
to_go.append(child)
yield child
def main(dry=True):
init_app(set_backends=True, routes=False) # Sets the storage backends on all models
with TokuTransaction():
do_migration()
if dry:
raise Exception('Abort Transaction - Dry Run')
if __name__ == '__main__':
dry = 'dry' in sys.argv
if not dry:
script_utils.add_file_logger(logger, __file__)
main(dry=dry)
|
Add count and allow errors to pass for now
|
## Code Before:
import sys
import logging
from scripts import utils as script_utils
from framework.transactions.context import TokuTransaction
from website.app import init_app
from website.addons.osfstorage import model
logger = logging.getLogger(__name__)
def do_migration():
for node_settings in model.OsfStorageNodeSettings.find():
for child in iter_children(node_settings.root_node):
if child.node_settings != node_settings:
logger.info('Update node_settings for {!r} in project {!r}'.format(child, node_settings.owner,))
child.node_settings = node_settings
child.save()
def iter_children(file_node):
to_go = [file_node]
while to_go:
for child in to_go.pop(0).children:
if child.is_folder:
to_go.append(child)
yield child
def main(dry=True):
init_app(set_backends=True, routes=False) # Sets the storage backends on all models
with TokuTransaction():
do_migration()
if dry:
raise Exception('Abort Transaction - Dry Run')
if __name__ == '__main__':
dry = 'dry' in sys.argv
if not dry:
script_utils.add_file_logger(logger, __file__)
main(dry=dry)
## Instruction:
Add count and allow errors to pass for now
## Code After:
import sys
import logging
from scripts import utils as script_utils
from framework.transactions.context import TokuTransaction
from website.app import init_app
from website.addons.osfstorage import model
logger = logging.getLogger(__name__)
def do_migration():
count = 0
errored = 0
for node_settings in model.OsfStorageNodeSettings.find():
for child in iter_children(node_settings.root_node):
if child.node_settings != node_settings:
logger.info('Update node_settings for {!r} in project {!r}'.format(child, node_settings.owner))
child.node_settings = node_settings
try:
child.save()
except Exception as err:
errored += 1
logger.error('Error occurred while updating {!r}'.format(child))
logger.exception(err)
logger.error('Skipping...')
else:
count += 1
logger.info('Updated: {} file nodes'.format(count))
logger.info('Errored: {} file nodes'.format(errored))
def iter_children(file_node):
to_go = [file_node]
while to_go:
for child in to_go.pop(0).children:
if child.is_folder:
to_go.append(child)
yield child
def main(dry=True):
init_app(set_backends=True, routes=False) # Sets the storage backends on all models
with TokuTransaction():
do_migration()
if dry:
raise Exception('Abort Transaction - Dry Run')
if __name__ == '__main__':
dry = 'dry' in sys.argv
if not dry:
script_utils.add_file_logger(logger, __file__)
main(dry=dry)
|
import sys
import logging
from scripts import utils as script_utils
from framework.transactions.context import TokuTransaction
from website.app import init_app
from website.addons.osfstorage import model
logger = logging.getLogger(__name__)
def do_migration():
+ count = 0
+ errored = 0
for node_settings in model.OsfStorageNodeSettings.find():
for child in iter_children(node_settings.root_node):
if child.node_settings != node_settings:
- logger.info('Update node_settings for {!r} in project {!r}'.format(child, node_settings.owner,))
? -
+ logger.info('Update node_settings for {!r} in project {!r}'.format(child, node_settings.owner))
child.node_settings = node_settings
+ try:
- child.save()
+ child.save()
? ++++
+ except Exception as err:
+ errored += 1
+ logger.error('Error occurred while updating {!r}'.format(child))
+ logger.exception(err)
+ logger.error('Skipping...')
+ else:
+ count += 1
+ logger.info('Updated: {} file nodes'.format(count))
+ logger.info('Errored: {} file nodes'.format(errored))
def iter_children(file_node):
to_go = [file_node]
while to_go:
for child in to_go.pop(0).children:
if child.is_folder:
to_go.append(child)
yield child
def main(dry=True):
init_app(set_backends=True, routes=False) # Sets the storage backends on all models
with TokuTransaction():
do_migration()
if dry:
raise Exception('Abort Transaction - Dry Run')
if __name__ == '__main__':
dry = 'dry' in sys.argv
if not dry:
script_utils.add_file_logger(logger, __file__)
main(dry=dry)
|
c1f5b9e3bfa96762fdbe9f4ca54b3851b38294da
|
test/__init__.py
|
test/__init__.py
|
import glob, os.path, sys
# Add path to hiredis.so load path
path = glob.glob("build/lib*/hiredis/*.so")[0]
sys.path.insert(0, os.path.dirname(path))
from unittest import *
from . import reader
def tests():
suite = TestSuite()
suite.addTest(makeSuite(reader.ReaderTest))
return suite
|
import glob, os.path, sys
version = sys.version.split(" ")[0]
majorminor = version[0:3]
# Add path to hiredis.so load path
path = glob.glob("build/lib*-%s/hiredis/*.so" % majorminor)[0]
sys.path.insert(0, os.path.dirname(path))
from unittest import *
from . import reader
def tests():
suite = TestSuite()
suite.addTest(makeSuite(reader.ReaderTest))
return suite
|
Add versioned path for dynamic library lookup
|
Add versioned path for dynamic library lookup
|
Python
|
bsd-3-clause
|
badboy/hiredis-py-win,charsyam/hiredis-py,badboy/hiredis-py-win,redis/hiredis-py,charsyam/hiredis-py,badboy/hiredis-py-win,redis/hiredis-py
|
import glob, os.path, sys
+ version = sys.version.split(" ")[0]
+ majorminor = version[0:3]
+
# Add path to hiredis.so load path
- path = glob.glob("build/lib*/hiredis/*.so")[0]
+ path = glob.glob("build/lib*-%s/hiredis/*.so" % majorminor)[0]
sys.path.insert(0, os.path.dirname(path))
from unittest import *
from . import reader
def tests():
suite = TestSuite()
suite.addTest(makeSuite(reader.ReaderTest))
return suite
|
Add versioned path for dynamic library lookup
|
## Code Before:
import glob, os.path, sys
# Add path to hiredis.so load path
path = glob.glob("build/lib*/hiredis/*.so")[0]
sys.path.insert(0, os.path.dirname(path))
from unittest import *
from . import reader
def tests():
suite = TestSuite()
suite.addTest(makeSuite(reader.ReaderTest))
return suite
## Instruction:
Add versioned path for dynamic library lookup
## Code After:
import glob, os.path, sys
version = sys.version.split(" ")[0]
majorminor = version[0:3]
# Add path to hiredis.so load path
path = glob.glob("build/lib*-%s/hiredis/*.so" % majorminor)[0]
sys.path.insert(0, os.path.dirname(path))
from unittest import *
from . import reader
def tests():
suite = TestSuite()
suite.addTest(makeSuite(reader.ReaderTest))
return suite
|
import glob, os.path, sys
+ version = sys.version.split(" ")[0]
+ majorminor = version[0:3]
+
# Add path to hiredis.so load path
- path = glob.glob("build/lib*/hiredis/*.so")[0]
+ path = glob.glob("build/lib*-%s/hiredis/*.so" % majorminor)[0]
? +++ +++++++++++++
sys.path.insert(0, os.path.dirname(path))
from unittest import *
from . import reader
def tests():
suite = TestSuite()
suite.addTest(makeSuite(reader.ReaderTest))
return suite
|
713fcc3f86b4be4d35f0c5ba081a4f786648320a
|
vim/pythonx/elixir_helpers.py
|
vim/pythonx/elixir_helpers.py
|
import re
_DASHES_AND_UNDERSCORES = re.compile("[-_]")
_MODULE_FILEPATH = re.compile(r"lib\/([^\/]+)\/([\w+\/]+)*\/([^\/]+).ex")
def closing_character(tabstop):
"""
Return closing character for a tabstop containing an opening character.
"""
if tabstop.startswith("("):
return ")"
if tabstop.startswith("{"):
return "}"
if tabstop.startswith("["):
return "]"
if tabstop.startswith("\""):
return "\""
return ""
def module_path_match(path, regex=_MODULE_FILEPATH):
"""
Return match data for an Elixir module from a file path.
"""
return re.search(regex, path)
def outer_module_name(path):
"""
Return name for an outer Elixir module from a file path.
"""
outer_module_path = module_path_match(path).group(1)
return to_module_name(outer_module_path)
def to_module_name(string):
"""
Convert string into an Elixir module name
"""
return (
re.sub(_DASHES_AND_UNDERSCORES, " ", string)
.title()
.replace(" ", "")
.replace(".ex", "")
)
|
import re
_DASHES_AND_UNDERSCORES = re.compile("[-_]")
_MODULE_FILEPATH = re.compile(r"lib\/([^\/]+)\/([\w+\/]+)*\/([^\/]+).ex")
_CLOSING_CHARACTERS = {
"(": ")",
"{": "}",
"[": "]",
"\"": "\""
}
def closing_character(tabstop):
"""
Return closing character for a tabstop containing an opening character.
"""
if tabstop:
return _CLOSING_CHARACTERS.get(tabstop[0], "")
return ""
def module_path_match(path, regex=_MODULE_FILEPATH):
"""
Return match data for an Elixir module from a file path.
"""
return re.search(regex, path)
def outer_module_name(path):
"""
Return name for an outer Elixir module from a file path.
"""
outer_module_path = module_path_match(path).group(1)
return to_module_name(outer_module_path)
def to_module_name(string):
"""
Convert string into an Elixir module name
"""
return (
re.sub(_DASHES_AND_UNDERSCORES, " ", string)
.title()
.replace(" ", "")
.replace(".ex", "")
)
|
Refactor python if statement into dictionary
|
Refactor python if statement into dictionary
|
Python
|
mit
|
paulfioravanti/dotfiles,paulfioravanti/dotfiles,paulfioravanti/dotfiles
|
import re
_DASHES_AND_UNDERSCORES = re.compile("[-_]")
_MODULE_FILEPATH = re.compile(r"lib\/([^\/]+)\/([\w+\/]+)*\/([^\/]+).ex")
+ _CLOSING_CHARACTERS = {
+ "(": ")",
+ "{": "}",
+ "[": "]",
+ "\"": "\""
+ }
def closing_character(tabstop):
"""
Return closing character for a tabstop containing an opening character.
"""
+ if tabstop:
+ return _CLOSING_CHARACTERS.get(tabstop[0], "")
- if tabstop.startswith("("):
- return ")"
- if tabstop.startswith("{"):
- return "}"
- if tabstop.startswith("["):
- return "]"
- if tabstop.startswith("\""):
- return "\""
-
return ""
def module_path_match(path, regex=_MODULE_FILEPATH):
"""
Return match data for an Elixir module from a file path.
"""
return re.search(regex, path)
def outer_module_name(path):
"""
Return name for an outer Elixir module from a file path.
"""
outer_module_path = module_path_match(path).group(1)
return to_module_name(outer_module_path)
def to_module_name(string):
"""
Convert string into an Elixir module name
"""
return (
re.sub(_DASHES_AND_UNDERSCORES, " ", string)
.title()
.replace(" ", "")
.replace(".ex", "")
)
|
Refactor python if statement into dictionary
|
## Code Before:
import re
_DASHES_AND_UNDERSCORES = re.compile("[-_]")
_MODULE_FILEPATH = re.compile(r"lib\/([^\/]+)\/([\w+\/]+)*\/([^\/]+).ex")
def closing_character(tabstop):
"""
Return closing character for a tabstop containing an opening character.
"""
if tabstop.startswith("("):
return ")"
if tabstop.startswith("{"):
return "}"
if tabstop.startswith("["):
return "]"
if tabstop.startswith("\""):
return "\""
return ""
def module_path_match(path, regex=_MODULE_FILEPATH):
"""
Return match data for an Elixir module from a file path.
"""
return re.search(regex, path)
def outer_module_name(path):
"""
Return name for an outer Elixir module from a file path.
"""
outer_module_path = module_path_match(path).group(1)
return to_module_name(outer_module_path)
def to_module_name(string):
"""
Convert string into an Elixir module name
"""
return (
re.sub(_DASHES_AND_UNDERSCORES, " ", string)
.title()
.replace(" ", "")
.replace(".ex", "")
)
## Instruction:
Refactor python if statement into dictionary
## Code After:
import re
_DASHES_AND_UNDERSCORES = re.compile("[-_]")
_MODULE_FILEPATH = re.compile(r"lib\/([^\/]+)\/([\w+\/]+)*\/([^\/]+).ex")
_CLOSING_CHARACTERS = {
"(": ")",
"{": "}",
"[": "]",
"\"": "\""
}
def closing_character(tabstop):
"""
Return closing character for a tabstop containing an opening character.
"""
if tabstop:
return _CLOSING_CHARACTERS.get(tabstop[0], "")
return ""
def module_path_match(path, regex=_MODULE_FILEPATH):
"""
Return match data for an Elixir module from a file path.
"""
return re.search(regex, path)
def outer_module_name(path):
"""
Return name for an outer Elixir module from a file path.
"""
outer_module_path = module_path_match(path).group(1)
return to_module_name(outer_module_path)
def to_module_name(string):
"""
Convert string into an Elixir module name
"""
return (
re.sub(_DASHES_AND_UNDERSCORES, " ", string)
.title()
.replace(" ", "")
.replace(".ex", "")
)
|
import re
_DASHES_AND_UNDERSCORES = re.compile("[-_]")
_MODULE_FILEPATH = re.compile(r"lib\/([^\/]+)\/([\w+\/]+)*\/([^\/]+).ex")
+ _CLOSING_CHARACTERS = {
+ "(": ")",
+ "{": "}",
+ "[": "]",
+ "\"": "\""
+ }
def closing_character(tabstop):
"""
Return closing character for a tabstop containing an opening character.
"""
+ if tabstop:
+ return _CLOSING_CHARACTERS.get(tabstop[0], "")
- if tabstop.startswith("("):
- return ")"
- if tabstop.startswith("{"):
- return "}"
- if tabstop.startswith("["):
- return "]"
- if tabstop.startswith("\""):
- return "\""
-
return ""
def module_path_match(path, regex=_MODULE_FILEPATH):
"""
Return match data for an Elixir module from a file path.
"""
return re.search(regex, path)
def outer_module_name(path):
"""
Return name for an outer Elixir module from a file path.
"""
outer_module_path = module_path_match(path).group(1)
return to_module_name(outer_module_path)
def to_module_name(string):
"""
Convert string into an Elixir module name
"""
return (
re.sub(_DASHES_AND_UNDERSCORES, " ", string)
.title()
.replace(" ", "")
.replace(".ex", "")
)
|
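
A note on the record above: the dictionary dispatch works because dict.get supplies the empty-string fallback that the if-chain previously reached by falling through. A minimal standalone sketch of the same pattern (the _PAIRS name and the slice-based lookup are our own illustration, not part of the snippet library):

_PAIRS = {"(": ")", "{": "}", "[": "]", "\"": "\""}

def closing_character(tabstop):
    # tabstop[:1] is "" for an empty string, so the .get default covers
    # both empty tabstops and unknown opening characters.
    return _PAIRS.get(tabstop[:1], "")

assert closing_character("(arg") == ")"
assert closing_character("") == ""
assert closing_character("word") == ""
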
c262e1d4c1c7422675728298019ee674242b68dd
|
examples/framework/faren/faren.py
|
examples/framework/faren/faren.py
|
import gtk
from kiwi.controllers import BaseController
from kiwi.ui.views import BaseView
from kiwi.ui.gadgets import quit_if_last
class FarenControl(BaseController):
def on_quitbutton__clicked(self, *args):
self.view.hide_and_quit()
def after_temperature__changed(self, entry, *args):
try:
temp = float(entry.get_text())
except ValueError:
temp = 0
celsius = (temp - 32) * 5/9.0
farenheit = (temp * 9/5.0) + 32
self.view.celsius.set_text("%.2f" % celsius)
self.view.farenheit.set_text("%.2f" % farenheit)
widgets = ["quitbutton", "temperature", "celsius", "farenheit"]
view = BaseView(gladefile="faren", delete_handler=quit_if_last,
widgets=widgets)
ctl = FarenControl(view)
view.show()
gtk.main()
|
import gtk
from kiwi.controllers import BaseController
from kiwi.ui.views import BaseView
from kiwi.ui.gadgets import quit_if_last
class FarenControl(BaseController):
def on_quitbutton__clicked(self, *args):
self.view.hide_and_quit()
def after_temperature__insert_text(self, entry, *args):
try:
temp = float(entry.get_text())
except ValueError:
temp = 0
celsius = (temp - 32) * 5/9.0
farenheit = (temp * 9/5.0) + 32
self.view.celsius.set_text("%.2f" % celsius)
self.view.farenheit.set_text("%.2f" % farenheit)
widgets = ["quitbutton", "temperature", "celsius", "farenheit"]
view = BaseView(gladefile="faren", delete_handler=quit_if_last,
widgets=widgets)
ctl = FarenControl(view)
view.show()
gtk.main()
|
Use insert_text instead of changed
|
Use insert_text instead of changed
|
Python
|
lgpl-2.1
|
Schevo/kiwi,Schevo/kiwi,Schevo/kiwi
|
import gtk
from kiwi.controllers import BaseController
from kiwi.ui.views import BaseView
from kiwi.ui.gadgets import quit_if_last
class FarenControl(BaseController):
def on_quitbutton__clicked(self, *args):
self.view.hide_and_quit()
- def after_temperature__changed(self, entry, *args):
+ def after_temperature__insert_text(self, entry, *args):
try:
temp = float(entry.get_text())
except ValueError:
temp = 0
celsius = (temp - 32) * 5/9.0
farenheit = (temp * 9/5.0) + 32
self.view.celsius.set_text("%.2f" % celsius)
self.view.farenheit.set_text("%.2f" % farenheit)
widgets = ["quitbutton", "temperature", "celsius", "farenheit"]
view = BaseView(gladefile="faren", delete_handler=quit_if_last,
widgets=widgets)
ctl = FarenControl(view)
view.show()
gtk.main()
|
Use insert_text instead of changed
|
## Code Before:
import gtk
from kiwi.controllers import BaseController
from kiwi.ui.views import BaseView
from kiwi.ui.gadgets import quit_if_last
class FarenControl(BaseController):
def on_quitbutton__clicked(self, *args):
self.view.hide_and_quit()
def after_temperature__changed(self, entry, *args):
try:
temp = float(entry.get_text())
except ValueError:
temp = 0
celsius = (temp - 32) * 5/9.0
farenheit = (temp * 9/5.0) + 32
self.view.celsius.set_text("%.2f" % celsius)
self.view.farenheit.set_text("%.2f" % farenheit)
widgets = ["quitbutton", "temperature", "celsius", "farenheit"]
view = BaseView(gladefile="faren", delete_handler=quit_if_last,
widgets=widgets)
ctl = FarenControl(view)
view.show()
gtk.main()
## Instruction:
Use insert_text instead of changed
## Code After:
import gtk
from kiwi.controllers import BaseController
from kiwi.ui.views import BaseView
from kiwi.ui.gadgets import quit_if_last
class FarenControl(BaseController):
def on_quitbutton__clicked(self, *args):
self.view.hide_and_quit()
def after_temperature__insert_text(self, entry, *args):
try:
temp = float(entry.get_text())
except ValueError:
temp = 0
celsius = (temp - 32) * 5/9.0
farenheit = (temp * 9/5.0) + 32
self.view.celsius.set_text("%.2f" % celsius)
self.view.farenheit.set_text("%.2f" % farenheit)
widgets = ["quitbutton", "temperature", "celsius", "farenheit"]
view = BaseView(gladefile="faren", delete_handler=quit_if_last,
widgets=widgets)
ctl = FarenControl(view)
view.show()
gtk.main()
|
import gtk
from kiwi.controllers import BaseController
from kiwi.ui.views import BaseView
from kiwi.ui.gadgets import quit_if_last
class FarenControl(BaseController):
def on_quitbutton__clicked(self, *args):
self.view.hide_and_quit()
- def after_temperature__changed(self, entry, *args):
? ^^^ ^ ^
+ def after_temperature__insert_text(self, entry, *args):
? ^ ^ ^^^^^^^
try:
temp = float(entry.get_text())
except ValueError:
temp = 0
celsius = (temp - 32) * 5/9.0
farenheit = (temp * 9/5.0) + 32
self.view.celsius.set_text("%.2f" % celsius)
self.view.farenheit.set_text("%.2f" % farenheit)
widgets = ["quitbutton", "temperature", "celsius", "farenheit"]
view = BaseView(gladefile="faren", delete_handler=quit_if_last,
widgets=widgets)
ctl = FarenControl(view)
view.show()
gtk.main()
|
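
Leaving the signal rename aside, the handler body in this record is plain unit conversion. A rough standalone sketch of that logic without kiwi or GTK (the convert name and the tuple return are our own):

def convert(text):
    # Mirror the handler's try/except: unparseable input falls back to 0.
    try:
        temp = float(text)
    except ValueError:
        temp = 0
    celsius = (temp - 32) * 5 / 9.0
    farenheit = (temp * 9 / 5.0) + 32
    return "%.2f" % celsius, "%.2f" % farenheit

print(convert("100"))  # ('37.78', '212.00')
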
e73795b8ad016bba5b1ab5118a5153085a1e99b0
|
nova/tests/functional/api_sample_tests/test_servers_ips.py
|
nova/tests/functional/api_sample_tests/test_servers_ips.py
|
from nova.tests.functional.api_sample_tests import test_servers
class ServersIpsJsonTest(test_servers.ServersSampleBase):
sample_dir = 'server-ips'
def test_get(self):
# Test getting a server's IP information.
uuid = self._post_server()
response = self._do_get('servers/%s/ips' % uuid)
self._verify_response('server-ips-resp', {}, response, 200)
def test_get_by_network(self):
# Test getting a server's IP information by network id.
uuid = self._post_server()
response = self._do_get('servers/%s/ips/private' % uuid)
self._verify_response('server-ips-network-resp', {}, response, 200)
|
from nova.tests import fixtures as nova_fixtures
from nova.tests.functional.api_sample_tests import test_servers
class ServersIpsJsonTest(test_servers.ServersSampleBase):
sample_dir = 'server-ips'
def test_get(self):
# Test getting a server's IP information.
uuid = self._post_server()
response = self._do_get('servers/%s/ips' % uuid)
self._verify_response('server-ips-resp', {}, response, 200)
def test_get_by_network(self):
# Test getting a server's IP information by network id.
server_uuid = self._post_server()
network_label = nova_fixtures.NeutronFixture.network_1['name']
response = self._do_get('servers/%s/ips/%s' % (
server_uuid, network_label))
self._verify_response('server-ips-network-resp', {}, response, 200)
|
Make it obvious where we're getting our names from
|
trivial: Make it obvious where we're getting our names from
Change-Id: Ib9aa790c8999e50a2a3587561604ff1e51666f38
Signed-off-by: Stephen Finucane <[email protected]>
|
Python
|
apache-2.0
|
mahak/nova,mahak/nova,klmitch/nova,klmitch/nova,klmitch/nova,rahulunair/nova,mahak/nova,rahulunair/nova,openstack/nova,openstack/nova,rahulunair/nova,openstack/nova,klmitch/nova
|
+ from nova.tests import fixtures as nova_fixtures
from nova.tests.functional.api_sample_tests import test_servers
class ServersIpsJsonTest(test_servers.ServersSampleBase):
sample_dir = 'server-ips'
def test_get(self):
# Test getting a server's IP information.
uuid = self._post_server()
response = self._do_get('servers/%s/ips' % uuid)
self._verify_response('server-ips-resp', {}, response, 200)
def test_get_by_network(self):
# Test getting a server's IP information by network id.
- uuid = self._post_server()
+ server_uuid = self._post_server()
+ network_label = nova_fixtures.NeutronFixture.network_1['name']
- response = self._do_get('servers/%s/ips/private' % uuid)
+ response = self._do_get('servers/%s/ips/%s' % (
+ server_uuid, network_label))
self._verify_response('server-ips-network-resp', {}, response, 200)
|
Make it obvious where we're getting our names from
|
## Code Before:
from nova.tests.functional.api_sample_tests import test_servers
class ServersIpsJsonTest(test_servers.ServersSampleBase):
sample_dir = 'server-ips'
def test_get(self):
# Test getting a server's IP information.
uuid = self._post_server()
response = self._do_get('servers/%s/ips' % uuid)
self._verify_response('server-ips-resp', {}, response, 200)
def test_get_by_network(self):
# Test getting a server's IP information by network id.
uuid = self._post_server()
response = self._do_get('servers/%s/ips/private' % uuid)
self._verify_response('server-ips-network-resp', {}, response, 200)
## Instruction:
Make it obvious where we're getting our names from
## Code After:
from nova.tests import fixtures as nova_fixtures
from nova.tests.functional.api_sample_tests import test_servers
class ServersIpsJsonTest(test_servers.ServersSampleBase):
sample_dir = 'server-ips'
def test_get(self):
# Test getting a server's IP information.
uuid = self._post_server()
response = self._do_get('servers/%s/ips' % uuid)
self._verify_response('server-ips-resp', {}, response, 200)
def test_get_by_network(self):
# Test getting a server's IP information by network id.
server_uuid = self._post_server()
network_label = nova_fixtures.NeutronFixture.network_1['name']
response = self._do_get('servers/%s/ips/%s' % (
server_uuid, network_label))
self._verify_response('server-ips-network-resp', {}, response, 200)
|
+ from nova.tests import fixtures as nova_fixtures
from nova.tests.functional.api_sample_tests import test_servers
class ServersIpsJsonTest(test_servers.ServersSampleBase):
sample_dir = 'server-ips'
def test_get(self):
# Test getting a server's IP information.
uuid = self._post_server()
response = self._do_get('servers/%s/ips' % uuid)
self._verify_response('server-ips-resp', {}, response, 200)
def test_get_by_network(self):
# Test getting a server's IP information by network id.
- uuid = self._post_server()
+ server_uuid = self._post_server()
? +++++++
+ network_label = nova_fixtures.NeutronFixture.network_1['name']
- response = self._do_get('servers/%s/ips/private' % uuid)
? ^^^^^^^ ^^^^^
+ response = self._do_get('servers/%s/ips/%s' % (
? ^^ ^
+ server_uuid, network_label))
self._verify_response('server-ips-network-resp', {}, response, 200)
|
13ffdb0cb455bf32a10d055e6e972c0ca725557a
|
src/mmw/apps/home/views.py
|
src/mmw/apps/home/views.py
|
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.contrib.auth.models import User
from django.shortcuts import render_to_response
from rest_framework import serializers, viewsets
# Serializers define the API representation.
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('url', 'username', 'email', 'is_staff')
# ViewSets define the view behavior.
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
def home_page(request):
return render_to_response('home/home.html')
def compare(request):
return render_to_response('home/compare.html')
|
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.contrib.auth.models import User
from django.shortcuts import render_to_response
from django.template.context_processors import csrf
from rest_framework import serializers, viewsets
# Serializers define the API representation.
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('url', 'username', 'email', 'is_staff')
# ViewSets define the view behavior.
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
def home_page(request):
csrf_token = {}
csrf_token.update(csrf(request))
return render_to_response('home/home.html', csrf_token)
def compare(request):
return render_to_response('home/compare.html')
|
Return a csrf token on the homepage.
|
Return a csrf token on the homepage.
We were not setting a CSRF token on the homepage. This meant that requests to
API endpoints did not have a token available. This change sets the token
immediately as part of the cookie. Ajax calls can then use this value.

|
Python
|
apache-2.0
|
WikiWatershed/model-my-watershed,kdeloach/model-my-watershed,kdeloach/model-my-watershed,mmcfarland/model-my-watershed-1,lewfish/model-my-watershed,WikiWatershed/model-my-watershed,lliss/model-my-watershed,lewfish/model-my-watershed,mmcfarland/model-my-watershed-1,lewfish/model-my-watershed,WikiWatershed/model-my-watershed,lewfish/model-my-watershed,lewfish/model-my-watershed,lliss/model-my-watershed,mmcfarland/model-my-watershed-1,lliss/model-my-watershed,mmcfarland/model-my-watershed,lliss/model-my-watershed,kdeloach/model-my-watershed,WikiWatershed/model-my-watershed,mmcfarland/model-my-watershed,project-icp/bee-pollinator-app,mmcfarland/model-my-watershed,lliss/model-my-watershed,mmcfarland/model-my-watershed,kdeloach/model-my-watershed,project-icp/bee-pollinator-app,mmcfarland/model-my-watershed-1,project-icp/bee-pollinator-app,WikiWatershed/model-my-watershed,project-icp/bee-pollinator-app,mmcfarland/model-my-watershed,kdeloach/model-my-watershed
|
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.contrib.auth.models import User
from django.shortcuts import render_to_response
+ from django.template.context_processors import csrf
from rest_framework import serializers, viewsets
# Serializers define the API representation.
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('url', 'username', 'email', 'is_staff')
# ViewSets define the view behavior.
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
def home_page(request):
+ csrf_token = {}
+ csrf_token.update(csrf(request))
+
- return render_to_response('home/home.html')
+ return render_to_response('home/home.html', csrf_token)
def compare(request):
return render_to_response('home/compare.html')
-
|
Return a csrf token on the homepage.
|
## Code Before:
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.contrib.auth.models import User
from django.shortcuts import render_to_response
from rest_framework import serializers, viewsets
# Serializers define the API representation.
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('url', 'username', 'email', 'is_staff')
# ViewSets define the view behavior.
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
def home_page(request):
return render_to_response('home/home.html')
def compare(request):
return render_to_response('home/compare.html')
## Instruction:
Return a csrf token on the homepage.
## Code After:
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.contrib.auth.models import User
from django.shortcuts import render_to_response
from django.template.context_processors import csrf
from rest_framework import serializers, viewsets
# Serializers define the API representation.
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('url', 'username', 'email', 'is_staff')
# ViewSets define the view behavior.
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
def home_page(request):
csrf_token = {}
csrf_token.update(csrf(request))
return render_to_response('home/home.html', csrf_token)
def compare(request):
return render_to_response('home/compare.html')
|
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.contrib.auth.models import User
from django.shortcuts import render_to_response
+ from django.template.context_processors import csrf
from rest_framework import serializers, viewsets
# Serializers define the API representation.
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('url', 'username', 'email', 'is_staff')
# ViewSets define the view behavior.
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
def home_page(request):
+ csrf_token = {}
+ csrf_token.update(csrf(request))
+
- return render_to_response('home/home.html')
+ return render_to_response('home/home.html', csrf_token)
? ++++++++++++
def compare(request):
return render_to_response('home/compare.html')
|
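
The fix above works by merging django.template.context_processors.csrf(request) into the template context, so the token is available to the rendered page and AJAX code can read and send it back. A hedged minimal sketch of that idea (template name is a placeholder; on newer Django versions render(request, ...) with a RequestContext covers this without the manual update):

from django.shortcuts import render_to_response
from django.template.context_processors import csrf

def home_page(request):
    context = {}
    context.update(csrf(request))  # adds 'csrf_token' to the template context
    return render_to_response('home/home.html', context)
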
2492803223060ecf92f8c60a2aa07db8f450e7b1
|
get_dev_id.py
|
get_dev_id.py
|
from gmusicapi import Mobileclient
# Try to print out some valid device IDs.
if __name__ == '__main__':
api = Mobileclient()
email = input('Enter your email: ')
password = input('Enter your password: ')
if not api.login(email, password, Mobileclient.FROM_MAC_ADDRESS):
print('Login failed, verify your email and password:'
'enter any key to exit.')
quit()
devices = api.get_registered_devices()
i = 1
for device in devices:
print('%d: %s' % (
i, device['id'][2:] if device['id'].startswith('0x')
else device['id']))
i += 1
|
import getpass
from gmusicapi import Mobileclient
# Try to print out some valid device IDs.
if __name__ == '__main__':
api = Mobileclient()
email = input('Enter your email: ').strip()
assert '@' in email, 'Please enter a valid email.'
password = getpass.getpass('Enter password for {}: '.format(email))
if not api.login(email, password, Mobileclient.FROM_MAC_ADDRESS):
print('Login failed, verify your email and password: '
'enter any key to exit.')
quit()
for i, device in enumerate(api.get_registered_devices()):
d_id = device['id']
print('%d: %s' % (i + 1, d_id[2:] if d_id.startswith('0x') else d_id))
|
Secure with getpass(), Simplify with enumerate()
|
Secure with getpass(), Simplify with enumerate()
|
Python
|
mit
|
christopher-dG/pmcli,christopher-dG/pmcli
|
+ import getpass
from gmusicapi import Mobileclient
+
# Try to print out some valid device IDs.
if __name__ == '__main__':
api = Mobileclient()
- email = input('Enter your email: ')
+ email = input('Enter your email: ').strip()
- password = input('Enter your password: ')
+ assert '@' in email, 'Please enter a valid email.'
+ password = getpass.getpass('Enter password for {}: '.format(email))
if not api.login(email, password, Mobileclient.FROM_MAC_ADDRESS):
- print('Login failed, verify your email and password:'
+ print('Login failed, verify your email and password: '
'enter any key to exit.')
quit()
+ for i, device in enumerate(api.get_registered_devices()):
+ d_id = device['id']
+ print('%d: %s' % (i + 1, d_id[2:] if d_id.startswith('0x') else d_id))
- devices = api.get_registered_devices()
- i = 1
- for device in devices:
- print('%d: %s' % (
- i, device['id'][2:] if device['id'].startswith('0x')
- else device['id']))
- i += 1
|
Secure with getpass(), Simplify with enumerate()
|
## Code Before:
from gmusicapi import Mobileclient
# Try to print out some valid device IDs.
if __name__ == '__main__':
api = Mobileclient()
email = input('Enter your email: ')
password = input('Enter your password: ')
if not api.login(email, password, Mobileclient.FROM_MAC_ADDRESS):
print('Login failed, verify your email and password:'
'enter any key to exit.')
quit()
devices = api.get_registered_devices()
i = 1
for device in devices:
print('%d: %s' % (
i, device['id'][2:] if device['id'].startswith('0x')
else device['id']))
i += 1
## Instruction:
Secure with getpass(), Simplify with enumerate()
## Code After:
import getpass
from gmusicapi import Mobileclient
# Try to print out some valid device IDs.
if __name__ == '__main__':
api = Mobileclient()
email = input('Enter your email: ').strip()
assert '@' in email, 'Please enter a valid email.'
password = getpass.getpass('Enter password for {}: '.format(email))
if not api.login(email, password, Mobileclient.FROM_MAC_ADDRESS):
print('Login failed, verify your email and password: '
'enter any key to exit.')
quit()
for i, device in enumerate(api.get_registered_devices()):
d_id = device['id']
print('%d: %s' % (i + 1, d_id[2:] if d_id.startswith('0x') else d_id))
|
+ import getpass
from gmusicapi import Mobileclient
+
# Try to print out some valid device IDs.
if __name__ == '__main__':
api = Mobileclient()
- email = input('Enter your email: ')
+ email = input('Enter your email: ').strip()
? ++++++++
- password = input('Enter your password: ')
+ assert '@' in email, 'Please enter a valid email.'
+ password = getpass.getpass('Enter password for {}: '.format(email))
if not api.login(email, password, Mobileclient.FROM_MAC_ADDRESS):
- print('Login failed, verify your email and password:'
+ print('Login failed, verify your email and password: '
? +
'enter any key to exit.')
quit()
- devices = api.get_registered_devices()
? - ^
+ for i, device in enumerate(api.get_registered_devices()):
? +++++++ ^^ ++++++++++ ++
- i = 1
- for device in devices:
- print('%d: %s' % (
- i, device['id'][2:] if device['id'].startswith('0x')
- else device['id']))
? ^^^^^^^ --
+ d_id = device['id']
? ++++ ^
- i += 1
+ print('%d: %s' % (i + 1, d_id[2:] if d_id.startswith('0x') else d_id))
|
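
Two standard-library features carry the rewrite above: getpass.getpass reads the password without echoing it, and enumerate with a start value replaces the manual counter. A small sketch with no gmusicapi dependency (function names are ours):

import getpass

def prompt_credentials():
    email = input('Enter your email: ').strip()
    password = getpass.getpass('Enter password for {}: '.format(email))
    return email, password

def print_devices(device_ids):
    # enumerate(..., 1) starts counting at 1, replacing i = 1 / i += 1.
    for i, d_id in enumerate(device_ids, 1):
        print('%d: %s' % (i, d_id[2:] if d_id.startswith('0x') else d_id))

print_devices(['0xdeadbeef', 'ios:1234'])
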
05551b6b7ed1ed9a97be635f3d32b5bd4f26f635
|
tests/mltils/test_infrequent_value_encoder.py
|
tests/mltils/test_infrequent_value_encoder.py
|
import pandas as pd
from mltils.encoders import InfrequentValueEncoder
def test_infrequent_value_encoder_1():
ive = InfrequentValueEncoder()
assert ive is not None
def test_infrequent_value_encoder_2():
df = pd.DataFrame({'A': ['a', 'a', 'b', 'b', 'c']})
ive = InfrequentValueEncoder(thrshld=1, str_rpl='ifq')
encoded = ive.fit_transform(df)
expected = pd.DataFrame({'A': ['a', 'a', 'b', 'b', 'ifq']})
assert expected.equals(encoded)
|
import pandas as pd
from mltils.encoders import InfrequentValueEncoder
def test_infrequent_value_encoder_1():
ive = InfrequentValueEncoder()
assert ive is not None
def test_infrequent_value_encoder_2():
df = pd.DataFrame({'A': ['a', 'a', 'b', 'b', 'c']})
ive = InfrequentValueEncoder(thrshld=1, str_rpl='ifq')
encoded = ive.fit_transform(df)
expected = pd.DataFrame({'A': ['a', 'a', 'b', 'b', 'ifq']})
assert expected.equals(encoded)
def test_infrequent_value_encoder_3():
df = pd.DataFrame({'A': ['a', 'a', 'b', 'b', 'c']})
ive = InfrequentValueEncoder(thrshld=0, str_rpl='ifq')
encoded = ive.fit_transform(df)
expected = pd.DataFrame({'A': ['a', 'a', 'b', 'b', 'c']})
assert expected.equals(encoded)
def test_infrequent_value_encoder_4():
df = pd.DataFrame({'A': ['a', 'a', 'b', 'b', 'c']})
ive = InfrequentValueEncoder(thrshld=0, str_rpl='ifq')
encoded = ive.fit_transform(df)
expected = pd.DataFrame({'A': ['a', 'a', 'b', 'b', 'c']})
assert expected.equals(encoded)
|
Add more unit tests for InfrequentValueEncoder
|
Add more unit tests for InfrequentValueEncoder
|
Python
|
mit
|
rladeira/mltils
|
import pandas as pd
from mltils.encoders import InfrequentValueEncoder
def test_infrequent_value_encoder_1():
ive = InfrequentValueEncoder()
assert ive is not None
def test_infrequent_value_encoder_2():
df = pd.DataFrame({'A': ['a', 'a', 'b', 'b', 'c']})
ive = InfrequentValueEncoder(thrshld=1, str_rpl='ifq')
encoded = ive.fit_transform(df)
expected = pd.DataFrame({'A': ['a', 'a', 'b', 'b', 'ifq']})
assert expected.equals(encoded)
+
+ def test_infrequent_value_encoder_3():
+ df = pd.DataFrame({'A': ['a', 'a', 'b', 'b', 'c']})
+ ive = InfrequentValueEncoder(thrshld=0, str_rpl='ifq')
+ encoded = ive.fit_transform(df)
+ expected = pd.DataFrame({'A': ['a', 'a', 'b', 'b', 'c']})
+ assert expected.equals(encoded)
+
+
+ def test_infrequent_value_encoder_4():
+ df = pd.DataFrame({'A': ['a', 'a', 'b', 'b', 'c']})
+ ive = InfrequentValueEncoder(thrshld=0, str_rpl='ifq')
+ encoded = ive.fit_transform(df)
+ expected = pd.DataFrame({'A': ['a', 'a', 'b', 'b', 'c']})
+ assert expected.equals(encoded)
+
|
Add more unit tests for InfrequentValueEncoder
|
## Code Before:
import pandas as pd
from mltils.encoders import InfrequentValueEncoder
def test_infrequent_value_encoder_1():
ive = InfrequentValueEncoder()
assert ive is not None
def test_infrequent_value_encoder_2():
df = pd.DataFrame({'A': ['a', 'a', 'b', 'b', 'c']})
ive = InfrequentValueEncoder(thrshld=1, str_rpl='ifq')
encoded = ive.fit_transform(df)
expected = pd.DataFrame({'A': ['a', 'a', 'b', 'b', 'ifq']})
assert expected.equals(encoded)
## Instruction:
Add more unit tests for InfrequentValueEncoder
## Code After:
import pandas as pd
from mltils.encoders import InfrequentValueEncoder
def test_infrequent_value_encoder_1():
ive = InfrequentValueEncoder()
assert ive is not None
def test_infrequent_value_encoder_2():
df = pd.DataFrame({'A': ['a', 'a', 'b', 'b', 'c']})
ive = InfrequentValueEncoder(thrshld=1, str_rpl='ifq')
encoded = ive.fit_transform(df)
expected = pd.DataFrame({'A': ['a', 'a', 'b', 'b', 'ifq']})
assert expected.equals(encoded)
def test_infrequent_value_encoder_3():
df = pd.DataFrame({'A': ['a', 'a', 'b', 'b', 'c']})
ive = InfrequentValueEncoder(thrshld=0, str_rpl='ifq')
encoded = ive.fit_transform(df)
expected = pd.DataFrame({'A': ['a', 'a', 'b', 'b', 'c']})
assert expected.equals(encoded)
def test_infrequent_value_encoder_4():
df = pd.DataFrame({'A': ['a', 'a', 'b', 'b', 'c']})
ive = InfrequentValueEncoder(thrshld=0, str_rpl='ifq')
encoded = ive.fit_transform(df)
expected = pd.DataFrame({'A': ['a', 'a', 'b', 'b', 'c']})
assert expected.equals(encoded)
|
import pandas as pd
from mltils.encoders import InfrequentValueEncoder
def test_infrequent_value_encoder_1():
ive = InfrequentValueEncoder()
assert ive is not None
def test_infrequent_value_encoder_2():
df = pd.DataFrame({'A': ['a', 'a', 'b', 'b', 'c']})
ive = InfrequentValueEncoder(thrshld=1, str_rpl='ifq')
encoded = ive.fit_transform(df)
expected = pd.DataFrame({'A': ['a', 'a', 'b', 'b', 'ifq']})
assert expected.equals(encoded)
+
+
+ def test_infrequent_value_encoder_3():
+ df = pd.DataFrame({'A': ['a', 'a', 'b', 'b', 'c']})
+ ive = InfrequentValueEncoder(thrshld=0, str_rpl='ifq')
+ encoded = ive.fit_transform(df)
+ expected = pd.DataFrame({'A': ['a', 'a', 'b', 'b', 'c']})
+ assert expected.equals(encoded)
+
+
+ def test_infrequent_value_encoder_4():
+ df = pd.DataFrame({'A': ['a', 'a', 'b', 'b', 'c']})
+ ive = InfrequentValueEncoder(thrshld=0, str_rpl='ifq')
+ encoded = ive.fit_transform(df)
+ expected = pd.DataFrame({'A': ['a', 'a', 'b', 'b', 'c']})
+ assert expected.equals(encoded)
|
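
The added tests pin down that a threshold of 0 leaves the frame unchanged while a threshold of 1 rewrites singleton values. The sketch below reproduces that expectation with plain pandas so it can be checked in isolation; replace_infrequent is our stand-in, not the mltils implementation:

import pandas as pd

def replace_infrequent(df, threshold, replacement):
    # Values whose count is <= threshold get replaced; threshold=0 is a no-op.
    out = df.copy()
    for col in out.columns:
        counts = out[col].value_counts()
        rare = counts[counts <= threshold].index
        out[col] = out[col].where(~out[col].isin(rare), replacement)
    return out

df = pd.DataFrame({'A': ['a', 'a', 'b', 'b', 'c']})
assert replace_infrequent(df, 0, 'ifq').equals(df)
assert replace_infrequent(df, 1, 'ifq').equals(
    pd.DataFrame({'A': ['a', 'a', 'b', 'b', 'ifq']}))
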
9a19da30a933bc2872b9fc5b5966823c43e1982f
|
website/pages/tests.py
|
website/pages/tests.py
|
from django.core.urlresolvers import resolve
from django.test import TestCase
from django.http import HttpRequest
from django.template.loader import render_to_string
from website.pages.views import home_page, send_email
class HomePageTest(TestCase):
def test_root_url_resolves_to_home_page_view(self):
found = resolve('/')
self.assertEqual(found.func, home_page)
def test_home_page_returns_correct_html(self):
request = HttpRequest()
response = home_page(request)
expected_html = render_to_string('pages/home.html')
self.assertEqual(response.content.decode(), expected_html)
class SendEmailTest(TestCase):
def test_send_email_url_resolves_to_send_email_view(self):
found = resolve('/send-email/')
self.assertEqual(found.func, send_email)
def test_send_email_returns_correct_html(self):
request = HttpRequest()
response = send_email(request)
expected_html = render_to_string('pages/send_email.html')
self.assertEqual(response.content.decode(), expected_html)
|
from django.core.urlresolvers import resolve
from django.test import TestCase
from django.http import HttpRequest
from django.template.loader import render_to_string
from website.pages.views import home_page, contact
class HomePageTest(TestCase):
def test_root_url_resolves_to_home_page_view(self):
found = resolve('/')
self.assertEqual(found.func, home_page)
def test_home_page_returns_correct_html(self):
request = HttpRequest()
response = home_page(request)
expected_html = render_to_string('pages/home.html')
self.assertEqual(response.content.decode(), expected_html)
class ContactTest(TestCase):
def test_contact_url_resolves_to_contact_view(self):
found = resolve('/contact/')
self.assertEqual(found.func, contact)
def test_contact_returns_correct_html(self):
request = HttpRequest()
response = contact(request)
expected_html = render_to_string('pages/contact.html')
self.assertEqual(response.content.decode(), expected_html)
|
Change send email to contact namespace
|
Change send email to contact namespace
|
Python
|
mit
|
MazeFX/cookiecutter_website_project,MazeFX/cookiecutter_website_project,MazeFX/cookiecutter_website_project,MazeFX/cookiecutter_website_project
|
from django.core.urlresolvers import resolve
from django.test import TestCase
from django.http import HttpRequest
from django.template.loader import render_to_string
- from website.pages.views import home_page, send_email
+ from website.pages.views import home_page, contact
class HomePageTest(TestCase):
def test_root_url_resolves_to_home_page_view(self):
found = resolve('/')
self.assertEqual(found.func, home_page)
def test_home_page_returns_correct_html(self):
request = HttpRequest()
response = home_page(request)
expected_html = render_to_string('pages/home.html')
self.assertEqual(response.content.decode(), expected_html)
- class SendEmailTest(TestCase):
+ class ContactTest(TestCase):
- def test_send_email_url_resolves_to_send_email_view(self):
+ def test_contact_url_resolves_to_contact_view(self):
- found = resolve('/send-email/')
+ found = resolve('/contact/')
- self.assertEqual(found.func, send_email)
+ self.assertEqual(found.func, contact)
- def test_send_email_returns_correct_html(self):
+ def test_contact_returns_correct_html(self):
request = HttpRequest()
- response = send_email(request)
+ response = contact(request)
- expected_html = render_to_string('pages/send_email.html')
+ expected_html = render_to_string('pages/contact.html')
self.assertEqual(response.content.decode(), expected_html)
|
Change send email to contact namespace
|
## Code Before:
from django.core.urlresolvers import resolve
from django.test import TestCase
from django.http import HttpRequest
from django.template.loader import render_to_string
from website.pages.views import home_page, send_email
class HomePageTest(TestCase):
def test_root_url_resolves_to_home_page_view(self):
found = resolve('/')
self.assertEqual(found.func, home_page)
def test_home_page_returns_correct_html(self):
request = HttpRequest()
response = home_page(request)
expected_html = render_to_string('pages/home.html')
self.assertEqual(response.content.decode(), expected_html)
class SendEmailTest(TestCase):
def test_send_email_url_resolves_to_send_email_view(self):
found = resolve('/send-email/')
self.assertEqual(found.func, send_email)
def test_send_email_returns_correct_html(self):
request = HttpRequest()
response = send_email(request)
expected_html = render_to_string('pages/send_email.html')
self.assertEqual(response.content.decode(), expected_html)
## Instruction:
Change send email to contact namespace
## Code After:
from django.core.urlresolvers import resolve
from django.test import TestCase
from django.http import HttpRequest
from django.template.loader import render_to_string
from website.pages.views import home_page, contact
class HomePageTest(TestCase):
def test_root_url_resolves_to_home_page_view(self):
found = resolve('/')
self.assertEqual(found.func, home_page)
def test_home_page_returns_correct_html(self):
request = HttpRequest()
response = home_page(request)
expected_html = render_to_string('pages/home.html')
self.assertEqual(response.content.decode(), expected_html)
class ContactTest(TestCase):
def test_contact_url_resolves_to_contact_view(self):
found = resolve('/contact/')
self.assertEqual(found.func, contact)
def test_contact_returns_correct_html(self):
request = HttpRequest()
response = contact(request)
expected_html = render_to_string('pages/contact.html')
self.assertEqual(response.content.decode(), expected_html)
|
from django.core.urlresolvers import resolve
from django.test import TestCase
from django.http import HttpRequest
from django.template.loader import render_to_string
- from website.pages.views import home_page, send_email
? ^^ ^^^^ ^^
+ from website.pages.views import home_page, contact
? ^^ ^ ^^
class HomePageTest(TestCase):
def test_root_url_resolves_to_home_page_view(self):
found = resolve('/')
self.assertEqual(found.func, home_page)
def test_home_page_returns_correct_html(self):
request = HttpRequest()
response = home_page(request)
expected_html = render_to_string('pages/home.html')
self.assertEqual(response.content.decode(), expected_html)
- class SendEmailTest(TestCase):
? ^^ ^^^ ^^
+ class ContactTest(TestCase):
? ^^ ^ ^^
- def test_send_email_url_resolves_to_send_email_view(self):
? ^^ ^^^^ ^^ ^^ ^^^^ ^^
+ def test_contact_url_resolves_to_contact_view(self):
? ^^ ^ ^^ ^^ ^ ^^
- found = resolve('/send-email/')
? ^^ ^^^^ ^^
+ found = resolve('/contact/')
? ^^ ^ ^^
- self.assertEqual(found.func, send_email)
? ^^ ^^^^ ^^
+ self.assertEqual(found.func, contact)
? ^^ ^ ^^
- def test_send_email_returns_correct_html(self):
? ^^ ^^^^ ^^
+ def test_contact_returns_correct_html(self):
? ^^ ^ ^^
request = HttpRequest()
- response = send_email(request)
? ^^ ^^^^ ^^
+ response = contact(request)
? ^^ ^ ^^
- expected_html = render_to_string('pages/send_email.html')
? ^^ ^^^^ ^^
+ expected_html = render_to_string('pages/contact.html')
? ^^ ^ ^^
self.assertEqual(response.content.decode(), expected_html)
|
d2cc077bfce9bef654a8ef742996e5aca8858fc7
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(name='Pyranha',
description='Elegant IRC client',
version='0.1',
author='John Reese',
author_email='[email protected]',
url='https://github.com/jreese/pyranha',
classifiers=['License :: OSI Approved :: MIT License',
'Topic :: Communications :: Chat :: Internet Relay Chat',
'Development Status :: 2 - Pre-Alpha',
],
license='MIT License',
packages=['pyranha', 'pyranha.irc'],
package_data={'pyranha': []},
scripts=['bin/pyranha'],
)
|
from distutils.core import setup
setup(name='Pyranha',
description='Elegant IRC client',
version='0.1',
author='John Reese',
author_email='[email protected]',
url='https://github.com/jreese/pyranha',
classifiers=['License :: OSI Approved :: MIT License',
'Topic :: Communications :: Chat :: Internet Relay Chat',
'Development Status :: 2 - Pre-Alpha',
],
license='MIT License',
packages=['pyranha', 'pyranha.irc'],
package_data={'pyranha': ['dotfiles/*']},
scripts=['bin/pyranha'],
)
|
Install default dotfiles with package
|
Install default dotfiles with package
|
Python
|
mit
|
jreese/pyranha
|
from distutils.core import setup
setup(name='Pyranha',
description='Elegant IRC client',
version='0.1',
author='John Reese',
author_email='[email protected]',
url='https://github.com/jreese/pyranha',
classifiers=['License :: OSI Approved :: MIT License',
'Topic :: Communications :: Chat :: Internet Relay Chat',
'Development Status :: 2 - Pre-Alpha',
],
license='MIT License',
packages=['pyranha', 'pyranha.irc'],
- package_data={'pyranha': []},
+ package_data={'pyranha': ['dotfiles/*']},
scripts=['bin/pyranha'],
)
|
Install default dotfiles with package
|
## Code Before:
from distutils.core import setup
setup(name='Pyranha',
description='Elegant IRC client',
version='0.1',
author='John Reese',
author_email='[email protected]',
url='https://github.com/jreese/pyranha',
classifiers=['License :: OSI Approved :: MIT License',
'Topic :: Communications :: Chat :: Internet Relay Chat',
'Development Status :: 2 - Pre-Alpha',
],
license='MIT License',
packages=['pyranha', 'pyranha.irc'],
package_data={'pyranha': []},
scripts=['bin/pyranha'],
)
## Instruction:
Install default dotfiles with package
## Code After:
from distutils.core import setup
setup(name='Pyranha',
description='Elegant IRC client',
version='0.1',
author='John Reese',
author_email='[email protected]',
url='https://github.com/jreese/pyranha',
classifiers=['License :: OSI Approved :: MIT License',
'Topic :: Communications :: Chat :: Internet Relay Chat',
'Development Status :: 2 - Pre-Alpha',
],
license='MIT License',
packages=['pyranha', 'pyranha.irc'],
package_data={'pyranha': ['dotfiles/*']},
scripts=['bin/pyranha'],
)
|
from distutils.core import setup
setup(name='Pyranha',
description='Elegant IRC client',
version='0.1',
author='John Reese',
author_email='[email protected]',
url='https://github.com/jreese/pyranha',
classifiers=['License :: OSI Approved :: MIT License',
'Topic :: Communications :: Chat :: Internet Relay Chat',
'Development Status :: 2 - Pre-Alpha',
],
license='MIT License',
packages=['pyranha', 'pyranha.irc'],
- package_data={'pyranha': []},
+ package_data={'pyranha': ['dotfiles/*']},
? ++++++++++++
scripts=['bin/pyranha'],
)
|
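
For context on the one-line change: package_data globs are resolved relative to the package directory, so 'dotfiles/*' ships everything under pyranha/dotfiles/ alongside the code. A minimal hedged setup sketch of the same mechanism with placeholder names:

from distutils.core import setup

setup(
    name='example',
    version='0.1',
    packages=['example'],
    # Installs every file matching example/dotfiles/* with the package.
    package_data={'example': ['dotfiles/*']},
)
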
556e9f5a9f04b730260268a769cbd7170868f693
|
opps/__init__.py
|
opps/__init__.py
|
try:
__import__('pkg_resources').declare_namespace(__name__)
except ImportError:
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
|
import pkg_resources
pkg_resources.declare_namespace(__name__)
|
Fix pkg resources declare namespace
|
Fix pkg resources declare namespace
|
Python
|
mit
|
opps/opps-polls,opps/opps-polls
|
+ import pkg_resources
- try:
- __import__('pkg_resources').declare_namespace(__name__)
- except ImportError:
- from pkgutil import extend_path
- __path__ = extend_path(__path__, __name__)
+ pkg_resources.declare_namespace(__name__)
+
|
Fix pkg resources declare namespace
|
## Code Before:
try:
__import__('pkg_resources').declare_namespace(__name__)
except ImportError:
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
## Instruction:
Fix pkg resources declare namespace
## Code After:
import pkg_resources
pkg_resources.declare_namespace(__name__)
|
- try:
+ import pkg_resources
+
- __import__('pkg_resources').declare_namespace(__name__)
? ---------------- --
+ pkg_resources.declare_namespace(__name__)
- except ImportError:
- from pkgutil import extend_path
- __path__ = extend_path(__path__, __name__)
|
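
After this change the entire __init__.py of the namespace package is the two lines shown in the diff. As general background (not something this record itself shows), setuptools-based distributions that use this style usually also declare the namespace in setup.py, roughly like the illustrative sketch below; on Python 3.3+ PEP 420 implicit namespace packages make the declaration unnecessary altogether:

from setuptools import setup, find_packages

setup(
    name='opps-polls',            # placeholder metadata
    packages=find_packages(),
    namespace_packages=['opps'],  # pairs with pkg_resources.declare_namespace
)
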
af6c31d09aef686ba896b2a2c74fbb88cc7f1be0
|
tests/test_utils.py
|
tests/test_utils.py
|
__authors__ = [
'"Augie Fackler" <[email protected]>',
]
class MockRequest(object):
"""Shared dummy request object to mock common aspects of a request.
"""
def __init__(self, path=None):
self.REQUEST = self.GET = self.POST = {}
self.path = path
|
__authors__ = [
'"Augie Fackler" <[email protected]>',
'"Sverre Rabbelier" <[email protected]>',
]
from soc.modules import callback
class MockRequest(object):
"""Shared dummy request object to mock common aspects of a request.
Before using the object, start should be called, when done (and
before calling start on a new request), end should be called.
"""
def __init__(self, path=None):
"""Creates a new empty request object.
self.REQUEST, self.GET and self.POST are set to an empty
dictionary, and path to the value specified.
"""
self.REQUEST = {}
self.GET = {}
self.POST = {}
self.path = path
def start(self):
"""Readies the core for a new request.
"""
core = callback.getCore()
core.startNewRequest(self)
def end(self):
"""Finishes up the current request.
"""
core = callback.getCore()
core.endRequest(self, False)
|
Add a start and end method to MockRequest
|
Add a start and end method to MockRequest
|
Python
|
apache-2.0
|
SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange
|
__authors__ = [
'"Augie Fackler" <[email protected]>',
+ '"Sverre Rabbelier" <[email protected]>',
]
+
+
+ from soc.modules import callback
class MockRequest(object):
"""Shared dummy request object to mock common aspects of a request.
+
+ Before using the object, start should be called, when done (and
+ before calling start on a new request), end should be called.
"""
+
def __init__(self, path=None):
- self.REQUEST = self.GET = self.POST = {}
+ """Creates a new empty request object.
+
+ self.REQUEST, self.GET and self.POST are set to an empty
+ dictionary, and path to the value specified.
+ """
+
+ self.REQUEST = {}
+ self.GET = {}
+ self.POST = {}
self.path = path
+ def start(self):
+ """Readies the core for a new request.
+ """
+
+ core = callback.getCore()
+ core.startNewRequest(self)
+
+ def end(self):
+ """Finishes up the current request.
+ """
+
+ core = callback.getCore()
+ core.endRequest(self, False)
+
|
Add a start and end method to MockRequest
|
## Code Before:
__authors__ = [
'"Augie Fackler" <[email protected]>',
]
class MockRequest(object):
"""Shared dummy request object to mock common aspects of a request.
"""
def __init__(self, path=None):
self.REQUEST = self.GET = self.POST = {}
self.path = path
## Instruction:
Add a start and end method to MockRequest
## Code After:
__authors__ = [
'"Augie Fackler" <[email protected]>',
'"Sverre Rabbelier" <[email protected]>',
]
from soc.modules import callback
class MockRequest(object):
"""Shared dummy request object to mock common aspects of a request.
Before using the object, start should be called, when done (and
before calling start on a new request), end should be called.
"""
def __init__(self, path=None):
"""Creates a new empty request object.
self.REQUEST, self.GET and self.POST are set to an empty
dictionary, and path to the value specified.
"""
self.REQUEST = {}
self.GET = {}
self.POST = {}
self.path = path
def start(self):
"""Readies the core for a new request.
"""
core = callback.getCore()
core.startNewRequest(self)
def end(self):
"""Finishes up the current request.
"""
core = callback.getCore()
core.endRequest(self, False)
|
__authors__ = [
'"Augie Fackler" <[email protected]>',
+ '"Sverre Rabbelier" <[email protected]>',
]
+
+
+ from soc.modules import callback
class MockRequest(object):
"""Shared dummy request object to mock common aspects of a request.
+
+ Before using the object, start should be called, when done (and
+ before calling start on a new request), end should be called.
"""
+
def __init__(self, path=None):
- self.REQUEST = self.GET = self.POST = {}
+ """Creates a new empty request object.
+
+ self.REQUEST, self.GET and self.POST are set to an empty
+ dictionary, and path to the value specified.
+ """
+
+ self.REQUEST = {}
+ self.GET = {}
+ self.POST = {}
self.path = path
+
+ def start(self):
+ """Readies the core for a new request.
+ """
+
+ core = callback.getCore()
+ core.startNewRequest(self)
+
+ def end(self):
+ """Finishes up the current request.
+ """
+
+ core = callback.getCore()
+ core.endRequest(self, False)
|
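
The new start/end pair hands the request to the core and back, so tests can bracket each simulated request. A generic runnable sketch of that lifecycle with a stand-in core object instead of soc.modules.callback (all names below are illustrative):

class FakeCore(object):
    def startNewRequest(self, request):
        self.current = request

    def endRequest(self, request, ok):
        self.current = None


class MockRequest(object):
    def __init__(self, core, path=None):
        self.REQUEST, self.GET, self.POST = {}, {}, {}
        self.path = path
        self._core = core

    def start(self):
        self._core.startNewRequest(self)

    def end(self):
        self._core.endRequest(self, False)


core = FakeCore()
req = MockRequest(core, path='/test')
req.start()
assert core.current is req
req.end()
assert core.current is None
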
34015dbc34b2f4e44b104070bae8c3d1956d7e12
|
is_valid/wrapper_predicates.py
|
is_valid/wrapper_predicates.py
|
import json
def is_transformed(transform, predicate, *args, exceptions=[
Exception
], msg='data can\'t be transformed', **kwargs):
def is_valid(data, explain=False):
try:
data = transform(data, *args, **kwargs)
except Exception as e:
if not any(isinstance(e, exc) for exc in exceptions):
raise e
return (False, msg) if explain else False
return predicate(data, explain=explain)
return is_valid
def is_json(predicate, *args, **kwargs):
return is_transformed(json.loads, predicate, *args, exceptions=[
json.JSONDecodeError
], msg='data is not valid json', **kwargs)
|
import json
def is_transformed(transform, predicate, *args, exceptions=[
Exception
], msg='data can\'t be transformed', **kwargs):
def is_valid(data, explain=False, include=False):
try:
data = transform(data, *args, **kwargs)
except Exception as e:
if not any(isinstance(e, exc) for exc in exceptions):
raise e
return (
(False, msg, None) if explain else (False, None)
) if include else (
(False, msg) if explain else False
)
return ((
predicate(data, explain=True) + (data,)
) if explain else (
predicate(data), data
)) if include else predicate(data, explain=explain)
return is_valid
def is_json(predicate, *args, **kwargs):
return is_transformed(json.loads, predicate, *args, exceptions=[
json.JSONDecodeError
], msg='data is not valid json', **kwargs)
|
Add include keyword arg to is_transformed
|
Add include keyword arg to is_transformed
|
Python
|
mit
|
Daanvdk/is_valid
|
import json
def is_transformed(transform, predicate, *args, exceptions=[
Exception
], msg='data can\'t be transformed', **kwargs):
- def is_valid(data, explain=False):
+ def is_valid(data, explain=False, include=False):
try:
data = transform(data, *args, **kwargs)
except Exception as e:
if not any(isinstance(e, exc) for exc in exceptions):
raise e
+ return (
+ (False, msg, None) if explain else (False, None)
+ ) if include else (
- return (False, msg) if explain else False
+ (False, msg) if explain else False
+ )
+ return ((
+ predicate(data, explain=True) + (data,)
+ ) if explain else (
+ predicate(data), data
- return predicate(data, explain=explain)
+ )) if include else predicate(data, explain=explain)
return is_valid
def is_json(predicate, *args, **kwargs):
return is_transformed(json.loads, predicate, *args, exceptions=[
json.JSONDecodeError
], msg='data is not valid json', **kwargs)
|
Add include keyword arg to is_transformed
|
## Code Before:
import json
def is_transformed(transform, predicate, *args, exceptions=[
Exception
], msg='data can\'t be transformed', **kwargs):
def is_valid(data, explain=False):
try:
data = transform(data, *args, **kwargs)
except Exception as e:
if not any(isinstance(e, exc) for exc in exceptions):
raise e
return (False, msg) if explain else False
return predicate(data, explain=explain)
return is_valid
def is_json(predicate, *args, **kwargs):
return is_transformed(json.loads, predicate, *args, exceptions=[
json.JSONDecodeError
], msg='data is not valid json', **kwargs)
## Instruction:
Add include keyword arg to is_transformed
## Code After:
import json
def is_transformed(transform, predicate, *args, exceptions=[
Exception
], msg='data can\'t be transformed', **kwargs):
def is_valid(data, explain=False, include=False):
try:
data = transform(data, *args, **kwargs)
except Exception as e:
if not any(isinstance(e, exc) for exc in exceptions):
raise e
return (
(False, msg, None) if explain else (False, None)
) if include else (
(False, msg) if explain else False
)
return ((
predicate(data, explain=True) + (data,)
) if explain else (
predicate(data), data
)) if include else predicate(data, explain=explain)
return is_valid
def is_json(predicate, *args, **kwargs):
return is_transformed(json.loads, predicate, *args, exceptions=[
json.JSONDecodeError
], msg='data is not valid json', **kwargs)
|
import json
def is_transformed(transform, predicate, *args, exceptions=[
Exception
], msg='data can\'t be transformed', **kwargs):
- def is_valid(data, explain=False):
+ def is_valid(data, explain=False, include=False):
? +++++++++++++++
try:
data = transform(data, *args, **kwargs)
except Exception as e:
if not any(isinstance(e, exc) for exc in exceptions):
raise e
+ return (
+ (False, msg, None) if explain else (False, None)
+ ) if include else (
- return (False, msg) if explain else False
? ^^^^^^
+ (False, msg) if explain else False
? ^^^
+ )
+ return ((
+ predicate(data, explain=True) + (data,)
+ ) if explain else (
+ predicate(data), data
- return predicate(data, explain=explain)
? ^ ^^^^
+ )) if include else predicate(data, explain=explain)
? ^^^^^^^^^^^^ ^^^^^
return is_valid
def is_json(predicate, *args, **kwargs):
return is_transformed(json.loads, predicate, *args, exceptions=[
json.JSONDecodeError
], msg='data is not valid json', **kwargs)
|
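
The include flag changes the return shape so callers also get the transformed data back. A simplified standalone sketch of that idea with plain tuples (this is not the is_valid API, just the same pattern):

import json

def is_json_number(data, include=False):
    # Returns a bool, or (bool, parsed_value) when include=True.
    try:
        parsed = json.loads(data)
    except ValueError:  # json.JSONDecodeError is a ValueError subclass
        return (False, None) if include else False
    ok = isinstance(parsed, (int, float))
    return (ok, parsed) if include else ok

assert is_json_number('42') is True
assert is_json_number('42', include=True) == (True, 42)
assert is_json_number('nope', include=True) == (False, None)
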
e59055e29b5cc6a027d3a24803cc05fd709cca90
|
functest/opnfv_tests/features/odl_sfc.py
|
functest/opnfv_tests/features/odl_sfc.py
|
import functest.core.feature_base as base
from sfc.tests.functest import run_tests
class OpenDaylightSFC(base.FeatureBase):
def __init__(self):
super(OpenDaylightSFC, self).__init__(project='sfc',
case='functest-odl-sfc',
repo='dir_repo_sfc')
def execute(self):
return run_tests.main()
|
import functest.core.feature_base as base
class OpenDaylightSFC(base.FeatureBase):
def __init__(self):
super(OpenDaylightSFC, self).__init__(project='sfc',
case='functest-odl-sfc',
repo='dir_repo_sfc')
dir_sfc_functest = '{}/sfc/tests/functest'.format(self.repo)
self.cmd = 'cd %s && python ./run_tests.py' % dir_sfc_functest
|
Revert "Make SFC test a python call to main()"
|
Revert "Make SFC test a python call to main()"
This reverts commit d5820bef80ea4bdb871380dbfe41db12290fc5f8.
Robot test runs before SFC test and it imports
https://github.com/robotframework/SSHLibrary
which does a monkey patching in
the python runtime / paramiko.
Until now sfc ran in a new python process (clean)
because it ran using the bash command.
But when importing it as a module and calling main()
from python, it will run in the patched runtime
and it will error out.
https://hastebin.com/iyobuxutib.py
Change-Id: I54237c32c957718b363d302efe84e01bc78e4f47
Signed-off-by: George Paraskevopoulos <[email protected]>
|
Python
|
apache-2.0
|
opnfv/functest,mywulin/functest,opnfv/functest,mywulin/functest
|
import functest.core.feature_base as base
- from sfc.tests.functest import run_tests
class OpenDaylightSFC(base.FeatureBase):
def __init__(self):
super(OpenDaylightSFC, self).__init__(project='sfc',
case='functest-odl-sfc',
repo='dir_repo_sfc')
+ dir_sfc_functest = '{}/sfc/tests/functest'.format(self.repo)
+ self.cmd = 'cd %s && python ./run_tests.py' % dir_sfc_functest
- def execute(self):
- return run_tests.main()
-
|
Revert "Make SFC test a python call to main()"
|
## Code Before:
import functest.core.feature_base as base
from sfc.tests.functest import run_tests
class OpenDaylightSFC(base.FeatureBase):
def __init__(self):
super(OpenDaylightSFC, self).__init__(project='sfc',
case='functest-odl-sfc',
repo='dir_repo_sfc')
def execute(self):
return run_tests.main()
## Instruction:
Revert "Make SFC test a python call to main()"
## Code After:
import functest.core.feature_base as base
class OpenDaylightSFC(base.FeatureBase):
def __init__(self):
super(OpenDaylightSFC, self).__init__(project='sfc',
case='functest-odl-sfc',
repo='dir_repo_sfc')
dir_sfc_functest = '{}/sfc/tests/functest'.format(self.repo)
self.cmd = 'cd %s && python ./run_tests.py' % dir_sfc_functest
|
import functest.core.feature_base as base
- from sfc.tests.functest import run_tests
class OpenDaylightSFC(base.FeatureBase):
def __init__(self):
super(OpenDaylightSFC, self).__init__(project='sfc',
case='functest-odl-sfc',
repo='dir_repo_sfc')
+ dir_sfc_functest = '{}/sfc/tests/functest'.format(self.repo)
+ self.cmd = 'cd %s && python ./run_tests.py' % dir_sfc_functest
-
- def execute(self):
- return run_tests.main()
|
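
The revert works because a child process starts with an unpatched interpreter: whatever SSHLibrary has monkey-patched in the parent does not leak into a freshly spawned Python. A generic hedged sketch of that isolation (the script path is a placeholder):

import subprocess
import sys

def run_in_clean_interpreter(script_path):
    # The child process imports paramiko and friends from scratch,
    # untouched by anything the parent has patched.
    return subprocess.call([sys.executable, script_path])

# e.g. run_in_clean_interpreter('/repo/sfc/tests/functest/run_tests.py')
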
55ba2c2310a0f3a4a413801ce8edf52e001c9ffd
|
tornado_srv.py
|
tornado_srv.py
|
import tornado.web
import tornado.wsgi
import tornado.httpserver
import tornado.ioloop
from mojibake.main import app
from mojibake.settings import PORT
container = tornado.wsgi.WSGIContainer(app)
http_server = tornado.httpserver.HTTPServer(container)
http_server.listen(PORT)
tornado.ioloop.IOLoop.instance().start()
|
import tornado.web
import tornado.wsgi
import tornado.httpserver
import tornado.ioloop
import os
from mojibake.main import app
from mojibake.settings import PORT
if os.name == 'posix':
import setproctitle
setproctitle.setproctitle('mojibake') # Set the process title to mojibake
print('Starting Mojibake...')
container = tornado.wsgi.WSGIContainer(app)
http_server = tornado.httpserver.HTTPServer(container)
http_server.listen(PORT)
tornado.ioloop.IOLoop.instance().start()
|
Set the process title on posix systems
|
Set the process title on posix systems
|
Python
|
mit
|
ardinor/mojibake,ardinor/mojibake,ardinor/mojibake
|
import tornado.web
import tornado.wsgi
import tornado.httpserver
import tornado.ioloop
+ import os
from mojibake.main import app
from mojibake.settings import PORT
+ if os.name == 'posix':
+ import setproctitle
+ setproctitle.setproctitle('mojibake') # Set the process title to mojibake
+
+ print('Starting Mojibake...')
container = tornado.wsgi.WSGIContainer(app)
http_server = tornado.httpserver.HTTPServer(container)
http_server.listen(PORT)
tornado.ioloop.IOLoop.instance().start()
|
Set the process title on posix systems
|
## Code Before:
import tornado.web
import tornado.wsgi
import tornado.httpserver
import tornado.ioloop
from mojibake.main import app
from mojibake.settings import PORT
container = tornado.wsgi.WSGIContainer(app)
http_server = tornado.httpserver.HTTPServer(container)
http_server.listen(PORT)
tornado.ioloop.IOLoop.instance().start()
## Instruction:
Set the process title on posix systems
## Code After:
import tornado.web
import tornado.wsgi
import tornado.httpserver
import tornado.ioloop
import os
from mojibake.main import app
from mojibake.settings import PORT
if os.name == 'posix':
import setproctitle
setproctitle.setproctitle('mojibake') # Set the process title to mojibake
print('Starting Mojibake...')
container = tornado.wsgi.WSGIContainer(app)
http_server = tornado.httpserver.HTTPServer(container)
http_server.listen(PORT)
tornado.ioloop.IOLoop.instance().start()
|
import tornado.web
import tornado.wsgi
import tornado.httpserver
import tornado.ioloop
+ import os
from mojibake.main import app
from mojibake.settings import PORT
+ if os.name == 'posix':
+ import setproctitle
+ setproctitle.setproctitle('mojibake') # Set the process title to mojibake
+
+ print('Starting Mojibake...')
container = tornado.wsgi.WSGIContainer(app)
http_server = tornado.httpserver.HTTPServer(container)
http_server.listen(PORT)
tornado.ioloop.IOLoop.instance().start()
|
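
setproctitle is a third-party C extension, which is presumably why the import sits behind the os.name == 'posix' check. A slightly more defensive variant of the same guard (the extra ImportError handling is our addition, not in the record):

import os

if os.name == 'posix':
    try:
        import setproctitle
        setproctitle.setproctitle('mojibake')  # name shown by ps/top
    except ImportError:
        pass  # keep the default interpreter process title
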
e4d06cf4121bc9e1a1f9635e159187b8bed1b2ee
|
pyalysis/analysers/raw.py
|
pyalysis/analysers/raw.py
|
import codecs
from blinker import Signal
from pyalysis.utils import detect_encoding, Location
from pyalysis.warnings import LineTooLong
class LineAnalyser(object):
"""
Line-level analyser of Python source code.
"""
on_analyse = Signal()
on_line = Signal()
def __init__(self, module):
self.module = module
self.encoding = detect_encoding(module)
self.warnings = []
def emit(self, warning_cls, message, lineno, start, end):
self.warnings.append(
warning_cls(
message, self.module.name,
Location(lineno, start),
Location(lineno, end)
)
)
def analyse(self):
self.on_analyse.send(self)
reader = codecs.lookup(self.encoding).streamreader(self.module)
for i, line in enumerate(reader, 1):
self.on_line.send(self, lineno=i, line=line)
return self.warnings
@LineAnalyser.on_line.connect
def check_line_length(analyser, lineno, line):
if len(line.rstrip()) > 79:
analyser.emit(
LineTooLong,
u'Line is longer than 79 characters. '
u'You should keep it below that',
lineno,
79,
len(line.rstrip())
)
|
import codecs
from blinker import Signal
from pyalysis.utils import detect_encoding, Location
from pyalysis.warnings import LineTooLong
class LineAnalyser(object):
"""
Line-level analyser of Python source code.
"""
on_analyse = Signal()
on_line = Signal()
def __init__(self, module):
self.module = module
self.encoding = detect_encoding(module)
self.warnings = []
def emit(self, warning_cls, message):
self.warnings.append(
warning_cls(
message, self.module.name,
Location(self.lineno, 0),
Location(self.lineno, len(self.line))
)
)
def analyse(self):
self.on_analyse.send(self)
reader = codecs.lookup(self.encoding).streamreader(self.module)
for i, line in enumerate(reader, 1):
self.lineno = i
self.line = line
self.on_line.send(self, lineno=i, line=line)
return self.warnings
@LineAnalyser.on_line.connect
def check_line_length(analyser, lineno, line):
if len(line.rstrip()) > 79:
analyser.emit(
LineTooLong,
u'Line is longer than 79 characters. '
u'You should keep it below that',
)
|
Fix location of line length check
|
Fix location of line length check
|
Python
|
bsd-3-clause
|
DasIch/pyalysis,DasIch/pyalysis
|
import codecs
from blinker import Signal
from pyalysis.utils import detect_encoding, Location
from pyalysis.warnings import LineTooLong
class LineAnalyser(object):
"""
Line-level analyser of Python source code.
"""
on_analyse = Signal()
on_line = Signal()
def __init__(self, module):
self.module = module
self.encoding = detect_encoding(module)
self.warnings = []
- def emit(self, warning_cls, message, lineno, start, end):
+ def emit(self, warning_cls, message):
self.warnings.append(
warning_cls(
message, self.module.name,
- Location(lineno, start),
- Location(lineno, end)
+ Location(self.lineno, 0),
+ Location(self.lineno, len(self.line))
)
)
def analyse(self):
self.on_analyse.send(self)
reader = codecs.lookup(self.encoding).streamreader(self.module)
for i, line in enumerate(reader, 1):
+ self.lineno = i
+ self.line = line
self.on_line.send(self, lineno=i, line=line)
return self.warnings
@LineAnalyser.on_line.connect
def check_line_length(analyser, lineno, line):
if len(line.rstrip()) > 79:
analyser.emit(
LineTooLong,
u'Line is longer than 79 characters. '
u'You should keep it below that',
- lineno,
- 79,
- len(line.rstrip())
)
|
Fix location of line length check
|
## Code Before:
import codecs
from blinker import Signal
from pyalysis.utils import detect_encoding, Location
from pyalysis.warnings import LineTooLong
class LineAnalyser(object):
"""
Line-level analyser of Python source code.
"""
on_analyse = Signal()
on_line = Signal()
def __init__(self, module):
self.module = module
self.encoding = detect_encoding(module)
self.warnings = []
def emit(self, warning_cls, message, lineno, start, end):
self.warnings.append(
warning_cls(
message, self.module.name,
Location(lineno, start),
Location(lineno, end)
)
)
def analyse(self):
self.on_analyse.send(self)
reader = codecs.lookup(self.encoding).streamreader(self.module)
for i, line in enumerate(reader, 1):
self.on_line.send(self, lineno=i, line=line)
return self.warnings
@LineAnalyser.on_line.connect
def check_line_length(analyser, lineno, line):
if len(line.rstrip()) > 79:
analyser.emit(
LineTooLong,
u'Line is longer than 79 characters. '
u'You should keep it below that',
lineno,
79,
len(line.rstrip())
)
## Instruction:
Fix location of line length check
## Code After:
import codecs
from blinker import Signal
from pyalysis.utils import detect_encoding, Location
from pyalysis.warnings import LineTooLong
class LineAnalyser(object):
"""
Line-level analyser of Python source code.
"""
on_analyse = Signal()
on_line = Signal()
def __init__(self, module):
self.module = module
self.encoding = detect_encoding(module)
self.warnings = []
def emit(self, warning_cls, message):
self.warnings.append(
warning_cls(
message, self.module.name,
Location(self.lineno, 0),
Location(self.lineno, len(self.line))
)
)
def analyse(self):
self.on_analyse.send(self)
reader = codecs.lookup(self.encoding).streamreader(self.module)
for i, line in enumerate(reader, 1):
self.lineno = i
self.line = line
self.on_line.send(self, lineno=i, line=line)
return self.warnings
@LineAnalyser.on_line.connect
def check_line_length(analyser, lineno, line):
if len(line.rstrip()) > 79:
analyser.emit(
LineTooLong,
u'Line is longer than 79 characters. '
u'You should keep it below that',
)
|
import codecs
from blinker import Signal
from pyalysis.utils import detect_encoding, Location
from pyalysis.warnings import LineTooLong
class LineAnalyser(object):
"""
Line-level analyser of Python source code.
"""
on_analyse = Signal()
on_line = Signal()
def __init__(self, module):
self.module = module
self.encoding = detect_encoding(module)
self.warnings = []
- def emit(self, warning_cls, message, lineno, start, end):
? --------------------
+ def emit(self, warning_cls, message):
self.warnings.append(
warning_cls(
message, self.module.name,
- Location(lineno, start),
- Location(lineno, end)
? ^^^
+ Location(self.lineno, 0),
? +++++ ^ +
+ Location(self.lineno, len(self.line))
)
)
def analyse(self):
self.on_analyse.send(self)
reader = codecs.lookup(self.encoding).streamreader(self.module)
for i, line in enumerate(reader, 1):
+ self.lineno = i
+ self.line = line
self.on_line.send(self, lineno=i, line=line)
return self.warnings
@LineAnalyser.on_line.connect
def check_line_length(analyser, lineno, line):
if len(line.rstrip()) > 79:
analyser.emit(
LineTooLong,
u'Line is longer than 79 characters. '
u'You should keep it below that',
- lineno,
- 79,
- len(line.rstrip())
)
|
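Aside (illustrative, not part of the dataset): the commit above stores the current line number and text on the analyser so that emit() can derive the warning location itself instead of receiving it from every check. A minimal sketch of that signal-driven pattern, with hypothetical names, might look like this.
from blinker import Signal

class TinyAnalyser(object):
    on_line = Signal()

    def __init__(self, lines):
        self.lines = lines
        self.warnings = []

    def emit(self, message):
        # Location comes from the analyser's stored state, as in the fixed code.
        self.warnings.append((self.lineno, 0, len(self.line), message))

    def analyse(self):
        for i, line in enumerate(self.lines, 1):
            self.lineno = i
            self.line = line
            self.on_line.send(self, lineno=i, line=line)
        return self.warnings

@TinyAnalyser.on_line.connect
def check_length(analyser, lineno, line):
    if len(line.rstrip()) > 79:
        analyser.emit(u'Line is longer than 79 characters.')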
3e8921b2edcf8a675b6ed496cf5e282c76cc2070
|
retrieveData.py
|
retrieveData.py
|
import json, os, requests
from models import db, FoodMenu, FoodServices
key = os.environ.get('UWOPENDATA_APIKEY')
def getData(service):
payload = {'key': key, 'service': service}
r = requests.get('http://api.uwaterloo.ca/public/v1/', params=payload)
return r
foodMenu = getData('FoodMenu').text
foodMenuData = FoodMenu(foodMenu)
serviceInfo = getData('FoodServices').text
serviceInfoData = FoodServices(serviceInfo)
db.session.add(foodMenuData)
db.session.add(serviceInfoData)
db.session.commit()
|
import json, os, requests
from models import db, FoodMenu, FoodServices
key = os.environ.get('UWOPENDATA_APIKEY')
def getData(service):
payload = {'key': key, 'service': service}
r = requests.get('http://api.uwaterloo.ca/public/v1/', params=payload)
return r
def retrieve():
payload = {'key': key}
url = os.environ.get('API_URL')
r = requests.get(url, params=payload)
return r
foodMenu = retrieve().text
foodMenuData = FoodMenu(foodMenu)
serviceInfo = getData('FoodServices').text
serviceInfoData = FoodServices(serviceInfo)
db.session.add(foodMenuData)
db.session.add(serviceInfoData)
db.session.commit()
|
Update retrieve() for FoodMenu data
|
Update retrieve() for FoodMenu data
|
Python
|
mit
|
alykhank/FoodMenu,alykhank/FoodMenu,alykhank/FoodMenu
|
import json, os, requests
from models import db, FoodMenu, FoodServices
key = os.environ.get('UWOPENDATA_APIKEY')
def getData(service):
payload = {'key': key, 'service': service}
r = requests.get('http://api.uwaterloo.ca/public/v1/', params=payload)
return r
- foodMenu = getData('FoodMenu').text
+ def retrieve():
+ payload = {'key': key}
+ url = os.environ.get('API_URL')
+ r = requests.get(url, params=payload)
+ return r
+
+ foodMenu = retrieve().text
foodMenuData = FoodMenu(foodMenu)
serviceInfo = getData('FoodServices').text
serviceInfoData = FoodServices(serviceInfo)
db.session.add(foodMenuData)
db.session.add(serviceInfoData)
db.session.commit()
|
Update retrieve() for FoodMenu data
|
## Code Before:
import json, os, requests
from models import db, FoodMenu, FoodServices
key = os.environ.get('UWOPENDATA_APIKEY')
def getData(service):
payload = {'key': key, 'service': service}
r = requests.get('http://api.uwaterloo.ca/public/v1/', params=payload)
return r
foodMenu = getData('FoodMenu').text
foodMenuData = FoodMenu(foodMenu)
serviceInfo = getData('FoodServices').text
serviceInfoData = FoodServices(serviceInfo)
db.session.add(foodMenuData)
db.session.add(serviceInfoData)
db.session.commit()
## Instruction:
Update retrieve() for FoodMenu data
## Code After:
import json, os, requests
from models import db, FoodMenu, FoodServices
key = os.environ.get('UWOPENDATA_APIKEY')
def getData(service):
payload = {'key': key, 'service': service}
r = requests.get('http://api.uwaterloo.ca/public/v1/', params=payload)
return r
def retrieve():
payload = {'key': key}
url = os.environ.get('API_URL')
r = requests.get(url, params=payload)
return r
foodMenu = retrieve().text
foodMenuData = FoodMenu(foodMenu)
serviceInfo = getData('FoodServices').text
serviceInfoData = FoodServices(serviceInfo)
db.session.add(foodMenuData)
db.session.add(serviceInfoData)
db.session.commit()
|
import json, os, requests
from models import db, FoodMenu, FoodServices
key = os.environ.get('UWOPENDATA_APIKEY')
def getData(service):
payload = {'key': key, 'service': service}
r = requests.get('http://api.uwaterloo.ca/public/v1/', params=payload)
return r
- foodMenu = getData('FoodMenu').text
+ def retrieve():
+ payload = {'key': key}
+ url = os.environ.get('API_URL')
+ r = requests.get(url, params=payload)
+ return r
+
+ foodMenu = retrieve().text
foodMenuData = FoodMenu(foodMenu)
serviceInfo = getData('FoodServices').text
serviceInfoData = FoodServices(serviceInfo)
db.session.add(foodMenuData)
db.session.add(serviceInfoData)
db.session.commit()
|
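Aside (assumed sketch, not a dataset row): the change above switches the menu fetch to a URL taken from the environment while still passing the API key as a query parameter. The environment variable names below mirror the record; the error handling is an illustrative addition.
import os
import requests

def retrieve(url_env='API_URL', key_env='UWOPENDATA_APIKEY'):
    payload = {'key': os.environ.get(key_env)}
    url = os.environ.get(url_env)
    response = requests.get(url, params=payload)  # key is sent as ?key=...
    response.raise_for_status()  # illustration only; the record returns r directly
    return response.text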
4613daea5d9d603b5f092005627fabd805de8a45
|
example/app/utils.py
|
example/app/utils.py
|
from django.contrib.auth import get_user_model
def disable_admin_login():
"""
Disable admin login, but allow editing.
amended from: https://stackoverflow.com/a/40008282/517560
"""
User = get_user_model()
user, created = User.objects.update_or_create(
id=1,
defaults=dict(
first_name="Default Admin",
last_name="User",
is_superuser=True,
is_active=True,
is_staff=True,
),
)
def no_login_has_permission(request):
setattr(request, "user", user)
return True
return no_login_has_permission
|
from django.contrib.auth import get_user_model
from django.db.utils import ProgrammingError
def disable_admin_login():
"""
Disable admin login, but allow editing.
amended from: https://stackoverflow.com/a/40008282/517560
"""
User = get_user_model()
try:
user, created = User.objects.update_or_create(
id=1,
defaults=dict(
first_name="Default Admin",
last_name="User",
is_superuser=True,
is_active=True,
is_staff=True,
),
)
except ProgrammingError:
# auth_user doesn't exist, this allows the migrations to run properly.
user = None
def no_login_has_permission(request):
setattr(request, "user", user)
return True
return no_login_has_permission
|
Make initial ./manage.py migrate work in example
|
Make initial ./manage.py migrate work in example
|
Python
|
bsd-3-clause
|
zostera/django-modeltrans,zostera/django-modeltrans
|
from django.contrib.auth import get_user_model
+ from django.db.utils import ProgrammingError
def disable_admin_login():
"""
Disable admin login, but allow editing.
amended from: https://stackoverflow.com/a/40008282/517560
"""
User = get_user_model()
+ try:
- user, created = User.objects.update_or_create(
+ user, created = User.objects.update_or_create(
- id=1,
+ id=1,
- defaults=dict(
+ defaults=dict(
- first_name="Default Admin",
+ first_name="Default Admin",
- last_name="User",
+ last_name="User",
- is_superuser=True,
+ is_superuser=True,
- is_active=True,
+ is_active=True,
- is_staff=True,
+ is_staff=True,
+ ),
- ),
+ )
- )
+ except ProgrammingError:
+ # auth_user doesn't exist, this allows the migrations to run properly.
+ user = None
def no_login_has_permission(request):
setattr(request, "user", user)
return True
return no_login_has_permission
|
Make initial ./manage.py migrate work in example
|
## Code Before:
from django.contrib.auth import get_user_model
def disable_admin_login():
"""
Disable admin login, but allow editing.
amended from: https://stackoverflow.com/a/40008282/517560
"""
User = get_user_model()
user, created = User.objects.update_or_create(
id=1,
defaults=dict(
first_name="Default Admin",
last_name="User",
is_superuser=True,
is_active=True,
is_staff=True,
),
)
def no_login_has_permission(request):
setattr(request, "user", user)
return True
return no_login_has_permission
## Instruction:
Make initial ./manage.py migrate work in example
## Code After:
from django.contrib.auth import get_user_model
from django.db.utils import ProgrammingError
def disable_admin_login():
"""
Disable admin login, but allow editing.
amended from: https://stackoverflow.com/a/40008282/517560
"""
User = get_user_model()
try:
user, created = User.objects.update_or_create(
id=1,
defaults=dict(
first_name="Default Admin",
last_name="User",
is_superuser=True,
is_active=True,
is_staff=True,
),
)
except ProgrammingError:
# auth_user doesn't exist, this allows the migrations to run properly.
user = None
def no_login_has_permission(request):
setattr(request, "user", user)
return True
return no_login_has_permission
|
from django.contrib.auth import get_user_model
+ from django.db.utils import ProgrammingError
def disable_admin_login():
"""
Disable admin login, but allow editing.
amended from: https://stackoverflow.com/a/40008282/517560
"""
User = get_user_model()
+ try:
- user, created = User.objects.update_or_create(
+ user, created = User.objects.update_or_create(
? ++++
- id=1,
+ id=1,
? ++++
- defaults=dict(
+ defaults=dict(
? ++++
- first_name="Default Admin",
+ first_name="Default Admin",
? ++++
- last_name="User",
+ last_name="User",
? ++++
- is_superuser=True,
+ is_superuser=True,
? ++++
- is_active=True,
+ is_active=True,
? ++++
- is_staff=True,
+ is_staff=True,
? ++++
+ ),
- ),
? -
+ )
- )
+ except ProgrammingError:
+ # auth_user doesn't exist, this allows the migrations to run properly.
+ user = None
def no_login_has_permission(request):
setattr(request, "user", user)
return True
return no_login_has_permission
|
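Aside (hedged sketch, not dataset content): the commit above tolerates the auth_user table being absent so that a fresh ./manage.py migrate can run before any tables exist. The same idea in isolation, with hypothetical names:
from django.contrib.auth import get_user_model
from django.db.utils import ProgrammingError

def default_admin_or_none():
    User = get_user_model()
    try:
        user, _created = User.objects.get_or_create(
            id=1,
            defaults={"is_superuser": True, "is_staff": True, "is_active": True},
        )
    except ProgrammingError:
        # auth_user does not exist yet; let the initial migrations proceed.
        user = None
    return user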
b2eebbdcc14dd47d6ad8bb385966f13ed13890c1
|
superdesk/coverages.py
|
superdesk/coverages.py
|
from superdesk.base_model import BaseModel
def init_app(app):
CoverageModel(app=app)
def rel(resource, embeddable=False):
return {
'type': 'objectid',
'data_relation': {'resource': resource, 'field': '_id', 'embeddable': embeddable}
}
class CoverageModel(BaseModel):
endpoint_name = 'coverages'
schema = {
'headline': {'type': 'string'},
'type': {'type': 'string'},
'ed_note': {'type': 'string'},
'scheduled': {'type': 'datetime'},
'delivery': rel('archive'),
'assigned_user': rel('users', True),
'assigned_desk': rel('desks', True),
'planning_item': rel('planning'),
}
|
from superdesk.base_model import BaseModel
def init_app(app):
CoverageModel(app=app)
def rel(resource, embeddable=False):
return {
'type': 'objectid',
'data_relation': {'resource': resource, 'field': '_id', 'embeddable': embeddable}
}
class CoverageModel(BaseModel):
endpoint_name = 'coverages'
schema = {
'headline': {'type': 'string'},
'type': {'type': 'string'},
'ed_note': {'type': 'string'},
'scheduled': {'type': 'datetime'},
'delivery': {'type': 'string'},
'assigned_user': rel('users', True),
'assigned_desk': rel('desks', True),
'planning_item': {'type': 'string'},
}
|
Fix data relation not working for custom Guids
|
Fix data relation not working for custom Guids
|
Python
|
agpl-3.0
|
plamut/superdesk,sivakuna-aap/superdesk,mdhaman/superdesk-aap,sivakuna-aap/superdesk,liveblog/superdesk,pavlovicnemanja/superdesk,petrjasek/superdesk,mugurrus/superdesk,ioanpocol/superdesk,pavlovicnemanja/superdesk,Aca-jov/superdesk,akintolga/superdesk,vied12/superdesk,gbbr/superdesk,fritzSF/superdesk,ancafarcas/superdesk,ioanpocol/superdesk-ntb,mdhaman/superdesk-aap,marwoodandrew/superdesk-aap,darconny/superdesk,akintolga/superdesk-aap,amagdas/superdesk,sivakuna-aap/superdesk,thnkloud9/superdesk,fritzSF/superdesk,akintolga/superdesk-aap,ancafarcas/superdesk,akintolga/superdesk,pavlovicnemanja92/superdesk,amagdas/superdesk,vied12/superdesk,verifiedpixel/superdesk,superdesk/superdesk-ntb,Aca-jov/superdesk,superdesk/superdesk,akintolga/superdesk,marwoodandrew/superdesk-aap,hlmnrmr/superdesk,verifiedpixel/superdesk,pavlovicnemanja/superdesk,petrjasek/superdesk-server,pavlovicnemanja/superdesk,liveblog/superdesk,thnkloud9/superdesk,superdesk/superdesk-aap,plamut/superdesk,vied12/superdesk,ioanpocol/superdesk-ntb,plamut/superdesk,darconny/superdesk,sjunaid/superdesk,superdesk/superdesk-aap,amagdas/superdesk,verifiedpixel/superdesk,vied12/superdesk,darconny/superdesk,fritzSF/superdesk,sivakuna-aap/superdesk,amagdas/superdesk,superdesk/superdesk-aap,gbbr/superdesk,marwoodandrew/superdesk-aap,mdhaman/superdesk,petrjasek/superdesk-ntb,sivakuna-aap/superdesk,akintolga/superdesk-aap,akintolga/superdesk,superdesk/superdesk-ntb,fritzSF/superdesk,marwoodandrew/superdesk,marwoodandrew/superdesk,verifiedpixel/superdesk,amagdas/superdesk,marwoodandrew/superdesk-aap,sjunaid/superdesk,petrjasek/superdesk-ntb,sjunaid/superdesk,Aca-jov/superdesk,ioanpocol/superdesk-ntb,petrjasek/superdesk-server,pavlovicnemanja92/superdesk,superdesk/superdesk,ancafarcas/superdesk,superdesk/superdesk,mdhaman/superdesk,fritzSF/superdesk,vied12/superdesk,pavlovicnemanja92/superdesk,petrjasek/superdesk-ntb,hlmnrmr/superdesk,marwoodandrew/superdesk,superdesk/superdesk-ntb,petrjasek/superdesk,hlmnrmr/superdesk,petrjasek/superdesk,mugurrus/superdesk,gbbr/superdesk,pavlovicnemanja92/superdesk,plamut/superdesk,liveblog/superdesk,verifiedpixel/superdesk,petrjasek/superdesk,mdhaman/superdesk,petrjasek/superdesk-ntb,mugurrus/superdesk,mdhaman/superdesk-aap,superdesk/superdesk-aap,superdesk/superdesk-ntb,marwoodandrew/superdesk,pavlovicnemanja92/superdesk,ioanpocol/superdesk,thnkloud9/superdesk,marwoodandrew/superdesk,liveblog/superdesk,mdhaman/superdesk-aap,ioanpocol/superdesk,akintolga/superdesk,plamut/superdesk,liveblog/superdesk,superdesk/superdesk,akintolga/superdesk-aap
|
from superdesk.base_model import BaseModel
def init_app(app):
CoverageModel(app=app)
def rel(resource, embeddable=False):
return {
'type': 'objectid',
'data_relation': {'resource': resource, 'field': '_id', 'embeddable': embeddable}
}
class CoverageModel(BaseModel):
endpoint_name = 'coverages'
schema = {
'headline': {'type': 'string'},
'type': {'type': 'string'},
'ed_note': {'type': 'string'},
'scheduled': {'type': 'datetime'},
- 'delivery': rel('archive'),
+ 'delivery': {'type': 'string'},
'assigned_user': rel('users', True),
'assigned_desk': rel('desks', True),
- 'planning_item': rel('planning'),
+ 'planning_item': {'type': 'string'},
}
|
Fix data relation not working for custom Guids
|
## Code Before:
from superdesk.base_model import BaseModel
def init_app(app):
CoverageModel(app=app)
def rel(resource, embeddable=False):
return {
'type': 'objectid',
'data_relation': {'resource': resource, 'field': '_id', 'embeddable': embeddable}
}
class CoverageModel(BaseModel):
endpoint_name = 'coverages'
schema = {
'headline': {'type': 'string'},
'type': {'type': 'string'},
'ed_note': {'type': 'string'},
'scheduled': {'type': 'datetime'},
'delivery': rel('archive'),
'assigned_user': rel('users', True),
'assigned_desk': rel('desks', True),
'planning_item': rel('planning'),
}
## Instruction:
Fix data relation not working for custom Guids
## Code After:
from superdesk.base_model import BaseModel
def init_app(app):
CoverageModel(app=app)
def rel(resource, embeddable=False):
return {
'type': 'objectid',
'data_relation': {'resource': resource, 'field': '_id', 'embeddable': embeddable}
}
class CoverageModel(BaseModel):
endpoint_name = 'coverages'
schema = {
'headline': {'type': 'string'},
'type': {'type': 'string'},
'ed_note': {'type': 'string'},
'scheduled': {'type': 'datetime'},
'delivery': {'type': 'string'},
'assigned_user': rel('users', True),
'assigned_desk': rel('desks', True),
'planning_item': {'type': 'string'},
}
|
from superdesk.base_model import BaseModel
def init_app(app):
CoverageModel(app=app)
def rel(resource, embeddable=False):
return {
'type': 'objectid',
'data_relation': {'resource': resource, 'field': '_id', 'embeddable': embeddable}
}
class CoverageModel(BaseModel):
endpoint_name = 'coverages'
schema = {
'headline': {'type': 'string'},
'type': {'type': 'string'},
'ed_note': {'type': 'string'},
'scheduled': {'type': 'datetime'},
- 'delivery': rel('archive'),
+ 'delivery': {'type': 'string'},
'assigned_user': rel('users', True),
'assigned_desk': rel('desks', True),
- 'planning_item': rel('planning'),
+ 'planning_item': {'type': 'string'},
}
|
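Aside (illustrative assumption, not a dataset row): in an Eve/Cerberus schema an 'objectid' data_relation validates against MongoDB ObjectIds, so the commit above declares fields that hold custom string GUIDs as plain strings instead. A condensed sketch:
def rel(resource, embeddable=False):
    # ObjectId-backed relation, suitable only when the target _id is an ObjectId.
    return {
        'type': 'objectid',
        'data_relation': {'resource': resource, 'field': '_id',
                          'embeddable': embeddable},
    }

coverage_schema = {
    'assigned_user': rel('users', True),  # genuine ObjectId reference
    'delivery': {'type': 'string'},       # custom GUID kept as a string
    'planning_item': {'type': 'string'},  # custom GUID kept as a string
}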