commit
stringlengths 40
40
| old_file
stringlengths 4
106
| new_file
stringlengths 4
106
| old_contents
stringlengths 10
2.94k
| new_contents
stringlengths 21
2.95k
| subject
stringlengths 16
444
| message
stringlengths 17
2.63k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 7
43k
| ndiff
stringlengths 52
3.31k
| instruction
stringlengths 16
444
| content
stringlengths 133
4.32k
| diff
stringlengths 49
3.61k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
bcf4f87e3690986827d8d34eea5e7edfc03485e2
|
cassandra_migrate/test/test_cql.py
|
cassandra_migrate/test/test_cql.py
|
from __future__ import unicode_literals
import pytest
from cassandra_migrate.cql import CqlSplitter
@pytest.mark.parametrize('cql,statements', [
# Two statements, with whitespace
('''
CREATE TABLE hello;
CREATE TABLE world;
''',
['CREATE TABLE hello', 'CREATE TABLE world']),
# Two statements, no whitespace
('''CREATE TABLE hello;CREATE TABLE world;''',
['CREATE TABLE hello', 'CREATE TABLE world']),
# Two statements, with line and block comments
('''
// comment
-- comment
CREATE TABLE hello;
/* comment; comment
*/
CREATE TABLE world;
''',
['CREATE TABLE hello', 'CREATE TABLE world']),
# Statements with semicolons inside strings
('''
CREATE TABLE 'hello;';
CREATE TABLE "world;"
''',
["CREATE TABLE 'hello;'", 'CREATE TABLE "world;"'])
])
def test_cql_split(cql, statements):
result = CqlSplitter.split(cql.strip())
assert result == statements
|
from __future__ import unicode_literals
import pytest
from cassandra_migrate.cql import CqlSplitter
@pytest.mark.parametrize('cql,statements', [
# Two statements, with whitespace
('''
CREATE TABLE hello;
CREATE TABLE world;
''',
['CREATE TABLE hello', 'CREATE TABLE world']),
# Two statements, no whitespace
('''CREATE TABLE hello;CREATE TABLE world;''',
['CREATE TABLE hello', 'CREATE TABLE world']),
# Two statements, with line and block comments
('''
// comment
-- comment
CREATE TABLE hello;
/* comment; comment
*/
CREATE TABLE world;
''',
['CREATE TABLE hello', 'CREATE TABLE world']),
# Statements with semicolons inside strings
('''
CREATE TABLE 'hello;';
CREATE TABLE "world;"
''',
["CREATE TABLE 'hello;'", 'CREATE TABLE "world;"']),
# Double-dollar-sign quoted strings, as reported in PR #24
('INSERT INTO test (test) VALUES '
'($$Pesky semicolon here ;Hello$$);',
["INSERT INTO test (test) VALUES ($$Pesky semicolon here ;Hello$$)"])
])
def test_cql_split(cql, statements):
result = CqlSplitter.split(cql.strip())
assert result == statements
|
Add CQL-splitting test case for double-dollar-sign strings
|
Add CQL-splitting test case for double-dollar-sign strings
|
Python
|
mit
|
Cobliteam/cassandra-migrate,Cobliteam/cassandra-migrate
|
from __future__ import unicode_literals
import pytest
from cassandra_migrate.cql import CqlSplitter
@pytest.mark.parametrize('cql,statements', [
# Two statements, with whitespace
('''
CREATE TABLE hello;
CREATE TABLE world;
''',
['CREATE TABLE hello', 'CREATE TABLE world']),
# Two statements, no whitespace
('''CREATE TABLE hello;CREATE TABLE world;''',
['CREATE TABLE hello', 'CREATE TABLE world']),
# Two statements, with line and block comments
('''
// comment
-- comment
CREATE TABLE hello;
/* comment; comment
*/
CREATE TABLE world;
''',
['CREATE TABLE hello', 'CREATE TABLE world']),
# Statements with semicolons inside strings
('''
CREATE TABLE 'hello;';
CREATE TABLE "world;"
''',
- ["CREATE TABLE 'hello;'", 'CREATE TABLE "world;"'])
+ ["CREATE TABLE 'hello;'", 'CREATE TABLE "world;"']),
+ # Double-dollar-sign quoted strings, as reported in PR #24
+ ('INSERT INTO test (test) VALUES '
+ '($$Pesky semicolon here ;Hello$$);',
+ ["INSERT INTO test (test) VALUES ($$Pesky semicolon here ;Hello$$)"])
])
def test_cql_split(cql, statements):
result = CqlSplitter.split(cql.strip())
assert result == statements
|
Add CQL-splitting test case for double-dollar-sign strings
|
## Code Before:
from __future__ import unicode_literals
import pytest
from cassandra_migrate.cql import CqlSplitter
@pytest.mark.parametrize('cql,statements', [
# Two statements, with whitespace
('''
CREATE TABLE hello;
CREATE TABLE world;
''',
['CREATE TABLE hello', 'CREATE TABLE world']),
# Two statements, no whitespace
('''CREATE TABLE hello;CREATE TABLE world;''',
['CREATE TABLE hello', 'CREATE TABLE world']),
# Two statements, with line and block comments
('''
// comment
-- comment
CREATE TABLE hello;
/* comment; comment
*/
CREATE TABLE world;
''',
['CREATE TABLE hello', 'CREATE TABLE world']),
# Statements with semicolons inside strings
('''
CREATE TABLE 'hello;';
CREATE TABLE "world;"
''',
["CREATE TABLE 'hello;'", 'CREATE TABLE "world;"'])
])
def test_cql_split(cql, statements):
result = CqlSplitter.split(cql.strip())
assert result == statements
## Instruction:
Add CQL-splitting test case for double-dollar-sign strings
## Code After:
from __future__ import unicode_literals
import pytest
from cassandra_migrate.cql import CqlSplitter
@pytest.mark.parametrize('cql,statements', [
# Two statements, with whitespace
('''
CREATE TABLE hello;
CREATE TABLE world;
''',
['CREATE TABLE hello', 'CREATE TABLE world']),
# Two statements, no whitespace
('''CREATE TABLE hello;CREATE TABLE world;''',
['CREATE TABLE hello', 'CREATE TABLE world']),
# Two statements, with line and block comments
('''
// comment
-- comment
CREATE TABLE hello;
/* comment; comment
*/
CREATE TABLE world;
''',
['CREATE TABLE hello', 'CREATE TABLE world']),
# Statements with semicolons inside strings
('''
CREATE TABLE 'hello;';
CREATE TABLE "world;"
''',
["CREATE TABLE 'hello;'", 'CREATE TABLE "world;"']),
# Double-dollar-sign quoted strings, as reported in PR #24
('INSERT INTO test (test) VALUES '
'($$Pesky semicolon here ;Hello$$);',
["INSERT INTO test (test) VALUES ($$Pesky semicolon here ;Hello$$)"])
])
def test_cql_split(cql, statements):
result = CqlSplitter.split(cql.strip())
assert result == statements
|
from __future__ import unicode_literals
import pytest
from cassandra_migrate.cql import CqlSplitter
@pytest.mark.parametrize('cql,statements', [
# Two statements, with whitespace
('''
CREATE TABLE hello;
CREATE TABLE world;
''',
['CREATE TABLE hello', 'CREATE TABLE world']),
# Two statements, no whitespace
('''CREATE TABLE hello;CREATE TABLE world;''',
['CREATE TABLE hello', 'CREATE TABLE world']),
# Two statements, with line and block comments
('''
// comment
-- comment
CREATE TABLE hello;
/* comment; comment
*/
CREATE TABLE world;
''',
['CREATE TABLE hello', 'CREATE TABLE world']),
# Statements with semicolons inside strings
('''
CREATE TABLE 'hello;';
CREATE TABLE "world;"
''',
- ["CREATE TABLE 'hello;'", 'CREATE TABLE "world;"'])
+ ["CREATE TABLE 'hello;'", 'CREATE TABLE "world;"']),
? +
+ # Double-dollar-sign quoted strings, as reported in PR #24
+ ('INSERT INTO test (test) VALUES '
+ '($$Pesky semicolon here ;Hello$$);',
+ ["INSERT INTO test (test) VALUES ($$Pesky semicolon here ;Hello$$)"])
])
def test_cql_split(cql, statements):
result = CqlSplitter.split(cql.strip())
assert result == statements
|
f2f77cd326c3b121eb7e6e53dfcab3964f473451
|
fileupload/models.py
|
fileupload/models.py
|
from django.db import models
class Picture(models.Model):
file = models.ImageField(upload_to="pictures")
slug = models.SlugField(max_length=50, blank=True)
def __unicode__(self):
return self.file
@models.permalink
def get_absolute_url(self):
return ('upload-new', )
def save(self, *args, **kwargs):
self.slug = self.file.name
super(Picture, self).save(*args, **kwargs)
|
from django.db import models
class Picture(models.Model):
# This is a small demo using FileField instead of ImageField, not
# depending on PIL. You will probably want ImageField in your app.
file = models.FileField(upload_to="pictures")
slug = models.SlugField(max_length=50, blank=True)
def __unicode__(self):
return self.file
@models.permalink
def get_absolute_url(self):
return ('upload-new', )
def save(self, *args, **kwargs):
self.slug = self.file.name
super(Picture, self).save(*args, **kwargs)
|
Use FileField instead of ImageField, we don't need PIL in this demo.
|
Use FileField instead of ImageField, we don't need PIL in this demo.
|
Python
|
mit
|
extremoburo/django-jquery-file-upload,indrajithi/mgc-django,Imaginashion/cloud-vision,madteckhead/django-jquery-file-upload,sigurdga/django-jquery-file-upload,vaniakov/django-jquery-file-upload,extremoburo/django-jquery-file-upload,sigurdga/django-jquery-file-upload,Imaginashion/cloud-vision,Imaginashion/cloud-vision,madteckhead/django-jquery-file-upload,Imaginashion/cloud-vision,Imaginashion/cloud-vision,indrajithi/mgc-django,extremoburo/django-jquery-file-upload,sigurdga/django-jquery-file-upload,Imaginashion/cloud-vision,minhlongdo/django-jquery-file-upload,vaniakov/django-jquery-file-upload,minhlongdo/django-jquery-file-upload,vaniakov/django-jquery-file-upload,minhlongdo/django-jquery-file-upload
|
from django.db import models
class Picture(models.Model):
+ # This is a small demo using FileField instead of ImageField, not
+ # depending on PIL. You will probably want ImageField in your app.
- file = models.ImageField(upload_to="pictures")
+ file = models.FileField(upload_to="pictures")
slug = models.SlugField(max_length=50, blank=True)
def __unicode__(self):
return self.file
@models.permalink
def get_absolute_url(self):
return ('upload-new', )
def save(self, *args, **kwargs):
self.slug = self.file.name
super(Picture, self).save(*args, **kwargs)
|
Use FileField instead of ImageField, we don't need PIL in this demo.
|
## Code Before:
from django.db import models
class Picture(models.Model):
file = models.ImageField(upload_to="pictures")
slug = models.SlugField(max_length=50, blank=True)
def __unicode__(self):
return self.file
@models.permalink
def get_absolute_url(self):
return ('upload-new', )
def save(self, *args, **kwargs):
self.slug = self.file.name
super(Picture, self).save(*args, **kwargs)
## Instruction:
Use FileField instead of ImageField, we don't need PIL in this demo.
## Code After:
from django.db import models
class Picture(models.Model):
# This is a small demo using FileField instead of ImageField, not
# depending on PIL. You will probably want ImageField in your app.
file = models.FileField(upload_to="pictures")
slug = models.SlugField(max_length=50, blank=True)
def __unicode__(self):
return self.file
@models.permalink
def get_absolute_url(self):
return ('upload-new', )
def save(self, *args, **kwargs):
self.slug = self.file.name
super(Picture, self).save(*args, **kwargs)
|
from django.db import models
class Picture(models.Model):
+ # This is a small demo using FileField instead of ImageField, not
+ # depending on PIL. You will probably want ImageField in your app.
- file = models.ImageField(upload_to="pictures")
? ^^^^
+ file = models.FileField(upload_to="pictures")
? ^^^
slug = models.SlugField(max_length=50, blank=True)
def __unicode__(self):
return self.file
@models.permalink
def get_absolute_url(self):
return ('upload-new', )
def save(self, *args, **kwargs):
self.slug = self.file.name
super(Picture, self).save(*args, **kwargs)
|
1cff28b9612c156363ed87cdde1718ee83b65776
|
real_estate_agency/resale/serializers.py
|
real_estate_agency/resale/serializers.py
|
from rest_framework import serializers
from .models import ResaleApartment, ResaleApartmentImage
class ResaleApartmentImageSerializer(serializers.ModelSerializer):
class Meta:
model = ResaleApartmentImage
fields = '__all__'
class ResaleApartmentSerializer(serializers.ModelSerializer):
# images = ResaleApartmentImageSerializer(source='photos', many=True)
get_building_type_display = serializers.ReadOnlyField()
price_per_square_meter = serializers.ReadOnlyField()
neighbourhood = serializers.StringRelatedField()
class Meta:
model = ResaleApartment
fields = (
'total_area',
'address',
'floor',
'number_of_storeys',
# 'images',
'full_price',
'old_price',
'price_per_square_meter',
'neighbourhood',
'get_building_type_display',
'number_of_storeys',
'date_of_construction',
'celling_height',
'decoration',
'kitchen_area',
'balcony_area',
'id',
)
|
from rest_framework import serializers
from .models import ResaleApartment, ResaleApartmentImage
class ResaleApartmentImageSerializer(serializers.ModelSerializer):
class Meta:
model = ResaleApartmentImage
fields = '__all__'
class ResaleApartmentSerializer(serializers.ModelSerializer):
# images = ResaleApartmentImageSerializer(source='photos', many=True)
get_building_type_display = serializers.ReadOnlyField()
price_per_square_meter = serializers.ReadOnlyField()
neighbourhood = serializers.StringRelatedField()
decoration = serializers.ReadOnlyField(source='decoration.name')
class Meta:
model = ResaleApartment
fields = (
'total_area',
'address',
'floor',
'number_of_storeys',
# 'images',
'full_price',
'old_price',
'price_per_square_meter',
'neighbourhood',
'get_building_type_display',
'number_of_storeys',
'date_of_construction',
'celling_height',
'decoration',
'kitchen_area',
'balcony_area',
'id',
)
|
Make ResaleApartmentSerializer return Decoration.name on decoration field.
|
Make ResaleApartmentSerializer return Decoration.name on decoration field.
It allows to show readable value at resale detailed page.
|
Python
|
mit
|
Dybov/real_estate_agency,Dybov/real_estate_agency,Dybov/real_estate_agency
|
from rest_framework import serializers
from .models import ResaleApartment, ResaleApartmentImage
class ResaleApartmentImageSerializer(serializers.ModelSerializer):
class Meta:
model = ResaleApartmentImage
fields = '__all__'
class ResaleApartmentSerializer(serializers.ModelSerializer):
# images = ResaleApartmentImageSerializer(source='photos', many=True)
get_building_type_display = serializers.ReadOnlyField()
price_per_square_meter = serializers.ReadOnlyField()
neighbourhood = serializers.StringRelatedField()
+ decoration = serializers.ReadOnlyField(source='decoration.name')
class Meta:
model = ResaleApartment
fields = (
'total_area',
'address',
'floor',
'number_of_storeys',
# 'images',
'full_price',
'old_price',
'price_per_square_meter',
'neighbourhood',
'get_building_type_display',
'number_of_storeys',
'date_of_construction',
'celling_height',
'decoration',
'kitchen_area',
'balcony_area',
'id',
)
|
Make ResaleApartmentSerializer return Decoration.name on decoration field.
|
## Code Before:
from rest_framework import serializers
from .models import ResaleApartment, ResaleApartmentImage
class ResaleApartmentImageSerializer(serializers.ModelSerializer):
class Meta:
model = ResaleApartmentImage
fields = '__all__'
class ResaleApartmentSerializer(serializers.ModelSerializer):
# images = ResaleApartmentImageSerializer(source='photos', many=True)
get_building_type_display = serializers.ReadOnlyField()
price_per_square_meter = serializers.ReadOnlyField()
neighbourhood = serializers.StringRelatedField()
class Meta:
model = ResaleApartment
fields = (
'total_area',
'address',
'floor',
'number_of_storeys',
# 'images',
'full_price',
'old_price',
'price_per_square_meter',
'neighbourhood',
'get_building_type_display',
'number_of_storeys',
'date_of_construction',
'celling_height',
'decoration',
'kitchen_area',
'balcony_area',
'id',
)
## Instruction:
Make ResaleApartmentSerializer return Decoration.name on decoration field.
## Code After:
from rest_framework import serializers
from .models import ResaleApartment, ResaleApartmentImage
class ResaleApartmentImageSerializer(serializers.ModelSerializer):
class Meta:
model = ResaleApartmentImage
fields = '__all__'
class ResaleApartmentSerializer(serializers.ModelSerializer):
# images = ResaleApartmentImageSerializer(source='photos', many=True)
get_building_type_display = serializers.ReadOnlyField()
price_per_square_meter = serializers.ReadOnlyField()
neighbourhood = serializers.StringRelatedField()
decoration = serializers.ReadOnlyField(source='decoration.name')
class Meta:
model = ResaleApartment
fields = (
'total_area',
'address',
'floor',
'number_of_storeys',
# 'images',
'full_price',
'old_price',
'price_per_square_meter',
'neighbourhood',
'get_building_type_display',
'number_of_storeys',
'date_of_construction',
'celling_height',
'decoration',
'kitchen_area',
'balcony_area',
'id',
)
|
from rest_framework import serializers
from .models import ResaleApartment, ResaleApartmentImage
class ResaleApartmentImageSerializer(serializers.ModelSerializer):
class Meta:
model = ResaleApartmentImage
fields = '__all__'
class ResaleApartmentSerializer(serializers.ModelSerializer):
# images = ResaleApartmentImageSerializer(source='photos', many=True)
get_building_type_display = serializers.ReadOnlyField()
price_per_square_meter = serializers.ReadOnlyField()
neighbourhood = serializers.StringRelatedField()
+ decoration = serializers.ReadOnlyField(source='decoration.name')
class Meta:
model = ResaleApartment
fields = (
'total_area',
'address',
'floor',
'number_of_storeys',
# 'images',
'full_price',
'old_price',
'price_per_square_meter',
'neighbourhood',
'get_building_type_display',
'number_of_storeys',
'date_of_construction',
'celling_height',
'decoration',
'kitchen_area',
'balcony_area',
'id',
)
|
38254c64bf94f5c1570a129cfe41f94dd88fb780
|
config/regenerate_launch_files.py
|
config/regenerate_launch_files.py
|
import glob
import os
import subprocess as sp
import rospkg
import tqdm
def get_launch_dir(package):
return os.path.join(rospkg.RosPack().get_path(package), "launch")
def get_file_root(path):
"""
>>> get_file_root("/tmp/test.txt")
'test'
"""
return os.path.split(path[:path.rindex(".")])[1]
def compile_xacro(inpath, outpath, stdout):
sp.call("rosrun xacro xacro {inpath} --inorder -o {outpath}"
.format(inpath=inpath, outpath=outpath).split(),
stdout=stdout)
def main():
launch_dir = get_launch_dir("spirit")
os.chdir(launch_dir)
with open(os.devnull, "w") as DEVNULL:
for path in tqdm.tqdm(glob.glob("xacro/*.xacro"),
desc="Regenerating launch files",
leave=True):
root = get_file_root(path)
compile_xacro(path, os.path.join("launchers", root), DEVNULL)
if __name__ == "__main__":
main()
|
import glob
import os
import subprocess as sp
import rospkg
import tqdm
def get_launch_dir(package):
return os.path.join(rospkg.RosPack().get_path(package), "launch")
def get_file_root(path):
"""
>>> get_file_root("/tmp/test.txt")
'test'
"""
return os.path.split(path[:path.rindex(".")])[1]
def compile_xacro(inpath, outpath, stdout):
sp.call("rosrun xacro xacro {inpath} --inorder -o {outpath}"
.format(inpath=inpath, outpath=outpath).split(),
stdout=stdout)
def main():
launch_dir = get_launch_dir("spirit")
os.chdir(launch_dir)
with open(os.devnull, "w") as DEVNULL:
for path in tqdm.tqdm(glob.glob("xacro/*.xacro"),
desc="Regenerating launch files",
unit=" files",
leave=True):
root = get_file_root(path)
compile_xacro(path, os.path.join("launchers", root), DEVNULL)
if __name__ == "__main__":
main()
|
Change unit from "it" to "files"
|
Change unit from "it" to "files"
|
Python
|
mit
|
masasin/spirit,masasin/spirit
|
import glob
import os
import subprocess as sp
import rospkg
import tqdm
def get_launch_dir(package):
return os.path.join(rospkg.RosPack().get_path(package), "launch")
def get_file_root(path):
"""
>>> get_file_root("/tmp/test.txt")
'test'
"""
return os.path.split(path[:path.rindex(".")])[1]
def compile_xacro(inpath, outpath, stdout):
sp.call("rosrun xacro xacro {inpath} --inorder -o {outpath}"
.format(inpath=inpath, outpath=outpath).split(),
stdout=stdout)
def main():
launch_dir = get_launch_dir("spirit")
os.chdir(launch_dir)
with open(os.devnull, "w") as DEVNULL:
for path in tqdm.tqdm(glob.glob("xacro/*.xacro"),
desc="Regenerating launch files",
+ unit=" files",
leave=True):
root = get_file_root(path)
compile_xacro(path, os.path.join("launchers", root), DEVNULL)
if __name__ == "__main__":
main()
|
Change unit from "it" to "files"
|
## Code Before:
import glob
import os
import subprocess as sp
import rospkg
import tqdm
def get_launch_dir(package):
return os.path.join(rospkg.RosPack().get_path(package), "launch")
def get_file_root(path):
"""
>>> get_file_root("/tmp/test.txt")
'test'
"""
return os.path.split(path[:path.rindex(".")])[1]
def compile_xacro(inpath, outpath, stdout):
sp.call("rosrun xacro xacro {inpath} --inorder -o {outpath}"
.format(inpath=inpath, outpath=outpath).split(),
stdout=stdout)
def main():
launch_dir = get_launch_dir("spirit")
os.chdir(launch_dir)
with open(os.devnull, "w") as DEVNULL:
for path in tqdm.tqdm(glob.glob("xacro/*.xacro"),
desc="Regenerating launch files",
leave=True):
root = get_file_root(path)
compile_xacro(path, os.path.join("launchers", root), DEVNULL)
if __name__ == "__main__":
main()
## Instruction:
Change unit from "it" to "files"
## Code After:
import glob
import os
import subprocess as sp
import rospkg
import tqdm
def get_launch_dir(package):
return os.path.join(rospkg.RosPack().get_path(package), "launch")
def get_file_root(path):
"""
>>> get_file_root("/tmp/test.txt")
'test'
"""
return os.path.split(path[:path.rindex(".")])[1]
def compile_xacro(inpath, outpath, stdout):
sp.call("rosrun xacro xacro {inpath} --inorder -o {outpath}"
.format(inpath=inpath, outpath=outpath).split(),
stdout=stdout)
def main():
launch_dir = get_launch_dir("spirit")
os.chdir(launch_dir)
with open(os.devnull, "w") as DEVNULL:
for path in tqdm.tqdm(glob.glob("xacro/*.xacro"),
desc="Regenerating launch files",
unit=" files",
leave=True):
root = get_file_root(path)
compile_xacro(path, os.path.join("launchers", root), DEVNULL)
if __name__ == "__main__":
main()
|
import glob
import os
import subprocess as sp
import rospkg
import tqdm
def get_launch_dir(package):
return os.path.join(rospkg.RosPack().get_path(package), "launch")
def get_file_root(path):
"""
>>> get_file_root("/tmp/test.txt")
'test'
"""
return os.path.split(path[:path.rindex(".")])[1]
def compile_xacro(inpath, outpath, stdout):
sp.call("rosrun xacro xacro {inpath} --inorder -o {outpath}"
.format(inpath=inpath, outpath=outpath).split(),
stdout=stdout)
def main():
launch_dir = get_launch_dir("spirit")
os.chdir(launch_dir)
with open(os.devnull, "w") as DEVNULL:
for path in tqdm.tqdm(glob.glob("xacro/*.xacro"),
desc="Regenerating launch files",
+ unit=" files",
leave=True):
root = get_file_root(path)
compile_xacro(path, os.path.join("launchers", root), DEVNULL)
if __name__ == "__main__":
main()
|
935552df10dc3a17cf3edb897e83861bbeaae803
|
tests/test_thread.py
|
tests/test_thread.py
|
import os
import unittest
from common import gobject, gtk, testhelper
# Enable PyGILState API
os.environ['PYGTK_USE_GIL_STATE_API'] = ''
gobject.threads_init()
class TestThread(unittest.TestCase):
def from_thread_cb(self, test, enum):
assert test == self.obj
assert int(enum) == 0
assert type(enum) != int
def idle_cb(self):
self.obj = testhelper.get_test_thread()
self.obj.connect('from-thread', self.from_thread_cb)
self.obj.emit('emit-signal')
def testExtensionModule(self):
gtk.idle_add(self.idle_cb)
gtk.timeout_add(50, self.timeout_cb)
gtk.main()
def timeout_cb(self):
gtk.main_quit()
|
import os
import unittest
from common import gobject, gtk, testhelper
# Enable PyGILState API
os.environ['PYGTK_USE_GIL_STATE_API'] = ''
gobject.threads_init()
class TestThread(unittest.TestCase):
def from_thread_cb(self, test, enum):
assert test == self.obj
assert int(enum) == 0
assert type(enum) != int
def idle_cb(self):
self.obj = testhelper.get_test_thread()
self.obj.connect('from-thread', self.from_thread_cb)
self.obj.emit('emit-signal')
def testExtensionModule(self):
gobject.idle_add(self.idle_cb)
gobject.timeout_add(50, self.timeout_cb)
gtk.main()
def timeout_cb(self):
gtk.main_quit()
|
Add pygtk_postinstall.py Updated Deprecate gtk.idle_add and friends. Merge
|
Add pygtk_postinstall.py Updated Deprecate gtk.idle_add and friends. Merge
* Makefile.am: Add pygtk_postinstall.py
* docs/random/missing-symbols: Updated
* gtk/__init__.py: Deprecate gtk.idle_add and friends.
* gtk/gtk.defs: Merge in 2.6 api, for GtkLabel functions,
thanks to Gian Mario Tagliaretti, fixes bug #163296
* tests/test_thread.py: Don't use gtk.idle_add
|
Python
|
lgpl-2.1
|
choeger/pygobject-cmake,GNOME/pygobject,nzjrs/pygobject,atizo/pygobject,pexip/pygobject,jdahlin/pygobject,davibe/pygobject,Distrotech/pygobject,sfeltman/pygobject,pexip/pygobject,choeger/pygobject-cmake,MathieuDuponchelle/pygobject,alexef/pygobject,alexef/pygobject,alexef/pygobject,sfeltman/pygobject,davidmalcolm/pygobject,MathieuDuponchelle/pygobject,davibe/pygobject,Distrotech/pygobject,atizo/pygobject,MathieuDuponchelle/pygobject,atizo/pygobject,Distrotech/pygobject,sfeltman/pygobject,thiblahute/pygobject,thiblahute/pygobject,davidmalcolm/pygobject,choeger/pygobject-cmake,davidmalcolm/pygobject,pexip/pygobject,davibe/pygobject,nzjrs/pygobject,thiblahute/pygobject,jdahlin/pygobject,nzjrs/pygobject,davibe/pygobject,GNOME/pygobject,GNOME/pygobject,Distrotech/pygobject,jdahlin/pygobject
|
import os
import unittest
from common import gobject, gtk, testhelper
# Enable PyGILState API
os.environ['PYGTK_USE_GIL_STATE_API'] = ''
gobject.threads_init()
class TestThread(unittest.TestCase):
def from_thread_cb(self, test, enum):
assert test == self.obj
assert int(enum) == 0
assert type(enum) != int
def idle_cb(self):
self.obj = testhelper.get_test_thread()
self.obj.connect('from-thread', self.from_thread_cb)
self.obj.emit('emit-signal')
def testExtensionModule(self):
- gtk.idle_add(self.idle_cb)
+ gobject.idle_add(self.idle_cb)
- gtk.timeout_add(50, self.timeout_cb)
+ gobject.timeout_add(50, self.timeout_cb)
gtk.main()
def timeout_cb(self):
gtk.main_quit()
|
Add pygtk_postinstall.py Updated Deprecate gtk.idle_add and friends. Merge
|
## Code Before:
import os
import unittest
from common import gobject, gtk, testhelper
# Enable PyGILState API
os.environ['PYGTK_USE_GIL_STATE_API'] = ''
gobject.threads_init()
class TestThread(unittest.TestCase):
def from_thread_cb(self, test, enum):
assert test == self.obj
assert int(enum) == 0
assert type(enum) != int
def idle_cb(self):
self.obj = testhelper.get_test_thread()
self.obj.connect('from-thread', self.from_thread_cb)
self.obj.emit('emit-signal')
def testExtensionModule(self):
gtk.idle_add(self.idle_cb)
gtk.timeout_add(50, self.timeout_cb)
gtk.main()
def timeout_cb(self):
gtk.main_quit()
## Instruction:
Add pygtk_postinstall.py Updated Deprecate gtk.idle_add and friends. Merge
## Code After:
import os
import unittest
from common import gobject, gtk, testhelper
# Enable PyGILState API
os.environ['PYGTK_USE_GIL_STATE_API'] = ''
gobject.threads_init()
class TestThread(unittest.TestCase):
def from_thread_cb(self, test, enum):
assert test == self.obj
assert int(enum) == 0
assert type(enum) != int
def idle_cb(self):
self.obj = testhelper.get_test_thread()
self.obj.connect('from-thread', self.from_thread_cb)
self.obj.emit('emit-signal')
def testExtensionModule(self):
gobject.idle_add(self.idle_cb)
gobject.timeout_add(50, self.timeout_cb)
gtk.main()
def timeout_cb(self):
gtk.main_quit()
|
import os
import unittest
from common import gobject, gtk, testhelper
# Enable PyGILState API
os.environ['PYGTK_USE_GIL_STATE_API'] = ''
gobject.threads_init()
class TestThread(unittest.TestCase):
def from_thread_cb(self, test, enum):
assert test == self.obj
assert int(enum) == 0
assert type(enum) != int
def idle_cb(self):
self.obj = testhelper.get_test_thread()
self.obj.connect('from-thread', self.from_thread_cb)
self.obj.emit('emit-signal')
def testExtensionModule(self):
- gtk.idle_add(self.idle_cb)
? -
+ gobject.idle_add(self.idle_cb)
? +++++
- gtk.timeout_add(50, self.timeout_cb)
? -
+ gobject.timeout_add(50, self.timeout_cb)
? +++++
gtk.main()
def timeout_cb(self):
gtk.main_quit()
|
4706d6feaff7057d04def0544e291900a754558e
|
nbgrader/apps/solutionapp.py
|
nbgrader/apps/solutionapp.py
|
from IPython.config.loader import Config
from IPython.config.application import catch_config_error
from IPython.utils.traitlets import Unicode
from nbgrader.apps.customnbconvertapp import CustomNbConvertApp
class SolutionApp(CustomNbConvertApp):
name = Unicode(u'nbgrader-solution')
description = Unicode(u'Prepare a solution version of an assignment')
def _export_format_default(self):
return 'notebook'
def build_extra_config(self):
self.extra_config = Config()
self.extra_config.Exporter.preprocessors = [
'nbgrader.preprocessors.IncludeHeaderFooter',
'nbgrader.preprocessors.TableOfContents',
'nbgrader.preprocessors.RenderSolutions',
'nbgrader.preprocessors.ExtractTests',
'IPython.nbconvert.preprocessors.ExecutePreprocessor'
]
self.extra_config.RenderSolutions.solution = True
self.config.merge(self.extra_config)
|
from IPython.config.loader import Config
from IPython.config.application import catch_config_error
from IPython.utils.traitlets import Unicode
from nbgrader.apps.customnbconvertapp import CustomNbConvertApp
class SolutionApp(CustomNbConvertApp):
name = Unicode(u'nbgrader-solution')
description = Unicode(u'Prepare a solution version of an assignment')
def _export_format_default(self):
return 'notebook'
def build_extra_config(self):
self.extra_config = Config()
self.extra_config.Exporter.preprocessors = [
'nbgrader.preprocessors.IncludeHeaderFooter',
'nbgrader.preprocessors.TableOfContents',
'nbgrader.preprocessors.RenderSolutions',
'nbgrader.preprocessors.ExtractTests',
'IPython.nbconvert.preprocessors.ExecutePreprocessor'
]
self.extra_config.RenderSolutions.solution = True
self.extra_config.NbGraderApp.writer_class = 'IPython.nbconvert.writers.FilesWriter'
self.config.merge(self.extra_config)
|
Add files writer to solution app
|
Add files writer to solution app
|
Python
|
bsd-3-clause
|
ellisonbg/nbgrader,jupyter/nbgrader,modulexcite/nbgrader,ellisonbg/nbgrader,modulexcite/nbgrader,jupyter/nbgrader,ellisonbg/nbgrader,dementrock/nbgrader,jdfreder/nbgrader,jupyter/nbgrader,EdwardJKim/nbgrader,jhamrick/nbgrader,MatKallada/nbgrader,dementrock/nbgrader,jhamrick/nbgrader,jupyter/nbgrader,alope107/nbgrader,alope107/nbgrader,EdwardJKim/nbgrader,ellisonbg/nbgrader,jhamrick/nbgrader,jupyter/nbgrader,jdfreder/nbgrader,jhamrick/nbgrader,EdwardJKim/nbgrader,EdwardJKim/nbgrader,MatKallada/nbgrader
|
from IPython.config.loader import Config
from IPython.config.application import catch_config_error
from IPython.utils.traitlets import Unicode
from nbgrader.apps.customnbconvertapp import CustomNbConvertApp
class SolutionApp(CustomNbConvertApp):
name = Unicode(u'nbgrader-solution')
description = Unicode(u'Prepare a solution version of an assignment')
def _export_format_default(self):
return 'notebook'
def build_extra_config(self):
self.extra_config = Config()
self.extra_config.Exporter.preprocessors = [
'nbgrader.preprocessors.IncludeHeaderFooter',
'nbgrader.preprocessors.TableOfContents',
'nbgrader.preprocessors.RenderSolutions',
'nbgrader.preprocessors.ExtractTests',
'IPython.nbconvert.preprocessors.ExecutePreprocessor'
]
self.extra_config.RenderSolutions.solution = True
+ self.extra_config.NbGraderApp.writer_class = 'IPython.nbconvert.writers.FilesWriter'
self.config.merge(self.extra_config)
|
Add files writer to solution app
|
## Code Before:
from IPython.config.loader import Config
from IPython.config.application import catch_config_error
from IPython.utils.traitlets import Unicode
from nbgrader.apps.customnbconvertapp import CustomNbConvertApp
class SolutionApp(CustomNbConvertApp):
name = Unicode(u'nbgrader-solution')
description = Unicode(u'Prepare a solution version of an assignment')
def _export_format_default(self):
return 'notebook'
def build_extra_config(self):
self.extra_config = Config()
self.extra_config.Exporter.preprocessors = [
'nbgrader.preprocessors.IncludeHeaderFooter',
'nbgrader.preprocessors.TableOfContents',
'nbgrader.preprocessors.RenderSolutions',
'nbgrader.preprocessors.ExtractTests',
'IPython.nbconvert.preprocessors.ExecutePreprocessor'
]
self.extra_config.RenderSolutions.solution = True
self.config.merge(self.extra_config)
## Instruction:
Add files writer to solution app
## Code After:
from IPython.config.loader import Config
from IPython.config.application import catch_config_error
from IPython.utils.traitlets import Unicode
from nbgrader.apps.customnbconvertapp import CustomNbConvertApp
class SolutionApp(CustomNbConvertApp):
name = Unicode(u'nbgrader-solution')
description = Unicode(u'Prepare a solution version of an assignment')
def _export_format_default(self):
return 'notebook'
def build_extra_config(self):
self.extra_config = Config()
self.extra_config.Exporter.preprocessors = [
'nbgrader.preprocessors.IncludeHeaderFooter',
'nbgrader.preprocessors.TableOfContents',
'nbgrader.preprocessors.RenderSolutions',
'nbgrader.preprocessors.ExtractTests',
'IPython.nbconvert.preprocessors.ExecutePreprocessor'
]
self.extra_config.RenderSolutions.solution = True
self.extra_config.NbGraderApp.writer_class = 'IPython.nbconvert.writers.FilesWriter'
self.config.merge(self.extra_config)
|
from IPython.config.loader import Config
from IPython.config.application import catch_config_error
from IPython.utils.traitlets import Unicode
from nbgrader.apps.customnbconvertapp import CustomNbConvertApp
class SolutionApp(CustomNbConvertApp):
name = Unicode(u'nbgrader-solution')
description = Unicode(u'Prepare a solution version of an assignment')
def _export_format_default(self):
return 'notebook'
def build_extra_config(self):
self.extra_config = Config()
self.extra_config.Exporter.preprocessors = [
'nbgrader.preprocessors.IncludeHeaderFooter',
'nbgrader.preprocessors.TableOfContents',
'nbgrader.preprocessors.RenderSolutions',
'nbgrader.preprocessors.ExtractTests',
'IPython.nbconvert.preprocessors.ExecutePreprocessor'
]
self.extra_config.RenderSolutions.solution = True
+ self.extra_config.NbGraderApp.writer_class = 'IPython.nbconvert.writers.FilesWriter'
self.config.merge(self.extra_config)
|
294b305aa7e0c78c72d4eac87ded476425873b62
|
src/inbox/server/basicauth.py
|
src/inbox/server/basicauth.py
|
import getpass
AUTH_TYPES = {'Gmail': 'OAuth', 'Yahoo': 'Password', 'EAS': 'Password'}
class AuthError(Exception):
pass
def password_auth(email_address):
pw = getpass.getpass('Password for %s (hidden): ' % email_address)
if len(pw) <= 0:
raise AuthError('Password required.')
return dict(email=email_address, password=pw)
|
import getpass
AUTH_TYPES = {'Gmail': 'OAuth', 'Yahoo': 'Password', 'EAS': 'Password'}
message = 'Password for {0}(hidden): '
class AuthError(Exception):
pass
def password_auth(email_address, message=message):
pw = getpass.getpass(message.format(email_address))
if len(pw) <= 0:
raise AuthError('Password required.')
return dict(email=email_address, password=pw)
|
Change for EAS invalid pw case, to allow user to re-enter pw once before raising error.
|
Change for EAS invalid pw case, to allow user to re-enter pw once before raising error.
Summary:
One line change in password_auth to allow password re-entry.
See D106 too
Test Plan: None
Reviewers: mg
Differential Revision: https://review.inboxapp.com/D107
|
Python
|
agpl-3.0
|
ErinCall/sync-engine,ErinCall/sync-engine,ErinCall/sync-engine,wakermahmud/sync-engine,Eagles2F/sync-engine,wakermahmud/sync-engine,EthanBlackburn/sync-engine,PriviPK/privipk-sync-engine,rmasters/inbox,closeio/nylas,EthanBlackburn/sync-engine,Eagles2F/sync-engine,jobscore/sync-engine,Eagles2F/sync-engine,jobscore/sync-engine,PriviPK/privipk-sync-engine,wakermahmud/sync-engine,gale320/sync-engine,EthanBlackburn/sync-engine,rmasters/inbox,gale320/sync-engine,Eagles2F/sync-engine,gale320/sync-engine,PriviPK/privipk-sync-engine,jobscore/sync-engine,nylas/sync-engine,ErinCall/sync-engine,nylas/sync-engine,ErinCall/sync-engine,closeio/nylas,gale320/sync-engine,closeio/nylas,wakermahmud/sync-engine,closeio/nylas,nylas/sync-engine,PriviPK/privipk-sync-engine,PriviPK/privipk-sync-engine,wakermahmud/sync-engine,EthanBlackburn/sync-engine,jobscore/sync-engine,gale320/sync-engine,rmasters/inbox,Eagles2F/sync-engine,rmasters/inbox,nylas/sync-engine,EthanBlackburn/sync-engine
|
import getpass
AUTH_TYPES = {'Gmail': 'OAuth', 'Yahoo': 'Password', 'EAS': 'Password'}
+ message = 'Password for {0}(hidden): '
class AuthError(Exception):
pass
- def password_auth(email_address):
+ def password_auth(email_address, message=message):
- pw = getpass.getpass('Password for %s (hidden): ' % email_address)
+ pw = getpass.getpass(message.format(email_address))
if len(pw) <= 0:
raise AuthError('Password required.')
return dict(email=email_address, password=pw)
|
Change for EAS invalid pw case, to allow user to re-enter pw once before raising error.
|
## Code Before:
import getpass
AUTH_TYPES = {'Gmail': 'OAuth', 'Yahoo': 'Password', 'EAS': 'Password'}
class AuthError(Exception):
pass
def password_auth(email_address):
pw = getpass.getpass('Password for %s (hidden): ' % email_address)
if len(pw) <= 0:
raise AuthError('Password required.')
return dict(email=email_address, password=pw)
## Instruction:
Change for EAS invalid pw case, to allow user to re-enter pw once before raising error.
## Code After:
import getpass
AUTH_TYPES = {'Gmail': 'OAuth', 'Yahoo': 'Password', 'EAS': 'Password'}
message = 'Password for {0}(hidden): '
class AuthError(Exception):
pass
def password_auth(email_address, message=message):
pw = getpass.getpass(message.format(email_address))
if len(pw) <= 0:
raise AuthError('Password required.')
return dict(email=email_address, password=pw)
|
import getpass
AUTH_TYPES = {'Gmail': 'OAuth', 'Yahoo': 'Password', 'EAS': 'Password'}
+ message = 'Password for {0}(hidden): '
class AuthError(Exception):
pass
- def password_auth(email_address):
+ def password_auth(email_address, message=message):
? +++++++++++++++++
- pw = getpass.getpass('Password for %s (hidden): ' % email_address)
+ pw = getpass.getpass(message.format(email_address))
if len(pw) <= 0:
raise AuthError('Password required.')
return dict(email=email_address, password=pw)
|
b9b3837937341e6b1b052bbfdd979e3bb57d87c4
|
tests/integration/test_with_ssl.py
|
tests/integration/test_with_ssl.py
|
from . import base
class SSLTestCase(base.IntegrationTestCase):
'''RabbitMQ integration test case.'''
CTXT = {
'plugin.activemq.pool.1.port': 61614,
'plugin.activemq.pool.1.password': 'marionette',
'plugin.ssl_server_public': 'tests/fixtures/server-public.pem',
'plugin.ssl_client_private': 'tests/fixtures/client-private.pem',
'plugin.ssl_client_public': 'tests/fixtures/client-public.pem',
}
class TestWithSSLMCo20x(base.MCollective20x, SSLTestCase):
'''MCollective integration test case.'''
class TestWithSSLMCo22x(base.MCollective22x, SSLTestCase):
'''MCollective integration test case.'''
class TestWithSSLMCo23x(base.MCollective23x, SSLTestCase):
'''MCollective integration test case.'''
|
import os
from pymco.test import ctxt
from . import base
FIXTURES_PATH = os.path.join(ctxt.ROOT, 'fixtures')
class SSLTestCase(base.IntegrationTestCase):
'''RabbitMQ integration test case.'''
CTXT = {
'plugin.activemq.pool.1.port': 61614,
'plugin.activemq.pool.1.password': 'marionette',
'plugin.ssl_server_public': 'tests/fixtures/server-public.pem',
'plugin.ssl_client_private': 'tests/fixtures/client-private.pem',
'plugin.ssl_client_public': 'tests/fixtures/client-public.pem',
'plugin.ssl_server_private': os.path.join(FIXTURES_PATH,
'server-private.pem'),
'securityprovider': 'ssl',
'plugin.ssl_client_cert_dir': FIXTURES_PATH,
}
class TestWithSSLMCo20x(base.MCollective20x, SSLTestCase):
'''MCollective integration test case.'''
class TestWithSSLMCo22x(base.MCollective22x, SSLTestCase):
'''MCollective integration test case.'''
class TestWithSSLMCo23x(base.MCollective23x, SSLTestCase):
'''MCollective integration test case.'''
|
Fix SSL security provider integration tests
|
Fix SSL security provider integration tests
They were running with none provider instead.
|
Python
|
bsd-3-clause
|
rafaduran/python-mcollective,rafaduran/python-mcollective,rafaduran/python-mcollective,rafaduran/python-mcollective
|
+ import os
+
+ from pymco.test import ctxt
from . import base
+
+ FIXTURES_PATH = os.path.join(ctxt.ROOT, 'fixtures')
class SSLTestCase(base.IntegrationTestCase):
'''RabbitMQ integration test case.'''
CTXT = {
'plugin.activemq.pool.1.port': 61614,
'plugin.activemq.pool.1.password': 'marionette',
'plugin.ssl_server_public': 'tests/fixtures/server-public.pem',
'plugin.ssl_client_private': 'tests/fixtures/client-private.pem',
'plugin.ssl_client_public': 'tests/fixtures/client-public.pem',
+ 'plugin.ssl_server_private': os.path.join(FIXTURES_PATH,
+ 'server-private.pem'),
+ 'securityprovider': 'ssl',
+ 'plugin.ssl_client_cert_dir': FIXTURES_PATH,
}
class TestWithSSLMCo20x(base.MCollective20x, SSLTestCase):
'''MCollective integration test case.'''
class TestWithSSLMCo22x(base.MCollective22x, SSLTestCase):
'''MCollective integration test case.'''
class TestWithSSLMCo23x(base.MCollective23x, SSLTestCase):
'''MCollective integration test case.'''
|
Fix SSL security provider integration tests
|
## Code Before:
from . import base
class SSLTestCase(base.IntegrationTestCase):
'''RabbitMQ integration test case.'''
CTXT = {
'plugin.activemq.pool.1.port': 61614,
'plugin.activemq.pool.1.password': 'marionette',
'plugin.ssl_server_public': 'tests/fixtures/server-public.pem',
'plugin.ssl_client_private': 'tests/fixtures/client-private.pem',
'plugin.ssl_client_public': 'tests/fixtures/client-public.pem',
}
class TestWithSSLMCo20x(base.MCollective20x, SSLTestCase):
'''MCollective integration test case.'''
class TestWithSSLMCo22x(base.MCollective22x, SSLTestCase):
'''MCollective integration test case.'''
class TestWithSSLMCo23x(base.MCollective23x, SSLTestCase):
'''MCollective integration test case.'''
## Instruction:
Fix SSL security provider integration tests
## Code After:
import os
from pymco.test import ctxt
from . import base
FIXTURES_PATH = os.path.join(ctxt.ROOT, 'fixtures')
class SSLTestCase(base.IntegrationTestCase):
'''RabbitMQ integration test case.'''
CTXT = {
'plugin.activemq.pool.1.port': 61614,
'plugin.activemq.pool.1.password': 'marionette',
'plugin.ssl_server_public': 'tests/fixtures/server-public.pem',
'plugin.ssl_client_private': 'tests/fixtures/client-private.pem',
'plugin.ssl_client_public': 'tests/fixtures/client-public.pem',
'plugin.ssl_server_private': os.path.join(FIXTURES_PATH,
'server-private.pem'),
'securityprovider': 'ssl',
'plugin.ssl_client_cert_dir': FIXTURES_PATH,
}
class TestWithSSLMCo20x(base.MCollective20x, SSLTestCase):
'''MCollective integration test case.'''
class TestWithSSLMCo22x(base.MCollective22x, SSLTestCase):
'''MCollective integration test case.'''
class TestWithSSLMCo23x(base.MCollective23x, SSLTestCase):
'''MCollective integration test case.'''
|
+ import os
+
+ from pymco.test import ctxt
from . import base
+
+ FIXTURES_PATH = os.path.join(ctxt.ROOT, 'fixtures')
class SSLTestCase(base.IntegrationTestCase):
'''RabbitMQ integration test case.'''
CTXT = {
'plugin.activemq.pool.1.port': 61614,
'plugin.activemq.pool.1.password': 'marionette',
'plugin.ssl_server_public': 'tests/fixtures/server-public.pem',
'plugin.ssl_client_private': 'tests/fixtures/client-private.pem',
'plugin.ssl_client_public': 'tests/fixtures/client-public.pem',
+ 'plugin.ssl_server_private': os.path.join(FIXTURES_PATH,
+ 'server-private.pem'),
+ 'securityprovider': 'ssl',
+ 'plugin.ssl_client_cert_dir': FIXTURES_PATH,
}
class TestWithSSLMCo20x(base.MCollective20x, SSLTestCase):
'''MCollective integration test case.'''
class TestWithSSLMCo22x(base.MCollective22x, SSLTestCase):
'''MCollective integration test case.'''
class TestWithSSLMCo23x(base.MCollective23x, SSLTestCase):
'''MCollective integration test case.'''
|
853744e82f2740a47a3f36e003ea8d2784bafff6
|
accelerator/tests/factories/user_deferrable_modal_factory.py
|
accelerator/tests/factories/user_deferrable_modal_factory.py
|
import swapper
from datetime import (
datetime,
timedelta,
)
from factory import SubFactory
from factory.django import DjangoModelFactory
from simpleuser.tests.factories.user_factory import UserFactory
from .deferrable_modal_factory import DeferrableModalFactory
UserDeferrableModal = swapper.load_model('accelerator', 'UserDeferrableModal')
class UserDeferrableModalFactory(DjangoModelFactory):
class Meta:
django_get_or_create = ('deferrable_modal', 'user',)
model = UserDeferrableModal
user = SubFactory(UserFactory)
deferrable_modal = SubFactory(DeferrableModalFactory)
is_deferred = False
deferred_to = datetime.now() + timedelta(days=1)
|
import swapper
from datetime import (
datetime,
timedelta,
)
from factory import SubFactory
from factory.django import DjangoModelFactory
from pytz import utc
from simpleuser.tests.factories.user_factory import UserFactory
from .deferrable_modal_factory import DeferrableModalFactory
UserDeferrableModal = swapper.load_model('accelerator', 'UserDeferrableModal')
class UserDeferrableModalFactory(DjangoModelFactory):
class Meta:
django_get_or_create = ('deferrable_modal', 'user',)
model = UserDeferrableModal
user = SubFactory(UserFactory)
deferrable_modal = SubFactory(DeferrableModalFactory)
is_deferred = False
deferred_to = utc.localize(datetime.now()) + timedelta(days=1)
|
Fix bare datetime.now() in factory
|
[AC-8673] Fix bare datetime.now() in factory
|
Python
|
mit
|
masschallenge/django-accelerator,masschallenge/django-accelerator
|
import swapper
from datetime import (
datetime,
timedelta,
)
from factory import SubFactory
from factory.django import DjangoModelFactory
+ from pytz import utc
+
from simpleuser.tests.factories.user_factory import UserFactory
from .deferrable_modal_factory import DeferrableModalFactory
UserDeferrableModal = swapper.load_model('accelerator', 'UserDeferrableModal')
class UserDeferrableModalFactory(DjangoModelFactory):
class Meta:
django_get_or_create = ('deferrable_modal', 'user',)
model = UserDeferrableModal
user = SubFactory(UserFactory)
deferrable_modal = SubFactory(DeferrableModalFactory)
is_deferred = False
- deferred_to = datetime.now() + timedelta(days=1)
+ deferred_to = utc.localize(datetime.now()) + timedelta(days=1)
|
Fix bare datetime.now() in factory
|
## Code Before:
import swapper
from datetime import (
datetime,
timedelta,
)
from factory import SubFactory
from factory.django import DjangoModelFactory
from simpleuser.tests.factories.user_factory import UserFactory
from .deferrable_modal_factory import DeferrableModalFactory
UserDeferrableModal = swapper.load_model('accelerator', 'UserDeferrableModal')
class UserDeferrableModalFactory(DjangoModelFactory):
class Meta:
django_get_or_create = ('deferrable_modal', 'user',)
model = UserDeferrableModal
user = SubFactory(UserFactory)
deferrable_modal = SubFactory(DeferrableModalFactory)
is_deferred = False
deferred_to = datetime.now() + timedelta(days=1)
## Instruction:
Fix bare datetime.now() in factory
## Code After:
import swapper
from datetime import (
datetime,
timedelta,
)
from factory import SubFactory
from factory.django import DjangoModelFactory
from pytz import utc
from simpleuser.tests.factories.user_factory import UserFactory
from .deferrable_modal_factory import DeferrableModalFactory
UserDeferrableModal = swapper.load_model('accelerator', 'UserDeferrableModal')
class UserDeferrableModalFactory(DjangoModelFactory):
class Meta:
django_get_or_create = ('deferrable_modal', 'user',)
model = UserDeferrableModal
user = SubFactory(UserFactory)
deferrable_modal = SubFactory(DeferrableModalFactory)
is_deferred = False
deferred_to = utc.localize(datetime.now()) + timedelta(days=1)
|
import swapper
from datetime import (
datetime,
timedelta,
)
from factory import SubFactory
from factory.django import DjangoModelFactory
+ from pytz import utc
+
from simpleuser.tests.factories.user_factory import UserFactory
from .deferrable_modal_factory import DeferrableModalFactory
UserDeferrableModal = swapper.load_model('accelerator', 'UserDeferrableModal')
class UserDeferrableModalFactory(DjangoModelFactory):
class Meta:
django_get_or_create = ('deferrable_modal', 'user',)
model = UserDeferrableModal
user = SubFactory(UserFactory)
deferrable_modal = SubFactory(DeferrableModalFactory)
is_deferred = False
- deferred_to = datetime.now() + timedelta(days=1)
+ deferred_to = utc.localize(datetime.now()) + timedelta(days=1)
? +++++++++++++ +
|
8bcc4fe29468868190dcfcbea5438dc0aa638387
|
sweetercat/test_utils.py
|
sweetercat/test_utils.py
|
from __future__ import division
from utils import absolute_magnitude, plDensity, hz
def test_absolute_magnitude():
m = 10
assert isinstance(absolute_magnitude(1, 1), float)
assert absolute_magnitude(1, m) > m
assert absolute_magnitude(1, m) == 15
assert absolute_magnitude(0.1, m) == m
assert absolute_magnitude(0.01, m) < m
assert absolute_magnitude(1/10, m) == m
def test_plDensity():
m, r = 1, 1
assert isinstance(plDensity(m, r), float)
assert round(plDensity(m, r), 2) == 1.33
assert plDensity(0, r) == 0
def test_hz():
teff = 5777
lum = 1
for model in range(1, 6):
assert isinstance(hz(teff, lum, model), float)
results = [0.75, 0.98, 0.99, 1.71, 1.77]
for model, result in enumerate(results, start=1):
assert round(hz(teff, lum, model), 2) == result
|
from __future__ import division
import pytest
import pandas as pd
from utils import absolute_magnitude, plDensity, hz, readSC
def test_absolute_magnitude():
m = 10
assert isinstance(absolute_magnitude(1, 1), float)
assert absolute_magnitude(1, m) > m
assert absolute_magnitude(1, m) == 15
assert absolute_magnitude(0.1, m) == m
assert absolute_magnitude(0.01, m) < m
assert absolute_magnitude(1/10, m) == m
with pytest.raises(ZeroDivisionError):
absolute_magnitude(0, m)
def test_plDensity():
m, r = 1, 1
assert isinstance(plDensity(m, r), float)
assert round(plDensity(m, r), 2) == 1.33
assert plDensity(0, r) == 0
def test_hz():
teff = 5777
lum = 1
for model in range(1, 6):
assert isinstance(hz(teff, lum, model), float)
results = [0.75, 0.98, 0.99, 1.71, 1.77]
for model, result in enumerate(results, start=1):
assert round(hz(teff, lum, model), 2) == result
assert hz(teff, lum, 2) < hz(teff, lum, 4) # hz1 < hz2
def test_readSC():
df, plot_names = readSC()
assert isinstance(df, pd.DataFrame) #
assert isinstance(plot_names, list)
for name in plot_names:
assert isinstance(name, str)
|
Add a couple more utils tests.
|
Add a couple more utils tests.
|
Python
|
mit
|
DanielAndreasen/SWEETer-Cat,DanielAndreasen/SWEETer-Cat
|
from __future__ import division
+ import pytest
+ import pandas as pd
- from utils import absolute_magnitude, plDensity, hz
+ from utils import absolute_magnitude, plDensity, hz, readSC
def test_absolute_magnitude():
m = 10
assert isinstance(absolute_magnitude(1, 1), float)
assert absolute_magnitude(1, m) > m
assert absolute_magnitude(1, m) == 15
assert absolute_magnitude(0.1, m) == m
assert absolute_magnitude(0.01, m) < m
assert absolute_magnitude(1/10, m) == m
+ with pytest.raises(ZeroDivisionError):
+ absolute_magnitude(0, m)
def test_plDensity():
m, r = 1, 1
assert isinstance(plDensity(m, r), float)
assert round(plDensity(m, r), 2) == 1.33
assert plDensity(0, r) == 0
def test_hz():
teff = 5777
lum = 1
for model in range(1, 6):
assert isinstance(hz(teff, lum, model), float)
results = [0.75, 0.98, 0.99, 1.71, 1.77]
for model, result in enumerate(results, start=1):
assert round(hz(teff, lum, model), 2) == result
+ assert hz(teff, lum, 2) < hz(teff, lum, 4) # hz1 < hz2
+
+ def test_readSC():
+ df, plot_names = readSC()
+ assert isinstance(df, pd.DataFrame) #
+ assert isinstance(plot_names, list)
+ for name in plot_names:
+ assert isinstance(name, str)
+
|
Add a couple more utils tests.
|
## Code Before:
from __future__ import division
from utils import absolute_magnitude, plDensity, hz
def test_absolute_magnitude():
m = 10
assert isinstance(absolute_magnitude(1, 1), float)
assert absolute_magnitude(1, m) > m
assert absolute_magnitude(1, m) == 15
assert absolute_magnitude(0.1, m) == m
assert absolute_magnitude(0.01, m) < m
assert absolute_magnitude(1/10, m) == m
def test_plDensity():
m, r = 1, 1
assert isinstance(plDensity(m, r), float)
assert round(plDensity(m, r), 2) == 1.33
assert plDensity(0, r) == 0
def test_hz():
teff = 5777
lum = 1
for model in range(1, 6):
assert isinstance(hz(teff, lum, model), float)
results = [0.75, 0.98, 0.99, 1.71, 1.77]
for model, result in enumerate(results, start=1):
assert round(hz(teff, lum, model), 2) == result
## Instruction:
Add a couple more utils tests.
## Code After:
from __future__ import division
import pytest
import pandas as pd
from utils import absolute_magnitude, plDensity, hz, readSC
def test_absolute_magnitude():
m = 10
assert isinstance(absolute_magnitude(1, 1), float)
assert absolute_magnitude(1, m) > m
assert absolute_magnitude(1, m) == 15
assert absolute_magnitude(0.1, m) == m
assert absolute_magnitude(0.01, m) < m
assert absolute_magnitude(1/10, m) == m
with pytest.raises(ZeroDivisionError):
absolute_magnitude(0, m)
def test_plDensity():
m, r = 1, 1
assert isinstance(plDensity(m, r), float)
assert round(plDensity(m, r), 2) == 1.33
assert plDensity(0, r) == 0
def test_hz():
teff = 5777
lum = 1
for model in range(1, 6):
assert isinstance(hz(teff, lum, model), float)
results = [0.75, 0.98, 0.99, 1.71, 1.77]
for model, result in enumerate(results, start=1):
assert round(hz(teff, lum, model), 2) == result
assert hz(teff, lum, 2) < hz(teff, lum, 4) # hz1 < hz2
def test_readSC():
df, plot_names = readSC()
assert isinstance(df, pd.DataFrame) #
assert isinstance(plot_names, list)
for name in plot_names:
assert isinstance(name, str)
|
from __future__ import division
+ import pytest
+ import pandas as pd
- from utils import absolute_magnitude, plDensity, hz
+ from utils import absolute_magnitude, plDensity, hz, readSC
? ++++++++
def test_absolute_magnitude():
m = 10
assert isinstance(absolute_magnitude(1, 1), float)
assert absolute_magnitude(1, m) > m
assert absolute_magnitude(1, m) == 15
assert absolute_magnitude(0.1, m) == m
assert absolute_magnitude(0.01, m) < m
assert absolute_magnitude(1/10, m) == m
+ with pytest.raises(ZeroDivisionError):
+ absolute_magnitude(0, m)
def test_plDensity():
m, r = 1, 1
assert isinstance(plDensity(m, r), float)
assert round(plDensity(m, r), 2) == 1.33
assert plDensity(0, r) == 0
def test_hz():
teff = 5777
lum = 1
for model in range(1, 6):
assert isinstance(hz(teff, lum, model), float)
results = [0.75, 0.98, 0.99, 1.71, 1.77]
for model, result in enumerate(results, start=1):
assert round(hz(teff, lum, model), 2) == result
+ assert hz(teff, lum, 2) < hz(teff, lum, 4) # hz1 < hz2
+
+
+ def test_readSC():
+ df, plot_names = readSC()
+ assert isinstance(df, pd.DataFrame) #
+ assert isinstance(plot_names, list)
+ for name in plot_names:
+ assert isinstance(name, str)
|
ddb3bcf4e5d5eb5dc4f8bb74313f333e54c385d6
|
scripts/wall_stop.py
|
scripts/wall_stop.py
|
import rospy,copy
from geometry_msgs.msg import Twist
from std_srvs.srv import Trigger, TriggerResponse
from pimouse_ros.msg import LightSensorValues
class WallStop():
def __init__(self):
self.cmd_vel = rospy.Publisher('/cmd_vel',Twist,queue_size=1)
self.sensor_values = LightSensorValues()
rospy.Subscriber('/lightsensors', LightSensorValues, self.callback_lightsensors)
def callback_lightsensors(self,messages):
self.sensor_values = messages
def run(self):
rate = rospy.Rate(10)
data = Twist()
while not rospy.is_shutdown():
data.linear.x = 0.2 if self.sensor_values.sum_all < 500 else 0.0
self.cmd_vel.publish(data)
rate.sleep()
if __name__ == '__main__':
rospy.init_node('wall_stop')
rospy.wait_for_service('/motor_on')
rospy.wait_for_service('/motor_off')
rospy.on_shutdown(rospy.ServiceProxy('/motor_off',Trigger).call)
rospy.ServiceProxy('/motor_on',Trigger).call()
WallStop().run()
|
import rospy,copy
from geometry_msgs.msg import Twist
from std_srvs.srv import Trigger, TriggerResponse
from pimouse_ros.msg import LightSensorValues
class WallStop():
def __init__(self):
self.cmd_vel = rospy.Publisher('/cmd_vel',Twist,queue_size=1)
self.sensor_values = LightSensorValues()
rospy.Subscriber('/lightsensors', LightSensorValues, self.callback)
def callback(self,messages):
self.sensor_values = messages
def run(self):
rate = rospy.Rate(10)
data = Twist()
while not rospy.is_shutdown():
data.linear.x = 0.2 if self.sensor_values.sum_all < 500 else 0.0
self.cmd_vel.publish(data)
rate.sleep()
if __name__ == '__main__':
rospy.init_node('wall_stop')
rospy.wait_for_service('/motor_on')
rospy.wait_for_service('/motor_off')
rospy.on_shutdown(rospy.ServiceProxy('/motor_off',Trigger).call)
rospy.ServiceProxy('/motor_on',Trigger).call()
WallStop().run()
|
Reduce the name of a function
|
Reduce the name of a function
|
Python
|
mit
|
citueda/pimouse_run_corridor,citueda/pimouse_run_corridor
|
import rospy,copy
from geometry_msgs.msg import Twist
from std_srvs.srv import Trigger, TriggerResponse
from pimouse_ros.msg import LightSensorValues
class WallStop():
def __init__(self):
self.cmd_vel = rospy.Publisher('/cmd_vel',Twist,queue_size=1)
self.sensor_values = LightSensorValues()
- rospy.Subscriber('/lightsensors', LightSensorValues, self.callback_lightsensors)
+ rospy.Subscriber('/lightsensors', LightSensorValues, self.callback)
- def callback_lightsensors(self,messages):
+ def callback(self,messages):
self.sensor_values = messages
def run(self):
rate = rospy.Rate(10)
data = Twist()
while not rospy.is_shutdown():
data.linear.x = 0.2 if self.sensor_values.sum_all < 500 else 0.0
self.cmd_vel.publish(data)
rate.sleep()
if __name__ == '__main__':
rospy.init_node('wall_stop')
rospy.wait_for_service('/motor_on')
rospy.wait_for_service('/motor_off')
rospy.on_shutdown(rospy.ServiceProxy('/motor_off',Trigger).call)
rospy.ServiceProxy('/motor_on',Trigger).call()
WallStop().run()
|
Reduce the name of a function
|
## Code Before:
import rospy,copy
from geometry_msgs.msg import Twist
from std_srvs.srv import Trigger, TriggerResponse
from pimouse_ros.msg import LightSensorValues
class WallStop():
def __init__(self):
self.cmd_vel = rospy.Publisher('/cmd_vel',Twist,queue_size=1)
self.sensor_values = LightSensorValues()
rospy.Subscriber('/lightsensors', LightSensorValues, self.callback_lightsensors)
def callback_lightsensors(self,messages):
self.sensor_values = messages
def run(self):
rate = rospy.Rate(10)
data = Twist()
while not rospy.is_shutdown():
data.linear.x = 0.2 if self.sensor_values.sum_all < 500 else 0.0
self.cmd_vel.publish(data)
rate.sleep()
if __name__ == '__main__':
rospy.init_node('wall_stop')
rospy.wait_for_service('/motor_on')
rospy.wait_for_service('/motor_off')
rospy.on_shutdown(rospy.ServiceProxy('/motor_off',Trigger).call)
rospy.ServiceProxy('/motor_on',Trigger).call()
WallStop().run()
## Instruction:
Reduce the name of a function
## Code After:
import rospy,copy
from geometry_msgs.msg import Twist
from std_srvs.srv import Trigger, TriggerResponse
from pimouse_ros.msg import LightSensorValues
class WallStop():
def __init__(self):
self.cmd_vel = rospy.Publisher('/cmd_vel',Twist,queue_size=1)
self.sensor_values = LightSensorValues()
rospy.Subscriber('/lightsensors', LightSensorValues, self.callback)
def callback(self,messages):
self.sensor_values = messages
def run(self):
rate = rospy.Rate(10)
data = Twist()
while not rospy.is_shutdown():
data.linear.x = 0.2 if self.sensor_values.sum_all < 500 else 0.0
self.cmd_vel.publish(data)
rate.sleep()
if __name__ == '__main__':
rospy.init_node('wall_stop')
rospy.wait_for_service('/motor_on')
rospy.wait_for_service('/motor_off')
rospy.on_shutdown(rospy.ServiceProxy('/motor_off',Trigger).call)
rospy.ServiceProxy('/motor_on',Trigger).call()
WallStop().run()
|
import rospy,copy
from geometry_msgs.msg import Twist
from std_srvs.srv import Trigger, TriggerResponse
from pimouse_ros.msg import LightSensorValues
class WallStop():
def __init__(self):
self.cmd_vel = rospy.Publisher('/cmd_vel',Twist,queue_size=1)
self.sensor_values = LightSensorValues()
- rospy.Subscriber('/lightsensors', LightSensorValues, self.callback_lightsensors)
? -------------
+ rospy.Subscriber('/lightsensors', LightSensorValues, self.callback)
- def callback_lightsensors(self,messages):
? -------------
+ def callback(self,messages):
self.sensor_values = messages
def run(self):
rate = rospy.Rate(10)
data = Twist()
while not rospy.is_shutdown():
data.linear.x = 0.2 if self.sensor_values.sum_all < 500 else 0.0
self.cmd_vel.publish(data)
rate.sleep()
if __name__ == '__main__':
rospy.init_node('wall_stop')
rospy.wait_for_service('/motor_on')
rospy.wait_for_service('/motor_off')
rospy.on_shutdown(rospy.ServiceProxy('/motor_off',Trigger).call)
rospy.ServiceProxy('/motor_on',Trigger).call()
WallStop().run()
|
be89b2d9617fd5b837695e4322a2c98e4d4346cc
|
semillas_backend/users/serializers.py
|
semillas_backend/users/serializers.py
|
from rest_framework import serializers
from drf_extra_fields.geo_fields import PointField
from .models import User
class UserSerializer(serializers.ModelSerializer):
""" Usage:
from rest_framework.renderers import JSONRenderer
from semillas_backend.users.serializers import UserSerializer
JSONRenderer().render(UserSerializer(user_instance).data)
"""
location = PointField()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login')
class UpdateUserSerializer(serializers.ModelSerializer):
name = serializers.CharField(required=False)
#phone = PhoneNumberField(required=False)
email = serializers.CharField(required=False)
picture = serializers.ImageField(required=False)
uuid = serializers.CharField(read_only=True)
class Meta:
model = User
fields = ('name', 'picture', 'phone', 'email', 'uuid')
from wallet.serializers import WalletSerializer
class FullUserSerializer(UserSerializer):
wallet = WalletSerializer()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone')
|
from rest_framework import serializers
from drf_extra_fields.geo_fields import PointField
from .models import User
class UserSerializer(serializers.ModelSerializer):
""" Usage:
from rest_framework.renderers import JSONRenderer
from semillas_backend.users.serializers import UserSerializer
JSONRenderer().render(UserSerializer(user_instance).data)
"""
location = PointField()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'email', 'phone')
class UpdateUserSerializer(serializers.ModelSerializer):
name = serializers.CharField(required=False)
#phone = PhoneNumberField(required=False)
email = serializers.CharField(required=False)
picture = serializers.ImageField(required=False)
uuid = serializers.CharField(read_only=True)
class Meta:
model = User
fields = ('name', 'picture', 'phone', 'email', 'uuid')
from wallet.serializers import WalletSerializer
class FullUserSerializer(UserSerializer):
wallet = WalletSerializer()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone')
|
Add phone and email to user serializer
|
Add phone and email to user serializer
|
Python
|
mit
|
Semillas/semillas_backend,Semillas/semillas_backend,Semillas/semillas_platform,Semillas/semillas_platform,Semillas/semillas_backend,Semillas/semillas_platform,Semillas/semillas_backend,Semillas/semillas_platform
|
from rest_framework import serializers
from drf_extra_fields.geo_fields import PointField
from .models import User
class UserSerializer(serializers.ModelSerializer):
""" Usage:
from rest_framework.renderers import JSONRenderer
from semillas_backend.users.serializers import UserSerializer
JSONRenderer().render(UserSerializer(user_instance).data)
"""
location = PointField()
class Meta:
model = User
- fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login')
+ fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'email', 'phone')
class UpdateUserSerializer(serializers.ModelSerializer):
name = serializers.CharField(required=False)
#phone = PhoneNumberField(required=False)
email = serializers.CharField(required=False)
picture = serializers.ImageField(required=False)
uuid = serializers.CharField(read_only=True)
class Meta:
model = User
fields = ('name', 'picture', 'phone', 'email', 'uuid')
from wallet.serializers import WalletSerializer
class FullUserSerializer(UserSerializer):
wallet = WalletSerializer()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone')
|
Add phone and email to user serializer
|
## Code Before:
from rest_framework import serializers
from drf_extra_fields.geo_fields import PointField
from .models import User
class UserSerializer(serializers.ModelSerializer):
""" Usage:
from rest_framework.renderers import JSONRenderer
from semillas_backend.users.serializers import UserSerializer
JSONRenderer().render(UserSerializer(user_instance).data)
"""
location = PointField()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login')
class UpdateUserSerializer(serializers.ModelSerializer):
name = serializers.CharField(required=False)
#phone = PhoneNumberField(required=False)
email = serializers.CharField(required=False)
picture = serializers.ImageField(required=False)
uuid = serializers.CharField(read_only=True)
class Meta:
model = User
fields = ('name', 'picture', 'phone', 'email', 'uuid')
from wallet.serializers import WalletSerializer
class FullUserSerializer(UserSerializer):
wallet = WalletSerializer()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone')
## Instruction:
Add phone and email to user serializer
## Code After:
from rest_framework import serializers
from drf_extra_fields.geo_fields import PointField
from .models import User
class UserSerializer(serializers.ModelSerializer):
""" Usage:
from rest_framework.renderers import JSONRenderer
from semillas_backend.users.serializers import UserSerializer
JSONRenderer().render(UserSerializer(user_instance).data)
"""
location = PointField()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'email', 'phone')
class UpdateUserSerializer(serializers.ModelSerializer):
name = serializers.CharField(required=False)
#phone = PhoneNumberField(required=False)
email = serializers.CharField(required=False)
picture = serializers.ImageField(required=False)
uuid = serializers.CharField(read_only=True)
class Meta:
model = User
fields = ('name', 'picture', 'phone', 'email', 'uuid')
from wallet.serializers import WalletSerializer
class FullUserSerializer(UserSerializer):
wallet = WalletSerializer()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone')
|
from rest_framework import serializers
from drf_extra_fields.geo_fields import PointField
from .models import User
class UserSerializer(serializers.ModelSerializer):
""" Usage:
from rest_framework.renderers import JSONRenderer
from semillas_backend.users.serializers import UserSerializer
JSONRenderer().render(UserSerializer(user_instance).data)
"""
location = PointField()
class Meta:
model = User
- fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login')
+ fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'email', 'phone')
? ++++++++++++++++++
class UpdateUserSerializer(serializers.ModelSerializer):
name = serializers.CharField(required=False)
#phone = PhoneNumberField(required=False)
email = serializers.CharField(required=False)
picture = serializers.ImageField(required=False)
uuid = serializers.CharField(read_only=True)
class Meta:
model = User
fields = ('name', 'picture', 'phone', 'email', 'uuid')
from wallet.serializers import WalletSerializer
class FullUserSerializer(UserSerializer):
wallet = WalletSerializer()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone')
|
b75e3646ccd1b61868a47017f14f25960e52578c
|
bot/action/standard/info/action.py
|
bot/action/standard/info/action.py
|
from bot.action.core.action import Action
from bot.action.standard.info.formatter.chat import ChatInfoFormatter
from bot.action.standard.info.formatter.user import UserInfoFormatter
class MeInfoAction(Action):
def process(self, event):
formatter = UserInfoFormatter(self.api, event.message.from_, event.chat)
formatter.format(member_info=True)
response = formatter.get_formatted()
self.api.send_message(response.build_message().to_chat_replying(event.message))
class ChatInfoAction(Action):
def process(self, event):
formatter = ChatInfoFormatter(self.api, event.chat, self.cache.bot_info, event.message.from_)
formatter.format(full_info=True)
response = formatter.get_formatted()
self.api.send_message(response.build_message().to_chat_replying(event.message))
|
from bot.action.core.action import Action
from bot.action.standard.info.formatter.chat import ChatInfoFormatter
from bot.action.standard.info.formatter.user import UserInfoFormatter
class MeInfoAction(Action):
def process(self, event):
formatter = UserInfoFormatter(self.api, event.message.from_, event.chat)
formatter.format(member_info=True)
response = formatter.get_formatted()
self.api.send_message(response.build_message().to_chat_replying(event.message))
class UserInfoAction(Action):
def process(self, event):
message = event.message
replied_message = message.reply_to_message
if replied_message is None:
user = message.from_
else:
user = replied_message.from_
formatter = UserInfoFormatter(self.api, user, event.chat)
formatter.format(member_info=True)
response = formatter.get_formatted()
self.api.send_message(response.build_message().to_chat_replying(event.message))
class ChatInfoAction(Action):
def process(self, event):
formatter = ChatInfoFormatter(self.api, event.chat, self.cache.bot_info, event.message.from_)
formatter.format(full_info=True)
response = formatter.get_formatted()
self.api.send_message(response.build_message().to_chat_replying(event.message))
|
Create UserInfoAction that shows the info of the user which the message replies to, or the current user if there is no reply
|
Create UserInfoAction that shows the info of the user which the message replies to, or the current user if there is no reply
|
Python
|
agpl-3.0
|
alvarogzp/telegram-bot,alvarogzp/telegram-bot
|
from bot.action.core.action import Action
from bot.action.standard.info.formatter.chat import ChatInfoFormatter
from bot.action.standard.info.formatter.user import UserInfoFormatter
class MeInfoAction(Action):
def process(self, event):
formatter = UserInfoFormatter(self.api, event.message.from_, event.chat)
formatter.format(member_info=True)
response = formatter.get_formatted()
self.api.send_message(response.build_message().to_chat_replying(event.message))
+ class UserInfoAction(Action):
+ def process(self, event):
+ message = event.message
+ replied_message = message.reply_to_message
+ if replied_message is None:
+ user = message.from_
+ else:
+ user = replied_message.from_
+ formatter = UserInfoFormatter(self.api, user, event.chat)
+ formatter.format(member_info=True)
+ response = formatter.get_formatted()
+ self.api.send_message(response.build_message().to_chat_replying(event.message))
+
+
class ChatInfoAction(Action):
def process(self, event):
formatter = ChatInfoFormatter(self.api, event.chat, self.cache.bot_info, event.message.from_)
formatter.format(full_info=True)
response = formatter.get_formatted()
self.api.send_message(response.build_message().to_chat_replying(event.message))
|
Create UserInfoAction that shows the info of the user which the message replies to, or the current user if there is no reply
|
## Code Before:
from bot.action.core.action import Action
from bot.action.standard.info.formatter.chat import ChatInfoFormatter
from bot.action.standard.info.formatter.user import UserInfoFormatter
class MeInfoAction(Action):
def process(self, event):
formatter = UserInfoFormatter(self.api, event.message.from_, event.chat)
formatter.format(member_info=True)
response = formatter.get_formatted()
self.api.send_message(response.build_message().to_chat_replying(event.message))
class ChatInfoAction(Action):
def process(self, event):
formatter = ChatInfoFormatter(self.api, event.chat, self.cache.bot_info, event.message.from_)
formatter.format(full_info=True)
response = formatter.get_formatted()
self.api.send_message(response.build_message().to_chat_replying(event.message))
## Instruction:
Create UserInfoAction that shows the info of the user which the message replies to, or the current user if there is no reply
## Code After:
from bot.action.core.action import Action
from bot.action.standard.info.formatter.chat import ChatInfoFormatter
from bot.action.standard.info.formatter.user import UserInfoFormatter
class MeInfoAction(Action):
def process(self, event):
formatter = UserInfoFormatter(self.api, event.message.from_, event.chat)
formatter.format(member_info=True)
response = formatter.get_formatted()
self.api.send_message(response.build_message().to_chat_replying(event.message))
class UserInfoAction(Action):
def process(self, event):
message = event.message
replied_message = message.reply_to_message
if replied_message is None:
user = message.from_
else:
user = replied_message.from_
formatter = UserInfoFormatter(self.api, user, event.chat)
formatter.format(member_info=True)
response = formatter.get_formatted()
self.api.send_message(response.build_message().to_chat_replying(event.message))
class ChatInfoAction(Action):
def process(self, event):
formatter = ChatInfoFormatter(self.api, event.chat, self.cache.bot_info, event.message.from_)
formatter.format(full_info=True)
response = formatter.get_formatted()
self.api.send_message(response.build_message().to_chat_replying(event.message))
|
from bot.action.core.action import Action
from bot.action.standard.info.formatter.chat import ChatInfoFormatter
from bot.action.standard.info.formatter.user import UserInfoFormatter
class MeInfoAction(Action):
def process(self, event):
formatter = UserInfoFormatter(self.api, event.message.from_, event.chat)
formatter.format(member_info=True)
response = formatter.get_formatted()
self.api.send_message(response.build_message().to_chat_replying(event.message))
+ class UserInfoAction(Action):
+ def process(self, event):
+ message = event.message
+ replied_message = message.reply_to_message
+ if replied_message is None:
+ user = message.from_
+ else:
+ user = replied_message.from_
+ formatter = UserInfoFormatter(self.api, user, event.chat)
+ formatter.format(member_info=True)
+ response = formatter.get_formatted()
+ self.api.send_message(response.build_message().to_chat_replying(event.message))
+
+
class ChatInfoAction(Action):
def process(self, event):
formatter = ChatInfoFormatter(self.api, event.chat, self.cache.bot_info, event.message.from_)
formatter.format(full_info=True)
response = formatter.get_formatted()
self.api.send_message(response.build_message().to_chat_replying(event.message))
|
dd1aa173c8d158f45af9eeff8d3cc58c0e272f12
|
solcast/radiation_estimated_actuals.py
|
solcast/radiation_estimated_actuals.py
|
from datetime import datetime, timedelta
from urllib.parse import urljoin
from isodate import parse_datetime, parse_duration
import requests
from solcast.base import Base
class RadiationEstimatedActuals(Base):
end_point = 'radiation/estimated_actuals'
def __init__(self, latitude, longitude, *args, **kwargs):
self.latitude = latitude
self.longitude = longitude
self.latest = kwargs.get('latest', False)
self.estimated_actuals = None
self.params = {'latitude' : self.latitude,
'longitude' : self.longitude,
'capacity' : self.capacity,
'tilt' : self.tilt,
'azimuth' : self.azimuth,
'install_date' : self.install_date,
'loss_factor': self.loss_factor
}
if self.latest:
self.end_point = self.end_point + '/latest'
self._get(*args, **kwargs)
if self.ok:
self._generate_est_acts_dict()
def _generate_est_acts_dict(self):
self.estimated_actuals = []
for est_act in self.content.get('estimated_actuals'):
# Convert period_end and period. All other fields should already be
# the correct type
est_act['period_end'] = parse_datetime(est_act['period_end'])
est_act['period'] = parse_duration(est_act['period'])
self.estimated_actuals.append(est_act)
|
from datetime import datetime, timedelta
from urllib.parse import urljoin
from isodate import parse_datetime, parse_duration
import requests
from solcast.base import Base
class RadiationEstimatedActuals(Base):
end_point = 'radiation/estimated_actuals'
def __init__(self, latitude, longitude, *args, **kwargs):
self.latitude = latitude
self.longitude = longitude
self.latest = kwargs.get('latest', False)
self.estimated_actuals = None
self.params = {'latitude' : self.latitude, 'longitude' : self.longitude}
if self.latest:
self.end_point = self.end_point + '/latest'
self._get(*args, **kwargs)
if self.ok:
self._generate_est_acts_dict()
def _generate_est_acts_dict(self):
self.estimated_actuals = []
for est_act in self.content.get('estimated_actuals'):
# Convert period_end and period. All other fields should already be
# the correct type
est_act['period_end'] = parse_datetime(est_act['period_end'])
est_act['period'] = parse_duration(est_act['period'])
self.estimated_actuals.append(est_act)
|
Remove parameters that aren't required for an estimate actuals request
|
Remove parameters that aren't required for an estimate actuals request
|
Python
|
mit
|
cjtapper/solcast-py
|
from datetime import datetime, timedelta
from urllib.parse import urljoin
from isodate import parse_datetime, parse_duration
import requests
from solcast.base import Base
class RadiationEstimatedActuals(Base):
end_point = 'radiation/estimated_actuals'
def __init__(self, latitude, longitude, *args, **kwargs):
self.latitude = latitude
self.longitude = longitude
self.latest = kwargs.get('latest', False)
self.estimated_actuals = None
- self.params = {'latitude' : self.latitude,
+ self.params = {'latitude' : self.latitude, 'longitude' : self.longitude}
- 'longitude' : self.longitude,
- 'capacity' : self.capacity,
- 'tilt' : self.tilt,
- 'azimuth' : self.azimuth,
- 'install_date' : self.install_date,
- 'loss_factor': self.loss_factor
- }
if self.latest:
self.end_point = self.end_point + '/latest'
self._get(*args, **kwargs)
if self.ok:
self._generate_est_acts_dict()
def _generate_est_acts_dict(self):
self.estimated_actuals = []
for est_act in self.content.get('estimated_actuals'):
# Convert period_end and period. All other fields should already be
# the correct type
est_act['period_end'] = parse_datetime(est_act['period_end'])
est_act['period'] = parse_duration(est_act['period'])
self.estimated_actuals.append(est_act)
|
Remove parameters that aren't required for an estimate actuals request
|
## Code Before:
from datetime import datetime, timedelta
from urllib.parse import urljoin
from isodate import parse_datetime, parse_duration
import requests
from solcast.base import Base
class RadiationEstimatedActuals(Base):
end_point = 'radiation/estimated_actuals'
def __init__(self, latitude, longitude, *args, **kwargs):
self.latitude = latitude
self.longitude = longitude
self.latest = kwargs.get('latest', False)
self.estimated_actuals = None
self.params = {'latitude' : self.latitude,
'longitude' : self.longitude,
'capacity' : self.capacity,
'tilt' : self.tilt,
'azimuth' : self.azimuth,
'install_date' : self.install_date,
'loss_factor': self.loss_factor
}
if self.latest:
self.end_point = self.end_point + '/latest'
self._get(*args, **kwargs)
if self.ok:
self._generate_est_acts_dict()
def _generate_est_acts_dict(self):
self.estimated_actuals = []
for est_act in self.content.get('estimated_actuals'):
# Convert period_end and period. All other fields should already be
# the correct type
est_act['period_end'] = parse_datetime(est_act['period_end'])
est_act['period'] = parse_duration(est_act['period'])
self.estimated_actuals.append(est_act)
## Instruction:
Remove parameters that aren't required for an estimate actuals request
## Code After:
from datetime import datetime, timedelta
from urllib.parse import urljoin
from isodate import parse_datetime, parse_duration
import requests
from solcast.base import Base
class RadiationEstimatedActuals(Base):
end_point = 'radiation/estimated_actuals'
def __init__(self, latitude, longitude, *args, **kwargs):
self.latitude = latitude
self.longitude = longitude
self.latest = kwargs.get('latest', False)
self.estimated_actuals = None
self.params = {'latitude' : self.latitude, 'longitude' : self.longitude}
if self.latest:
self.end_point = self.end_point + '/latest'
self._get(*args, **kwargs)
if self.ok:
self._generate_est_acts_dict()
def _generate_est_acts_dict(self):
self.estimated_actuals = []
for est_act in self.content.get('estimated_actuals'):
# Convert period_end and period. All other fields should already be
# the correct type
est_act['period_end'] = parse_datetime(est_act['period_end'])
est_act['period'] = parse_duration(est_act['period'])
self.estimated_actuals.append(est_act)
|
from datetime import datetime, timedelta
from urllib.parse import urljoin
from isodate import parse_datetime, parse_duration
import requests
from solcast.base import Base
class RadiationEstimatedActuals(Base):
end_point = 'radiation/estimated_actuals'
def __init__(self, latitude, longitude, *args, **kwargs):
self.latitude = latitude
self.longitude = longitude
self.latest = kwargs.get('latest', False)
self.estimated_actuals = None
- self.params = {'latitude' : self.latitude,
+ self.params = {'latitude' : self.latitude, 'longitude' : self.longitude}
? ++++++++++++++++++++++++++++++
- 'longitude' : self.longitude,
- 'capacity' : self.capacity,
- 'tilt' : self.tilt,
- 'azimuth' : self.azimuth,
- 'install_date' : self.install_date,
- 'loss_factor': self.loss_factor
- }
if self.latest:
self.end_point = self.end_point + '/latest'
self._get(*args, **kwargs)
if self.ok:
self._generate_est_acts_dict()
def _generate_est_acts_dict(self):
self.estimated_actuals = []
for est_act in self.content.get('estimated_actuals'):
# Convert period_end and period. All other fields should already be
# the correct type
est_act['period_end'] = parse_datetime(est_act['period_end'])
est_act['period'] = parse_duration(est_act['period'])
self.estimated_actuals.append(est_act)
|
8dc6c7567f9bc94dc1b4a96b80d059f1231039bc
|
st2auth_flat_file_backend/__init__.py
|
st2auth_flat_file_backend/__init__.py
|
from flat_file import FlatFileAuthenticationBackend
__all__ = [
'FlatFileAuthenticationBackend'
]
__version__ = '0.1.0'
|
from __future__ import absolute_import
from .flat_file import FlatFileAuthenticationBackend
__all__ = [
'FlatFileAuthenticationBackend'
]
__version__ = '0.1.0'
|
Fix code so it also works under Python 3.
|
Fix code so it also works under Python 3.
|
Python
|
apache-2.0
|
StackStorm/st2-auth-backend-flat-file
|
+ from __future__ import absolute_import
+
- from flat_file import FlatFileAuthenticationBackend
+ from .flat_file import FlatFileAuthenticationBackend
__all__ = [
'FlatFileAuthenticationBackend'
]
__version__ = '0.1.0'
|
Fix code so it also works under Python 3.
|
## Code Before:
from flat_file import FlatFileAuthenticationBackend
__all__ = [
'FlatFileAuthenticationBackend'
]
__version__ = '0.1.0'
## Instruction:
Fix code so it also works under Python 3.
## Code After:
from __future__ import absolute_import
from .flat_file import FlatFileAuthenticationBackend
__all__ = [
'FlatFileAuthenticationBackend'
]
__version__ = '0.1.0'
|
+ from __future__ import absolute_import
+
- from flat_file import FlatFileAuthenticationBackend
+ from .flat_file import FlatFileAuthenticationBackend
? +
__all__ = [
'FlatFileAuthenticationBackend'
]
__version__ = '0.1.0'
|
97940ed6ddd7d50feb47a932be096be5b223b1f0
|
assassins/assassins/views.py
|
assassins/assassins/views.py
|
from django.shortcuts import render, redirect
from django.contrib.auth import views as auth_views
# Create your views here.
def index(request):
pass
def login(request, **kwargs):
if request.user.is_authenticated():
return redirect('index')
else:
return auth_views.login(request)
|
from django.shortcuts import render, redirect
from django.contrib.auth import views as auth_views
from django.views.decorators.http import require_POST
# Create your views here.
def index(request):
pass
@require_POST
def login(request, **kwargs):
if request.user.is_authenticated():
return redirect('index')
else:
return auth_views.login(request)
|
Modify login view to be a post endpoint
|
Modify login view to be a post endpoint
|
Python
|
mit
|
Squa256/assassins,bobandbetty/assassins,bobandbetty/assassins,bobandbetty/assassins,Squa256/assassins,Squa256/assassins
|
from django.shortcuts import render, redirect
from django.contrib.auth import views as auth_views
+ from django.views.decorators.http import require_POST
# Create your views here.
def index(request):
pass
+ @require_POST
def login(request, **kwargs):
if request.user.is_authenticated():
return redirect('index')
else:
return auth_views.login(request)
|
Modify login view to be a post endpoint
|
## Code Before:
from django.shortcuts import render, redirect
from django.contrib.auth import views as auth_views
# Create your views here.
def index(request):
pass
def login(request, **kwargs):
if request.user.is_authenticated():
return redirect('index')
else:
return auth_views.login(request)
## Instruction:
Modify login view to be a post endpoint
## Code After:
from django.shortcuts import render, redirect
from django.contrib.auth import views as auth_views
from django.views.decorators.http import require_POST
# Create your views here.
def index(request):
pass
@require_POST
def login(request, **kwargs):
if request.user.is_authenticated():
return redirect('index')
else:
return auth_views.login(request)
|
from django.shortcuts import render, redirect
from django.contrib.auth import views as auth_views
+ from django.views.decorators.http import require_POST
# Create your views here.
def index(request):
pass
+ @require_POST
def login(request, **kwargs):
if request.user.is_authenticated():
return redirect('index')
else:
return auth_views.login(request)
|
ac2b01e9177d04a6446b770639745010770cb317
|
nuage_neutron/plugins/nuage_ml2/nuage_subnet_ext_driver.py
|
nuage_neutron/plugins/nuage_ml2/nuage_subnet_ext_driver.py
|
from oslo_log import log as logging
from neutron.plugins.ml2 import driver_api as api
LOG = logging.getLogger(__name__)
class NuageSubnetExtensionDriver(api.ExtensionDriver):
_supported_extension_alias = 'nuage-subnet'
def initialize(self):
pass
@property
def extension_alias(self):
return self._supported_extension_alias
def process_create_subnet(self, plugin_context, data, result):
result['net_partition'] = data['net_partition']
result['nuagenet'] = data['nuagenet']
def extend_subnet_dict(self, session, db_data, result):
return result
|
from oslo_log import log as logging
from neutron.plugins.ml2 import driver_api as api
from nuage_neutron.plugins.common import nuagedb
LOG = logging.getLogger(__name__)
class NuageSubnetExtensionDriver(api.ExtensionDriver):
_supported_extension_alias = 'nuage-subnet'
def initialize(self):
pass
@property
def extension_alias(self):
return self._supported_extension_alias
def process_create_subnet(self, plugin_context, data, result):
result['net_partition'] = data['net_partition']
result['nuagenet'] = data['nuagenet']
def extend_subnet_dict(self, session, db_data, result):
subnet_mapping = nuagedb.get_subnet_l2dom_by_id(session, result['id'])
if subnet_mapping:
result['vsd_managed'] = subnet_mapping['nuage_managed_subnet']
else:
result['vsd_managed'] = False
return result
|
Add 'vsd_managed' to the GET subnet response for ML2
|
Add 'vsd_managed' to the GET subnet response for ML2
This commit looks up the related nuage_subnet_l2dom_mapping in the database
and uses the nuage_managed_subnet field to fill in 'vsd_managed'. False by
default.
Change-Id: I68957fe3754dc9f1ccf2b6a2b09a762fccd17a89
Closes-Bug: OPENSTACK-1504
|
Python
|
apache-2.0
|
nuagenetworks/nuage-openstack-neutron,naveensan1/nuage-openstack-neutron,naveensan1/nuage-openstack-neutron,nuagenetworks/nuage-openstack-neutron
|
from oslo_log import log as logging
from neutron.plugins.ml2 import driver_api as api
+ from nuage_neutron.plugins.common import nuagedb
LOG = logging.getLogger(__name__)
class NuageSubnetExtensionDriver(api.ExtensionDriver):
_supported_extension_alias = 'nuage-subnet'
def initialize(self):
pass
@property
def extension_alias(self):
return self._supported_extension_alias
def process_create_subnet(self, plugin_context, data, result):
result['net_partition'] = data['net_partition']
result['nuagenet'] = data['nuagenet']
def extend_subnet_dict(self, session, db_data, result):
+ subnet_mapping = nuagedb.get_subnet_l2dom_by_id(session, result['id'])
+ if subnet_mapping:
+ result['vsd_managed'] = subnet_mapping['nuage_managed_subnet']
+ else:
+ result['vsd_managed'] = False
return result
|
Add 'vsd_managed' to the GET subnet response for ML2
|
## Code Before:
from oslo_log import log as logging
from neutron.plugins.ml2 import driver_api as api
LOG = logging.getLogger(__name__)
class NuageSubnetExtensionDriver(api.ExtensionDriver):
_supported_extension_alias = 'nuage-subnet'
def initialize(self):
pass
@property
def extension_alias(self):
return self._supported_extension_alias
def process_create_subnet(self, plugin_context, data, result):
result['net_partition'] = data['net_partition']
result['nuagenet'] = data['nuagenet']
def extend_subnet_dict(self, session, db_data, result):
return result
## Instruction:
Add 'vsd_managed' to the GET subnet response for ML2
## Code After:
from oslo_log import log as logging
from neutron.plugins.ml2 import driver_api as api
from nuage_neutron.plugins.common import nuagedb
LOG = logging.getLogger(__name__)
class NuageSubnetExtensionDriver(api.ExtensionDriver):
_supported_extension_alias = 'nuage-subnet'
def initialize(self):
pass
@property
def extension_alias(self):
return self._supported_extension_alias
def process_create_subnet(self, plugin_context, data, result):
result['net_partition'] = data['net_partition']
result['nuagenet'] = data['nuagenet']
def extend_subnet_dict(self, session, db_data, result):
subnet_mapping = nuagedb.get_subnet_l2dom_by_id(session, result['id'])
if subnet_mapping:
result['vsd_managed'] = subnet_mapping['nuage_managed_subnet']
else:
result['vsd_managed'] = False
return result
|
from oslo_log import log as logging
from neutron.plugins.ml2 import driver_api as api
+ from nuage_neutron.plugins.common import nuagedb
LOG = logging.getLogger(__name__)
class NuageSubnetExtensionDriver(api.ExtensionDriver):
_supported_extension_alias = 'nuage-subnet'
def initialize(self):
pass
@property
def extension_alias(self):
return self._supported_extension_alias
def process_create_subnet(self, plugin_context, data, result):
result['net_partition'] = data['net_partition']
result['nuagenet'] = data['nuagenet']
def extend_subnet_dict(self, session, db_data, result):
+ subnet_mapping = nuagedb.get_subnet_l2dom_by_id(session, result['id'])
+ if subnet_mapping:
+ result['vsd_managed'] = subnet_mapping['nuage_managed_subnet']
+ else:
+ result['vsd_managed'] = False
return result
|
c72b712cf84e63dd2d72fdc6d64c50a65b8a88a0
|
courant/core/search/urls.py
|
courant/core/search/urls.py
|
from django.conf.urls.defaults import *
from courant.core.search.views import *
from haystack.forms import ModelSearchForm
from haystack.query import SearchQuerySet
from haystack.views import SearchView
urlpatterns = patterns('',
url(r'', CourantSearchView(template='search/results_page.html',
form_class=ModelSearchForm,
searchqueryset=SearchQuerySet().all()), name="search"),
)
|
from django.conf.urls.defaults import *
from courant.core.search.views import *
from haystack.forms import ModelSearchForm
from haystack.query import SearchQuerySet
from haystack.views import SearchView
urlpatterns = patterns('',
url(r'', SearchView(template='search/results_page.html',
load_all=True,
form_class=ModelSearchForm,
searchqueryset=SearchQuerySet().all()), name="search"),
)
|
Remove all Haystack customization of search view pending further investigations.
|
Remove all Haystack customization of search view pending further investigations.
|
Python
|
bsd-3-clause
|
maxcutler/Courant-News,maxcutler/Courant-News
|
from django.conf.urls.defaults import *
from courant.core.search.views import *
from haystack.forms import ModelSearchForm
from haystack.query import SearchQuerySet
from haystack.views import SearchView
urlpatterns = patterns('',
- url(r'', CourantSearchView(template='search/results_page.html',
+ url(r'', SearchView(template='search/results_page.html',
+ load_all=True,
- form_class=ModelSearchForm,
+ form_class=ModelSearchForm,
- searchqueryset=SearchQuerySet().all()), name="search"),
+ searchqueryset=SearchQuerySet().all()), name="search"),
)
|
Remove all Haystack customization of search view pending further investigations.
|
## Code Before:
from django.conf.urls.defaults import *
from courant.core.search.views import *
from haystack.forms import ModelSearchForm
from haystack.query import SearchQuerySet
from haystack.views import SearchView
urlpatterns = patterns('',
url(r'', CourantSearchView(template='search/results_page.html',
form_class=ModelSearchForm,
searchqueryset=SearchQuerySet().all()), name="search"),
)
## Instruction:
Remove all Haystack customization of search view pending further investigations.
## Code After:
from django.conf.urls.defaults import *
from courant.core.search.views import *
from haystack.forms import ModelSearchForm
from haystack.query import SearchQuerySet
from haystack.views import SearchView
urlpatterns = patterns('',
url(r'', SearchView(template='search/results_page.html',
load_all=True,
form_class=ModelSearchForm,
searchqueryset=SearchQuerySet().all()), name="search"),
)
|
from django.conf.urls.defaults import *
from courant.core.search.views import *
from haystack.forms import ModelSearchForm
from haystack.query import SearchQuerySet
from haystack.views import SearchView
urlpatterns = patterns('',
- url(r'', CourantSearchView(template='search/results_page.html',
? -------
+ url(r'', SearchView(template='search/results_page.html',
+ load_all=True,
- form_class=ModelSearchForm,
? -------
+ form_class=ModelSearchForm,
- searchqueryset=SearchQuerySet().all()), name="search"),
? -------
+ searchqueryset=SearchQuerySet().all()), name="search"),
)
|
e507461dba5020726c9505fef187098ad234a68a
|
kazoo/tests/__init__.py
|
kazoo/tests/__init__.py
|
import os
import unittest
import time
import uuid
from kazoo.client import KazooClient, KazooState
# if this env variable is set, ZK client integration tests are run
# against the specified host list
ENV_TEST_HOSTS = "KAZOO_TEST_HOSTS"
def get_hosts_or_skip():
if ENV_TEST_HOSTS in os.environ:
return os.environ[ENV_TEST_HOSTS]
raise unittest.SkipTest("Skipping ZooKeeper test. To run, set " +
"%s env to a host list. (ex: localhost:2181)" %
ENV_TEST_HOSTS)
def get_client_or_skip(**kwargs):
hosts = get_hosts_or_skip()
return KazooClient(hosts, **kwargs)
def until_timeout(timeout, value=None):
"""Returns an iterator that repeats until a timeout is reached
timeout is in seconds
"""
start = time.time()
while True:
if time.time() - start >= timeout:
raise Exception("timed out before success!")
yield value
class KazooTestCase(unittest.TestCase):
def _get_client(self):
return KazooClient(self.hosts)
def setUp(self):
namespace = "/kazootests" + uuid.uuid4().hex
self.hosts = get_hosts_or_skip() + namespace
self.client = self._get_client()
def tearDown(self):
if self.client.state == KazooState.LOST:
self.client.connect()
self.client.stop()
|
import os
import unittest
import time
import uuid
from nose import SkipTest
from kazoo.client import KazooClient, KazooState
# if this env variable is set, ZK client integration tests are run
# against the specified host list
ENV_TEST_HOSTS = "KAZOO_TEST_HOSTS"
def get_hosts_or_skip():
if ENV_TEST_HOSTS in os.environ:
return os.environ[ENV_TEST_HOSTS]
raise SkipTest("Skipping ZooKeeper test. To run, set " +
"%s env to a host list. (ex: localhost:2181)" %
ENV_TEST_HOSTS)
def get_client_or_skip(**kwargs):
hosts = get_hosts_or_skip()
return KazooClient(hosts, **kwargs)
def until_timeout(timeout, value=None):
"""Returns an iterator that repeats until a timeout is reached
timeout is in seconds
"""
start = time.time()
while True:
if time.time() - start >= timeout:
raise Exception("timed out before success!")
yield value
class KazooTestCase(unittest.TestCase):
def _get_client(self):
return KazooClient(self.hosts)
def setUp(self):
namespace = "/kazootests" + uuid.uuid4().hex
self.hosts = get_hosts_or_skip() + namespace
self.client = self._get_client()
def tearDown(self):
if self.client.state == KazooState.LOST:
self.client.connect()
self.client.stop()
|
Use SkipTest that works on Py2.6
|
Use SkipTest that works on Py2.6
|
Python
|
apache-2.0
|
kormat/kazoo,rackerlabs/kazoo,tempbottle/kazoo,max0d41/kazoo,rgs1/kazoo,rockerbox/kazoo,harlowja/kazoo,kormat/kazoo,rgs1/kazoo,harlowja/kazoo,pombredanne/kazoo,python-zk/kazoo,python-zk/kazoo,pombredanne/kazoo,rockerbox/kazoo,tempbottle/kazoo,AlexanderplUs/kazoo,jacksontj/kazoo,max0d41/kazoo,Asana/kazoo,jacksontj/kazoo,rackerlabs/kazoo,bsanders/kazoo,bsanders/kazoo,AlexanderplUs/kazoo
|
import os
import unittest
import time
import uuid
+
+ from nose import SkipTest
from kazoo.client import KazooClient, KazooState
# if this env variable is set, ZK client integration tests are run
# against the specified host list
ENV_TEST_HOSTS = "KAZOO_TEST_HOSTS"
def get_hosts_or_skip():
if ENV_TEST_HOSTS in os.environ:
return os.environ[ENV_TEST_HOSTS]
- raise unittest.SkipTest("Skipping ZooKeeper test. To run, set " +
+ raise SkipTest("Skipping ZooKeeper test. To run, set " +
- "%s env to a host list. (ex: localhost:2181)" %
+ "%s env to a host list. (ex: localhost:2181)" %
- ENV_TEST_HOSTS)
+ ENV_TEST_HOSTS)
def get_client_or_skip(**kwargs):
hosts = get_hosts_or_skip()
return KazooClient(hosts, **kwargs)
def until_timeout(timeout, value=None):
"""Returns an iterator that repeats until a timeout is reached
timeout is in seconds
"""
start = time.time()
while True:
if time.time() - start >= timeout:
raise Exception("timed out before success!")
yield value
class KazooTestCase(unittest.TestCase):
def _get_client(self):
return KazooClient(self.hosts)
def setUp(self):
namespace = "/kazootests" + uuid.uuid4().hex
self.hosts = get_hosts_or_skip() + namespace
self.client = self._get_client()
def tearDown(self):
if self.client.state == KazooState.LOST:
self.client.connect()
self.client.stop()
|
Use SkipTest that works on Py2.6
|
## Code Before:
import os
import unittest
import time
import uuid
from kazoo.client import KazooClient, KazooState
# if this env variable is set, ZK client integration tests are run
# against the specified host list
ENV_TEST_HOSTS = "KAZOO_TEST_HOSTS"
def get_hosts_or_skip():
if ENV_TEST_HOSTS in os.environ:
return os.environ[ENV_TEST_HOSTS]
raise unittest.SkipTest("Skipping ZooKeeper test. To run, set " +
"%s env to a host list. (ex: localhost:2181)" %
ENV_TEST_HOSTS)
def get_client_or_skip(**kwargs):
hosts = get_hosts_or_skip()
return KazooClient(hosts, **kwargs)
def until_timeout(timeout, value=None):
"""Returns an iterator that repeats until a timeout is reached
timeout is in seconds
"""
start = time.time()
while True:
if time.time() - start >= timeout:
raise Exception("timed out before success!")
yield value
class KazooTestCase(unittest.TestCase):
def _get_client(self):
return KazooClient(self.hosts)
def setUp(self):
namespace = "/kazootests" + uuid.uuid4().hex
self.hosts = get_hosts_or_skip() + namespace
self.client = self._get_client()
def tearDown(self):
if self.client.state == KazooState.LOST:
self.client.connect()
self.client.stop()
## Instruction:
Use SkipTest that works on Py2.6
## Code After:
import os
import unittest
import time
import uuid
from nose import SkipTest
from kazoo.client import KazooClient, KazooState
# if this env variable is set, ZK client integration tests are run
# against the specified host list
ENV_TEST_HOSTS = "KAZOO_TEST_HOSTS"
def get_hosts_or_skip():
if ENV_TEST_HOSTS in os.environ:
return os.environ[ENV_TEST_HOSTS]
raise SkipTest("Skipping ZooKeeper test. To run, set " +
"%s env to a host list. (ex: localhost:2181)" %
ENV_TEST_HOSTS)
def get_client_or_skip(**kwargs):
hosts = get_hosts_or_skip()
return KazooClient(hosts, **kwargs)
def until_timeout(timeout, value=None):
"""Returns an iterator that repeats until a timeout is reached
timeout is in seconds
"""
start = time.time()
while True:
if time.time() - start >= timeout:
raise Exception("timed out before success!")
yield value
class KazooTestCase(unittest.TestCase):
def _get_client(self):
return KazooClient(self.hosts)
def setUp(self):
namespace = "/kazootests" + uuid.uuid4().hex
self.hosts = get_hosts_or_skip() + namespace
self.client = self._get_client()
def tearDown(self):
if self.client.state == KazooState.LOST:
self.client.connect()
self.client.stop()
|
import os
import unittest
import time
import uuid
+
+ from nose import SkipTest
from kazoo.client import KazooClient, KazooState
# if this env variable is set, ZK client integration tests are run
# against the specified host list
ENV_TEST_HOSTS = "KAZOO_TEST_HOSTS"
def get_hosts_or_skip():
if ENV_TEST_HOSTS in os.environ:
return os.environ[ENV_TEST_HOSTS]
- raise unittest.SkipTest("Skipping ZooKeeper test. To run, set " +
? ---------
+ raise SkipTest("Skipping ZooKeeper test. To run, set " +
- "%s env to a host list. (ex: localhost:2181)" %
? ---------
+ "%s env to a host list. (ex: localhost:2181)" %
- ENV_TEST_HOSTS)
? --------
+ ENV_TEST_HOSTS)
def get_client_or_skip(**kwargs):
hosts = get_hosts_or_skip()
return KazooClient(hosts, **kwargs)
def until_timeout(timeout, value=None):
"""Returns an iterator that repeats until a timeout is reached
timeout is in seconds
"""
start = time.time()
while True:
if time.time() - start >= timeout:
raise Exception("timed out before success!")
yield value
class KazooTestCase(unittest.TestCase):
def _get_client(self):
return KazooClient(self.hosts)
def setUp(self):
namespace = "/kazootests" + uuid.uuid4().hex
self.hosts = get_hosts_or_skip() + namespace
self.client = self._get_client()
def tearDown(self):
if self.client.state == KazooState.LOST:
self.client.connect()
self.client.stop()
|
6b0167514bb41f877945b408638fab72873f2da8
|
postgres_copy/__init__.py
|
postgres_copy/__init__.py
|
from django.db import models
from django.db import connection
from .copy_from import CopyMapping
from .copy_to import SQLCopyToCompiler, CopyToQuery
__version__ = '2.0.0'
class CopyQuerySet(models.QuerySet):
    """
    Subclass of QuerySet that adds from_csv and to_csv methods.
    """

    def from_csv(self, csv_path, mapping, **kwargs):
        """
        Copy CSV file from the provided path to the current model using the
        provided mapping.

        Extra keyword arguments are forwarded to CopyMapping unchanged.
        """
        mapping = CopyMapping(self.model, csv_path, mapping, **kwargs)
        # silent=True suppresses CopyMapping's progress output during save.
        mapping.save(silent=True)

    def to_csv(self, csv_path, *fields):
        """
        Copy current QuerySet to CSV at provided path.

        Optional positional *fields* restrict which columns are exported.
        """
        # Re-type the underlying query as a CopyToQuery so the custom
        # SQLCopyToCompiler is used to emit a COPY TO statement.
        query = self.query.clone(CopyToQuery)
        query.copy_to_fields = fields
        compiler = query.get_compiler(self.db, connection=connection)
        compiler.execute_sql(csv_path)
CopyManager = models.Manager.from_queryset(CopyQuerySet)
__all__ = (
'CopyMapping',
'SQLCopyToCompiler',
'CopyToQuery',
'CopyManager',
)
|
from django.db import models
from django.db import connection
from .copy_from import CopyMapping
from .copy_to import SQLCopyToCompiler, CopyToQuery
__version__ = '2.0.0'
class CopyQuerySet(models.QuerySet):
"""
Subclass of QuerySet that adds from_csv and to_csv methods.
"""
def from_csv(self, csv_path, mapping, **kwargs):
"""
Copy CSV file from the provided path to the current model using the provided mapping.
"""
mapping = CopyMapping(self.model, csv_path, mapping, **kwargs)
mapping.save(silent=True)
def to_csv(self, csv_path, *fields):
"""
Copy current QuerySet to CSV at provided path.
"""
query = self.query.clone(CopyToQuery)
query.copy_to_fields = fields
compiler = query.get_compiler(self.db, connection=connection)
compiler.execute_sql(csv_path)
CopyManager = models.Manager.from_queryset(CopyQuerySet)
__all__ = (
'CopyManager',
'CopyMapping',
'CopyToQuery',
'CopyToQuerySet',
'SQLCopyToCompiler',
)
|
Add CopyToQuerySet to available imports
|
Add CopyToQuerySet to available imports
|
Python
|
mit
|
california-civic-data-coalition/django-postgres-copy
|
from django.db import models
from django.db import connection
from .copy_from import CopyMapping
from .copy_to import SQLCopyToCompiler, CopyToQuery
__version__ = '2.0.0'
class CopyQuerySet(models.QuerySet):
"""
Subclass of QuerySet that adds from_csv and to_csv methods.
"""
def from_csv(self, csv_path, mapping, **kwargs):
"""
Copy CSV file from the provided path to the current model using the provided mapping.
"""
mapping = CopyMapping(self.model, csv_path, mapping, **kwargs)
mapping.save(silent=True)
def to_csv(self, csv_path, *fields):
"""
Copy current QuerySet to CSV at provided path.
"""
query = self.query.clone(CopyToQuery)
query.copy_to_fields = fields
compiler = query.get_compiler(self.db, connection=connection)
compiler.execute_sql(csv_path)
CopyManager = models.Manager.from_queryset(CopyQuerySet)
__all__ = (
+ 'CopyManager',
'CopyMapping',
+ 'CopyToQuery',
+ 'CopyToQuerySet',
'SQLCopyToCompiler',
- 'CopyToQuery',
- 'CopyManager',
)
|
Add CopyToQuerySet to available imports
|
## Code Before:
from django.db import models
from django.db import connection
from .copy_from import CopyMapping
from .copy_to import SQLCopyToCompiler, CopyToQuery
__version__ = '2.0.0'
class CopyQuerySet(models.QuerySet):
"""
Subclass of QuerySet that adds from_csv and to_csv methods.
"""
def from_csv(self, csv_path, mapping, **kwargs):
"""
Copy CSV file from the provided path to the current model using the provided mapping.
"""
mapping = CopyMapping(self.model, csv_path, mapping, **kwargs)
mapping.save(silent=True)
def to_csv(self, csv_path, *fields):
"""
Copy current QuerySet to CSV at provided path.
"""
query = self.query.clone(CopyToQuery)
query.copy_to_fields = fields
compiler = query.get_compiler(self.db, connection=connection)
compiler.execute_sql(csv_path)
CopyManager = models.Manager.from_queryset(CopyQuerySet)
__all__ = (
'CopyMapping',
'SQLCopyToCompiler',
'CopyToQuery',
'CopyManager',
)
## Instruction:
Add CopyToQuerySet to available imports
## Code After:
from django.db import models
from django.db import connection
from .copy_from import CopyMapping
from .copy_to import SQLCopyToCompiler, CopyToQuery
__version__ = '2.0.0'
class CopyQuerySet(models.QuerySet):
"""
Subclass of QuerySet that adds from_csv and to_csv methods.
"""
def from_csv(self, csv_path, mapping, **kwargs):
"""
Copy CSV file from the provided path to the current model using the provided mapping.
"""
mapping = CopyMapping(self.model, csv_path, mapping, **kwargs)
mapping.save(silent=True)
def to_csv(self, csv_path, *fields):
"""
Copy current QuerySet to CSV at provided path.
"""
query = self.query.clone(CopyToQuery)
query.copy_to_fields = fields
compiler = query.get_compiler(self.db, connection=connection)
compiler.execute_sql(csv_path)
CopyManager = models.Manager.from_queryset(CopyQuerySet)
__all__ = (
'CopyManager',
'CopyMapping',
'CopyToQuery',
'CopyToQuerySet',
'SQLCopyToCompiler',
)
|
from django.db import models
from django.db import connection
from .copy_from import CopyMapping
from .copy_to import SQLCopyToCompiler, CopyToQuery
__version__ = '2.0.0'
class CopyQuerySet(models.QuerySet):
"""
Subclass of QuerySet that adds from_csv and to_csv methods.
"""
def from_csv(self, csv_path, mapping, **kwargs):
"""
Copy CSV file from the provided path to the current model using the provided mapping.
"""
mapping = CopyMapping(self.model, csv_path, mapping, **kwargs)
mapping.save(silent=True)
def to_csv(self, csv_path, *fields):
"""
Copy current QuerySet to CSV at provided path.
"""
query = self.query.clone(CopyToQuery)
query.copy_to_fields = fields
compiler = query.get_compiler(self.db, connection=connection)
compiler.execute_sql(csv_path)
CopyManager = models.Manager.from_queryset(CopyQuerySet)
__all__ = (
+ 'CopyManager',
'CopyMapping',
+ 'CopyToQuery',
+ 'CopyToQuerySet',
'SQLCopyToCompiler',
- 'CopyToQuery',
- 'CopyManager',
)
|
437623aee55fd68683126bd6852df52379837eaa
|
bash_command.py
|
bash_command.py
|
import sublime, sublime_plugin
import os
from .common.utils import run_bash_for_output
from .common.utils import git_path_for_window
last_command = ""
class RunBash(sublime_plugin.WindowCommand):
    """Sublime Text window command that prompts for and runs a bash command.

    The last command entered is remembered in the module-level
    ``last_command`` so it is pre-filled on the next invocation.
    """

    def run(self):
        global last_command
        window = self.window
        view = window.active_view()
        # Only offer the prompt when the active view is backed by a file;
        # its directory becomes the default working directory.
        if view.file_name() is not None:
            # Trailing os.path.join(..., '') ensures the path ends with a separator.
            path = os.path.join(os.path.dirname(view.file_name()), '')
            window.show_input_panel(
                'Bash:',
                last_command,
                lambda command: (
                    self.run_bash(path, command)
                ),
                None,
                None
            )

    def run_bash(self, path, command):
        """Execute *command* in *path* and show its output in a scratch view."""
        global last_command
        last_command = command
        # A leading '$' means "run from the window's git repository root"
        # instead of the current file's directory.
        # NOTE(review): indentation of the two lines below was reconstructed;
        # both appear to belong inside this branch — confirm against upstream.
        if command.startswith('$'):
            command = command[1:]
            path = git_path_for_window(self.window)
        final_command = "cd '{0}'; {1}".format(path, command)
        output, _ = run_bash_for_output(final_command)
        print(final_command, " ", output)
        results_view = self.window.new_file()
        results_view.set_scratch(True)
        results_view.set_name("BashOutput")
        # deps: this is from utilities.py
        results_view.run_command('replace_content', {"new_content": output})
        # Move the cursor to the top of the output buffer.
        results_view.sel().clear()
        results_view.sel().add(sublime.Region(0, 0))
        self.window.focus_view(results_view)
|
import sublime, sublime_plugin
import os
from .common.utils import run_bash_for_output
from .common.utils import git_path_for_window
last_command = ""
class RunBash(sublime_plugin.WindowCommand):
def run(self):
global last_command
window = self.window
view = window.active_view()
if view.file_name() is not None:
path = os.path.join(os.path.dirname(view.file_name()), '')
window.show_input_panel(
'Bash:',
last_command,
lambda command: (
self.run_bash(path, command)
),
None,
None
)
def run_bash(self, path, command):
global last_command
last_command = command
if command.startswith('$'):
command = command[1:]
path = git_path_for_window(self.window)
final_command = "cd '{0}'; {1}".format(path, command)
output, err = run_bash_for_output(final_command)
new_content = output + '\n' + (100 * '=') + '\n' + err
results_view = self.window.new_file()
results_view.set_scratch(True)
results_view.set_name("BashOutput")
# deps: this is from utilities.py
results_view.run_command('replace_content', {"new_content": new_content})
results_view.sel().clear()
results_view.sel().add(sublime.Region(0, 0))
self.window.focus_view(results_view)
|
Print both output + error for bash command
|
Print both output + error for bash command
|
Python
|
mit
|
ktuan89/sublimeplugins
|
import sublime, sublime_plugin
import os
from .common.utils import run_bash_for_output
from .common.utils import git_path_for_window
last_command = ""
class RunBash(sublime_plugin.WindowCommand):
def run(self):
global last_command
window = self.window
view = window.active_view()
if view.file_name() is not None:
path = os.path.join(os.path.dirname(view.file_name()), '')
window.show_input_panel(
'Bash:',
last_command,
lambda command: (
self.run_bash(path, command)
),
None,
None
)
def run_bash(self, path, command):
global last_command
last_command = command
if command.startswith('$'):
command = command[1:]
path = git_path_for_window(self.window)
final_command = "cd '{0}'; {1}".format(path, command)
- output, _ = run_bash_for_output(final_command)
+ output, err = run_bash_for_output(final_command)
- print(final_command, " ", output)
+ new_content = output + '\n' + (100 * '=') + '\n' + err
results_view = self.window.new_file()
results_view.set_scratch(True)
results_view.set_name("BashOutput")
# deps: this is from utilities.py
- results_view.run_command('replace_content', {"new_content": output})
+ results_view.run_command('replace_content', {"new_content": new_content})
results_view.sel().clear()
results_view.sel().add(sublime.Region(0, 0))
self.window.focus_view(results_view)
|
Print both output + error for bash command
|
## Code Before:
import sublime, sublime_plugin
import os
from .common.utils import run_bash_for_output
from .common.utils import git_path_for_window
last_command = ""
class RunBash(sublime_plugin.WindowCommand):
def run(self):
global last_command
window = self.window
view = window.active_view()
if view.file_name() is not None:
path = os.path.join(os.path.dirname(view.file_name()), '')
window.show_input_panel(
'Bash:',
last_command,
lambda command: (
self.run_bash(path, command)
),
None,
None
)
def run_bash(self, path, command):
global last_command
last_command = command
if command.startswith('$'):
command = command[1:]
path = git_path_for_window(self.window)
final_command = "cd '{0}'; {1}".format(path, command)
output, _ = run_bash_for_output(final_command)
print(final_command, " ", output)
results_view = self.window.new_file()
results_view.set_scratch(True)
results_view.set_name("BashOutput")
# deps: this is from utilities.py
results_view.run_command('replace_content', {"new_content": output})
results_view.sel().clear()
results_view.sel().add(sublime.Region(0, 0))
self.window.focus_view(results_view)
## Instruction:
Print both output + error for bash command
## Code After:
import sublime, sublime_plugin
import os
from .common.utils import run_bash_for_output
from .common.utils import git_path_for_window
last_command = ""
class RunBash(sublime_plugin.WindowCommand):
def run(self):
global last_command
window = self.window
view = window.active_view()
if view.file_name() is not None:
path = os.path.join(os.path.dirname(view.file_name()), '')
window.show_input_panel(
'Bash:',
last_command,
lambda command: (
self.run_bash(path, command)
),
None,
None
)
def run_bash(self, path, command):
global last_command
last_command = command
if command.startswith('$'):
command = command[1:]
path = git_path_for_window(self.window)
final_command = "cd '{0}'; {1}".format(path, command)
output, err = run_bash_for_output(final_command)
new_content = output + '\n' + (100 * '=') + '\n' + err
results_view = self.window.new_file()
results_view.set_scratch(True)
results_view.set_name("BashOutput")
# deps: this is from utilities.py
results_view.run_command('replace_content', {"new_content": new_content})
results_view.sel().clear()
results_view.sel().add(sublime.Region(0, 0))
self.window.focus_view(results_view)
|
import sublime, sublime_plugin
import os
from .common.utils import run_bash_for_output
from .common.utils import git_path_for_window
last_command = ""
class RunBash(sublime_plugin.WindowCommand):
def run(self):
global last_command
window = self.window
view = window.active_view()
if view.file_name() is not None:
path = os.path.join(os.path.dirname(view.file_name()), '')
window.show_input_panel(
'Bash:',
last_command,
lambda command: (
self.run_bash(path, command)
),
None,
None
)
def run_bash(self, path, command):
global last_command
last_command = command
if command.startswith('$'):
command = command[1:]
path = git_path_for_window(self.window)
final_command = "cd '{0}'; {1}".format(path, command)
- output, _ = run_bash_for_output(final_command)
? ^
+ output, err = run_bash_for_output(final_command)
? ^^^
- print(final_command, " ", output)
+ new_content = output + '\n' + (100 * '=') + '\n' + err
results_view = self.window.new_file()
results_view.set_scratch(True)
results_view.set_name("BashOutput")
# deps: this is from utilities.py
- results_view.run_command('replace_content', {"new_content": output})
? ^ ^^
+ results_view.run_command('replace_content', {"new_content": new_content})
? +++++ ^ ^^
results_view.sel().clear()
results_view.sel().add(sublime.Region(0, 0))
self.window.focus_view(results_view)
|
02efde47b5cf20b7385eacaa3f21454ffa636ad7
|
troposphere/codestarconnections.py
|
troposphere/codestarconnections.py
|
from . import AWSObject, Tags
VALID_CONNECTION_PROVIDERTYPE = ('Bitbucket')
def validate_connection_providertype(connection_providertype):
    """Validate ProviderType for Connection.

    Returns the value unchanged when valid; raises ValueError otherwise.

    NOTE(review): VALID_CONNECTION_PROVIDERTYPE is defined as ('Bitbucket'),
    which is a plain string, not a one-element tuple — so ``not in`` performs
    substring matching and ``", ".join(...)`` joins individual characters.
    It likely should be ('Bitbucket',); confirm and fix at the definition.
    """
    if connection_providertype not in VALID_CONNECTION_PROVIDERTYPE:
        raise ValueError("Connection ProviderType must be one of: %s" %
                         ", ".join(VALID_CONNECTION_PROVIDERTYPE))
    return connection_providertype
class Connection(AWSObject):
    """Troposphere model for the AWS::CodeStarConnections::Connection resource.

    ``props`` maps each CloudFormation property name to a
    (validator-or-type, required) pair, per the AWSObject convention.
    """
    resource_type = "AWS::CodeStarConnections::Connection"

    props = {
        'ConnectionName': (basestring, True),
        # ProviderType is checked against the module's allowed provider list.
        'ProviderType': (validate_connection_providertype, True),
        'Tags': (Tags, False),
    }
|
from . import AWSObject, Tags
VALID_CONNECTION_PROVIDERTYPE = ('Bitbucket')
def validate_connection_providertype(connection_providertype):
"""Validate ProviderType for Connection"""
if connection_providertype not in VALID_CONNECTION_PROVIDERTYPE:
raise ValueError("Connection ProviderType must be one of: %s" %
", ".join(VALID_CONNECTION_PROVIDERTYPE))
return connection_providertype
class Connection(AWSObject):
resource_type = "AWS::CodeStarConnections::Connection"
props = {
'ConnectionName': (basestring, True),
'HostArn': (basestring, False),
'ProviderType': (validate_connection_providertype, True),
'Tags': (Tags, False),
}
|
Update CodeStarConnections::Connection per 2020-07-23 update
|
Update CodeStarConnections::Connection per 2020-07-23 update
|
Python
|
bsd-2-clause
|
cloudtools/troposphere,cloudtools/troposphere
|
from . import AWSObject, Tags
VALID_CONNECTION_PROVIDERTYPE = ('Bitbucket')
def validate_connection_providertype(connection_providertype):
"""Validate ProviderType for Connection"""
if connection_providertype not in VALID_CONNECTION_PROVIDERTYPE:
raise ValueError("Connection ProviderType must be one of: %s" %
", ".join(VALID_CONNECTION_PROVIDERTYPE))
return connection_providertype
class Connection(AWSObject):
resource_type = "AWS::CodeStarConnections::Connection"
props = {
'ConnectionName': (basestring, True),
+ 'HostArn': (basestring, False),
'ProviderType': (validate_connection_providertype, True),
'Tags': (Tags, False),
}
|
Update CodeStarConnections::Connection per 2020-07-23 update
|
## Code Before:
from . import AWSObject, Tags
VALID_CONNECTION_PROVIDERTYPE = ('Bitbucket')
def validate_connection_providertype(connection_providertype):
"""Validate ProviderType for Connection"""
if connection_providertype not in VALID_CONNECTION_PROVIDERTYPE:
raise ValueError("Connection ProviderType must be one of: %s" %
", ".join(VALID_CONNECTION_PROVIDERTYPE))
return connection_providertype
class Connection(AWSObject):
resource_type = "AWS::CodeStarConnections::Connection"
props = {
'ConnectionName': (basestring, True),
'ProviderType': (validate_connection_providertype, True),
'Tags': (Tags, False),
}
## Instruction:
Update CodeStarConnections::Connection per 2020-07-23 update
## Code After:
from . import AWSObject, Tags
VALID_CONNECTION_PROVIDERTYPE = ('Bitbucket')
def validate_connection_providertype(connection_providertype):
"""Validate ProviderType for Connection"""
if connection_providertype not in VALID_CONNECTION_PROVIDERTYPE:
raise ValueError("Connection ProviderType must be one of: %s" %
", ".join(VALID_CONNECTION_PROVIDERTYPE))
return connection_providertype
class Connection(AWSObject):
resource_type = "AWS::CodeStarConnections::Connection"
props = {
'ConnectionName': (basestring, True),
'HostArn': (basestring, False),
'ProviderType': (validate_connection_providertype, True),
'Tags': (Tags, False),
}
|
from . import AWSObject, Tags
VALID_CONNECTION_PROVIDERTYPE = ('Bitbucket')
def validate_connection_providertype(connection_providertype):
"""Validate ProviderType for Connection"""
if connection_providertype not in VALID_CONNECTION_PROVIDERTYPE:
raise ValueError("Connection ProviderType must be one of: %s" %
", ".join(VALID_CONNECTION_PROVIDERTYPE))
return connection_providertype
class Connection(AWSObject):
resource_type = "AWS::CodeStarConnections::Connection"
props = {
'ConnectionName': (basestring, True),
+ 'HostArn': (basestring, False),
'ProviderType': (validate_connection_providertype, True),
'Tags': (Tags, False),
}
|
9efd7f63f12affffb58b7e243777432a331e91f2
|
examples/add_command_line_argument.py
|
examples/add_command_line_argument.py
|
from locust import HttpUser, TaskSet, task, between
from locust import events
@events.init_command_line_parser.add_listener
def _(parser):
parser.add_argument(
'--custom-argument',
help="It's working"
)
@events.init.add_listener
def _(environment, **kw):
print("Custom argument supplied: %s" % environment.parsed_options.custom_argument)
class WebsiteUser(HttpUser):
"""
User class that does requests to the locust web server running on localhost
"""
host = "http://127.0.0.1:8089"
wait_time = between(2, 5)
@task
def my_task(self):
pass
|
from locust import HttpUser, TaskSet, task, between
from locust import events
@events.init_command_line_parser.add_listener
def _(parser):
parser.add_argument(
"--my-argument",
type=str,
env_var="LOCUST_MY_ARGUMENT",
default="",
help="It's working"
)
@events.init.add_listener
def _(environment, **kw):
print("Custom argument supplied: %s" % environment.parsed_options.my_argument)
class WebsiteUser(HttpUser):
"""
User class that does requests to the locust web server running on localhost
"""
host = "http://127.0.0.1:8089"
wait_time = between(2, 5)
@task
def my_task(self):
pass
|
Add type, env var and default value to custom argument example. And rename it so that nobody thinks the "custom" in the name has a specific meaning.
|
Add type, env var and default value to custom argument example. And rename it so that nobody thinks the "custom" in the name has a specific meaning.
|
Python
|
mit
|
mbeacom/locust,mbeacom/locust,locustio/locust,locustio/locust,mbeacom/locust,locustio/locust,locustio/locust,mbeacom/locust
|
from locust import HttpUser, TaskSet, task, between
from locust import events
@events.init_command_line_parser.add_listener
def _(parser):
parser.add_argument(
- '--custom-argument',
+ "--my-argument",
+ type=str,
+ env_var="LOCUST_MY_ARGUMENT",
+ default="",
help="It's working"
)
+
@events.init.add_listener
def _(environment, **kw):
- print("Custom argument supplied: %s" % environment.parsed_options.custom_argument)
+ print("Custom argument supplied: %s" % environment.parsed_options.my_argument)
class WebsiteUser(HttpUser):
"""
User class that does requests to the locust web server running on localhost
"""
+
host = "http://127.0.0.1:8089"
wait_time = between(2, 5)
+
@task
def my_task(self):
pass
|
Add type, env var and default value to custom argument example. And rename it so that nobody thinks the "custom" in the name has a specific meaning.
|
## Code Before:
from locust import HttpUser, TaskSet, task, between
from locust import events
@events.init_command_line_parser.add_listener
def _(parser):
parser.add_argument(
'--custom-argument',
help="It's working"
)
@events.init.add_listener
def _(environment, **kw):
print("Custom argument supplied: %s" % environment.parsed_options.custom_argument)
class WebsiteUser(HttpUser):
"""
User class that does requests to the locust web server running on localhost
"""
host = "http://127.0.0.1:8089"
wait_time = between(2, 5)
@task
def my_task(self):
pass
## Instruction:
Add type, env var and default value to custom argument example. And rename it so that nobody thinks the "custom" in the name has a specific meaning.
## Code After:
from locust import HttpUser, TaskSet, task, between
from locust import events
@events.init_command_line_parser.add_listener
def _(parser):
parser.add_argument(
"--my-argument",
type=str,
env_var="LOCUST_MY_ARGUMENT",
default="",
help="It's working"
)
@events.init.add_listener
def _(environment, **kw):
print("Custom argument supplied: %s" % environment.parsed_options.my_argument)
class WebsiteUser(HttpUser):
"""
User class that does requests to the locust web server running on localhost
"""
host = "http://127.0.0.1:8089"
wait_time = between(2, 5)
@task
def my_task(self):
pass
|
from locust import HttpUser, TaskSet, task, between
from locust import events
@events.init_command_line_parser.add_listener
def _(parser):
parser.add_argument(
- '--custom-argument',
? ^ ----- ^
+ "--my-argument",
? ^ + ^
+ type=str,
+ env_var="LOCUST_MY_ARGUMENT",
+ default="",
help="It's working"
)
+
@events.init.add_listener
def _(environment, **kw):
- print("Custom argument supplied: %s" % environment.parsed_options.custom_argument)
? -----
+ print("Custom argument supplied: %s" % environment.parsed_options.my_argument)
? +
class WebsiteUser(HttpUser):
"""
User class that does requests to the locust web server running on localhost
"""
+
host = "http://127.0.0.1:8089"
wait_time = between(2, 5)
+
@task
def my_task(self):
pass
|
e3a530d741529a7bbfeb274c232e2c6b8a5faddc
|
kokki/cookbooks/postgresql9/recipes/default.py
|
kokki/cookbooks/postgresql9/recipes/default.py
|
import os
from kokki import Execute, Package
apt_list_path = '/etc/apt/sources.list.d/pitti-postgresql-lucid.list'
Execute("apt-update-postgresql9",
command = "apt-get update",
action = "nothing")
apt = None
if env.system.platform == "ubuntu":
Package("python-software-properties")
Execute("add-apt-repository ppa:pitti/postgresql",
not_if = lambda:os.path.exists(apt_list_path),
notifies = [("run", env.resources["Execute"]["apt-update-postgresql9"], True)])
|
import os
from kokki import Execute, Package
if not (env.system.platform == "ubuntu" and env.system.lsb['release'] in ["11.10"]):
apt_list_path = '/etc/apt/sources.list.d/pitti-postgresql-lucid.list'
Execute("apt-update-postgresql9",
command = "apt-get update",
action = "nothing")
apt = None
if env.system.platform == "ubuntu":
Package("python-software-properties")
Execute("add-apt-repository ppa:pitti/postgresql",
not_if = lambda:os.path.exists(apt_list_path),
notifies = [("run", env.resources["Execute"]["apt-update-postgresql9"], True)])
|
Use standard repo for postgresql9 in ubuntu 11.10
|
Use standard repo for postgresql9 in ubuntu 11.10
|
Python
|
bsd-3-clause
|
samuel/kokki
|
import os
from kokki import Execute, Package
+ if not (env.system.platform == "ubuntu" and env.system.lsb['release'] in ["11.10"]):
- apt_list_path = '/etc/apt/sources.list.d/pitti-postgresql-lucid.list'
+ apt_list_path = '/etc/apt/sources.list.d/pitti-postgresql-lucid.list'
- Execute("apt-update-postgresql9",
+ Execute("apt-update-postgresql9",
- command = "apt-get update",
+ command = "apt-get update",
- action = "nothing")
+ action = "nothing")
- apt = None
+ apt = None
- if env.system.platform == "ubuntu":
+ if env.system.platform == "ubuntu":
- Package("python-software-properties")
+ Package("python-software-properties")
- Execute("add-apt-repository ppa:pitti/postgresql",
+ Execute("add-apt-repository ppa:pitti/postgresql",
- not_if = lambda:os.path.exists(apt_list_path),
+ not_if = lambda:os.path.exists(apt_list_path),
- notifies = [("run", env.resources["Execute"]["apt-update-postgresql9"], True)])
+ notifies = [("run", env.resources["Execute"]["apt-update-postgresql9"], True)])
|
Use standard repo for postgresql9 in ubuntu 11.10
|
## Code Before:
import os
from kokki import Execute, Package
apt_list_path = '/etc/apt/sources.list.d/pitti-postgresql-lucid.list'
Execute("apt-update-postgresql9",
command = "apt-get update",
action = "nothing")
apt = None
if env.system.platform == "ubuntu":
Package("python-software-properties")
Execute("add-apt-repository ppa:pitti/postgresql",
not_if = lambda:os.path.exists(apt_list_path),
notifies = [("run", env.resources["Execute"]["apt-update-postgresql9"], True)])
## Instruction:
Use standard repo for postgresql9 in ubuntu 11.10
## Code After:
import os
from kokki import Execute, Package
if not (env.system.platform == "ubuntu" and env.system.lsb['release'] in ["11.10"]):
apt_list_path = '/etc/apt/sources.list.d/pitti-postgresql-lucid.list'
Execute("apt-update-postgresql9",
command = "apt-get update",
action = "nothing")
apt = None
if env.system.platform == "ubuntu":
Package("python-software-properties")
Execute("add-apt-repository ppa:pitti/postgresql",
not_if = lambda:os.path.exists(apt_list_path),
notifies = [("run", env.resources["Execute"]["apt-update-postgresql9"], True)])
|
import os
from kokki import Execute, Package
+ if not (env.system.platform == "ubuntu" and env.system.lsb['release'] in ["11.10"]):
- apt_list_path = '/etc/apt/sources.list.d/pitti-postgresql-lucid.list'
+ apt_list_path = '/etc/apt/sources.list.d/pitti-postgresql-lucid.list'
? ++++
- Execute("apt-update-postgresql9",
+ Execute("apt-update-postgresql9",
? ++++
- command = "apt-get update",
+ command = "apt-get update",
? ++++
- action = "nothing")
+ action = "nothing")
? ++++
- apt = None
+ apt = None
? ++++
- if env.system.platform == "ubuntu":
+ if env.system.platform == "ubuntu":
? ++++
- Package("python-software-properties")
+ Package("python-software-properties")
? ++++
- Execute("add-apt-repository ppa:pitti/postgresql",
+ Execute("add-apt-repository ppa:pitti/postgresql",
? ++++
- not_if = lambda:os.path.exists(apt_list_path),
+ not_if = lambda:os.path.exists(apt_list_path),
? ++++
- notifies = [("run", env.resources["Execute"]["apt-update-postgresql9"], True)])
+ notifies = [("run", env.resources["Execute"]["apt-update-postgresql9"], True)])
? ++++
|
346ffdb3e3836e2931f838a6dd929a325da0d5e6
|
tests/test_arithmetic.py
|
tests/test_arithmetic.py
|
from intervals import Interval
class TestArithmeticOperators(object):
def test_add_operator(self):
assert Interval(1, 2) + Interval(1, 2) == Interval(2, 4)
def test_sub_operator(self):
assert Interval(1, 3) - Interval(1, 2) == Interval(-1, 2)
def test_isub_operator(self):
range_ = Interval(1, 3)
range_ -= Interval(1, 2)
assert range_ == Interval(-1, 2)
def test_iadd_operator(self):
range_ = Interval(1, 2)
range_ += Interval(1, 2)
assert range_ == Interval(2, 4)
|
from pytest import mark
from intervals import Interval
class TestArithmeticOperators(object):
def test_add_operator(self):
assert Interval(1, 2) + Interval(1, 2) == Interval(2, 4)
def test_sub_operator(self):
assert Interval(1, 3) - Interval(1, 2) == Interval(-1, 2)
def test_isub_operator(self):
range_ = Interval(1, 3)
range_ -= Interval(1, 2)
assert range_ == Interval(-1, 2)
def test_iadd_operator(self):
range_ = Interval(1, 2)
range_ += Interval(1, 2)
assert range_ == Interval(2, 4)
@mark.parametrize(('first', 'second', 'intersection'), (
('[1, 5]', '[2, 9]', '[2, 5]'),
('[3, 4]', '[3, 9]', '[3, 4]'),
('(3, 6]', '[2, 6)', '(3, 6)')
))
def test_intersection(self, first, second, intersection):
Interval(first) & Interval(second) == Interval(intersection)
|
Add some tests for intersection
|
Add some tests for intersection
|
Python
|
bsd-3-clause
|
kvesteri/intervals
|
+ from pytest import mark
from intervals import Interval
class TestArithmeticOperators(object):
def test_add_operator(self):
assert Interval(1, 2) + Interval(1, 2) == Interval(2, 4)
def test_sub_operator(self):
assert Interval(1, 3) - Interval(1, 2) == Interval(-1, 2)
def test_isub_operator(self):
range_ = Interval(1, 3)
range_ -= Interval(1, 2)
assert range_ == Interval(-1, 2)
def test_iadd_operator(self):
range_ = Interval(1, 2)
range_ += Interval(1, 2)
assert range_ == Interval(2, 4)
+ @mark.parametrize(('first', 'second', 'intersection'), (
+ ('[1, 5]', '[2, 9]', '[2, 5]'),
+ ('[3, 4]', '[3, 9]', '[3, 4]'),
+ ('(3, 6]', '[2, 6)', '(3, 6)')
+ ))
+ def test_intersection(self, first, second, intersection):
+ Interval(first) & Interval(second) == Interval(intersection)
+
|
Add some tests for intersection
|
## Code Before:
from intervals import Interval
class TestArithmeticOperators(object):
def test_add_operator(self):
assert Interval(1, 2) + Interval(1, 2) == Interval(2, 4)
def test_sub_operator(self):
assert Interval(1, 3) - Interval(1, 2) == Interval(-1, 2)
def test_isub_operator(self):
range_ = Interval(1, 3)
range_ -= Interval(1, 2)
assert range_ == Interval(-1, 2)
def test_iadd_operator(self):
range_ = Interval(1, 2)
range_ += Interval(1, 2)
assert range_ == Interval(2, 4)
## Instruction:
Add some tests for intersection
## Code After:
from pytest import mark
from intervals import Interval
class TestArithmeticOperators(object):
def test_add_operator(self):
assert Interval(1, 2) + Interval(1, 2) == Interval(2, 4)
def test_sub_operator(self):
assert Interval(1, 3) - Interval(1, 2) == Interval(-1, 2)
def test_isub_operator(self):
range_ = Interval(1, 3)
range_ -= Interval(1, 2)
assert range_ == Interval(-1, 2)
def test_iadd_operator(self):
range_ = Interval(1, 2)
range_ += Interval(1, 2)
assert range_ == Interval(2, 4)
@mark.parametrize(('first', 'second', 'intersection'), (
('[1, 5]', '[2, 9]', '[2, 5]'),
('[3, 4]', '[3, 9]', '[3, 4]'),
('(3, 6]', '[2, 6)', '(3, 6)')
))
def test_intersection(self, first, second, intersection):
Interval(first) & Interval(second) == Interval(intersection)
|
+ from pytest import mark
from intervals import Interval
class TestArithmeticOperators(object):
def test_add_operator(self):
assert Interval(1, 2) + Interval(1, 2) == Interval(2, 4)
def test_sub_operator(self):
assert Interval(1, 3) - Interval(1, 2) == Interval(-1, 2)
def test_isub_operator(self):
range_ = Interval(1, 3)
range_ -= Interval(1, 2)
assert range_ == Interval(-1, 2)
def test_iadd_operator(self):
range_ = Interval(1, 2)
range_ += Interval(1, 2)
assert range_ == Interval(2, 4)
+
+ @mark.parametrize(('first', 'second', 'intersection'), (
+ ('[1, 5]', '[2, 9]', '[2, 5]'),
+ ('[3, 4]', '[3, 9]', '[3, 4]'),
+ ('(3, 6]', '[2, 6)', '(3, 6)')
+ ))
+ def test_intersection(self, first, second, intersection):
+ Interval(first) & Interval(second) == Interval(intersection)
|
cdf3686150309800cb28f584b64b9175aa4b5662
|
tests/unit_tests/gather_tests/MameSink_test.py
|
tests/unit_tests/gather_tests/MameSink_test.py
|
import pytest
from cps2_zmq.gather import MameSink
@pytest.mark.parametrize("message, expected",[
({'wid': 420, 'message': 'closing'}, 'worksink closing'),
({'wid': 420, 'message': 'threaddead'}, '420 is dead'),
({'wid': 420, 'message': 'some result'}, 'another message'),
])
def test_process_message(message, expected, worker):
sink = MameSink.MameSink("inproc://help")
worker.wid = message['wid']
sink.setup_workers2([worker])
result = sink._process_message(message)
assert result == expected
sink._cleanup()
# @pytest.mark.parametrize("messages, expected", [
# ([{'frame_number': 1141, 'sprites': [[420, 69, 300, 1], [1, 1, 1, 1]], 'palettes': [[]]},
# {'frame_number': 0, 'sprites': [], 'palettes': []}], 1)
# ])
# @pytest.mark.timeout(timeout=10, method='thread')
# def test_run(workers, messages, expected):
# sink = MameSink.MameSink("inproc://frommockworkers")
# sink.setup_workers2(workers)
# pass
|
import pytest
from cps2_zmq.gather import MameSink
@pytest.fixture(scope="module")
def sink():
sink = MameSink.MameSink("inproc://frommockworkers")
yield sink
sink.cleanup()
class TestSink(object):
@pytest.fixture(autouse=True)
def refresh(self, sink):
pass
yield
sink._msgsrecv = 0
@pytest.mark.parametrize("message, expected", [
({'wid': 420, 'message': 'closing'}, 'worksink closing'),
({'wid': 420, 'message': 'threaddead'}, '420 is dead'),
({'wid': 420, 'message': 'some result'}, 'another message'),
])
def test_process_message(self, message, expected, sink, worker):
worker.wid = message['wid']
sink.setup_workers2([worker])
result = sink._process_message(message)
assert result == expected
def test_run(self, sink, tworkers):
# sink = MameSink.MameSink("inproc://frommockworkers")
messages = ['some result', 'closing', 'threaddead']
for worker in tworkers:
worker.messages = [{'wid' : worker.wid, 'message': msg} for msg in messages]
worker.connect_push("inproc://frommockworkers")
sink.setup_workers2(tworkers)
sink.start()
#block and let the sink run
sink.join()
assert not sink.workers
assert sink._msgsrecv == len(tworkers) * len(messages)
|
Test Class now returns to base state between different groups of tests
|
Test Class now returns to base state between different groups of tests
|
Python
|
mit
|
goosechooser/cps2-zmq
|
import pytest
from cps2_zmq.gather import MameSink
+ @pytest.fixture(scope="module")
+ def sink():
- @pytest.mark.parametrize("message, expected",[
- ({'wid': 420, 'message': 'closing'}, 'worksink closing'),
- ({'wid': 420, 'message': 'threaddead'}, '420 is dead'),
- ({'wid': 420, 'message': 'some result'}, 'another message'),
- ])
- def test_process_message(message, expected, worker):
- sink = MameSink.MameSink("inproc://help")
+ sink = MameSink.MameSink("inproc://frommockworkers")
- worker.wid = message['wid']
- sink.setup_workers2([worker])
+ yield sink
+ sink.cleanup()
- result = sink._process_message(message)
- assert result == expected
- sink._cleanup()
+ class TestSink(object):
+ @pytest.fixture(autouse=True)
+ def refresh(self, sink):
+ pass
+ yield
+ sink._msgsrecv = 0
- # @pytest.mark.parametrize("messages, expected", [
+ @pytest.mark.parametrize("message, expected", [
- # ([{'frame_number': 1141, 'sprites': [[420, 69, 300, 1], [1, 1, 1, 1]], 'palettes': [[]]},
- # {'frame_number': 0, 'sprites': [], 'palettes': []}], 1)
- # ])
- # @pytest.mark.timeout(timeout=10, method='thread')
- # def test_run(workers, messages, expected):
+ ({'wid': 420, 'message': 'closing'}, 'worksink closing'),
+ ({'wid': 420, 'message': 'threaddead'}, '420 is dead'),
+ ({'wid': 420, 'message': 'some result'}, 'another message'),
+ ])
+ def test_process_message(self, message, expected, sink, worker):
+ worker.wid = message['wid']
+ sink.setup_workers2([worker])
+
+ result = sink._process_message(message)
+ assert result == expected
+
+ def test_run(self, sink, tworkers):
- # sink = MameSink.MameSink("inproc://frommockworkers")
+ # sink = MameSink.MameSink("inproc://frommockworkers")
+
+ messages = ['some result', 'closing', 'threaddead']
+ for worker in tworkers:
+ worker.messages = [{'wid' : worker.wid, 'message': msg} for msg in messages]
+ worker.connect_push("inproc://frommockworkers")
+
- # sink.setup_workers2(workers)
+ sink.setup_workers2(tworkers)
- # pass
+
+ sink.start()
+ #block and let the sink run
+ sink.join()
+ assert not sink.workers
+ assert sink._msgsrecv == len(tworkers) * len(messages)
+
|
Test Class now returns to base state between different groups of tests
|
## Code Before:
import pytest
from cps2_zmq.gather import MameSink
@pytest.mark.parametrize("message, expected",[
({'wid': 420, 'message': 'closing'}, 'worksink closing'),
({'wid': 420, 'message': 'threaddead'}, '420 is dead'),
({'wid': 420, 'message': 'some result'}, 'another message'),
])
def test_process_message(message, expected, worker):
sink = MameSink.MameSink("inproc://help")
worker.wid = message['wid']
sink.setup_workers2([worker])
result = sink._process_message(message)
assert result == expected
sink._cleanup()
# @pytest.mark.parametrize("messages, expected", [
# ([{'frame_number': 1141, 'sprites': [[420, 69, 300, 1], [1, 1, 1, 1]], 'palettes': [[]]},
# {'frame_number': 0, 'sprites': [], 'palettes': []}], 1)
# ])
# @pytest.mark.timeout(timeout=10, method='thread')
# def test_run(workers, messages, expected):
# sink = MameSink.MameSink("inproc://frommockworkers")
# sink.setup_workers2(workers)
# pass
## Instruction:
Test Class now returns to base state between different groups of tests
## Code After:
import pytest
from cps2_zmq.gather import MameSink
@pytest.fixture(scope="module")
def sink():
sink = MameSink.MameSink("inproc://frommockworkers")
yield sink
sink.cleanup()
class TestSink(object):
@pytest.fixture(autouse=True)
def refresh(self, sink):
pass
yield
sink._msgsrecv = 0
@pytest.mark.parametrize("message, expected", [
({'wid': 420, 'message': 'closing'}, 'worksink closing'),
({'wid': 420, 'message': 'threaddead'}, '420 is dead'),
({'wid': 420, 'message': 'some result'}, 'another message'),
])
def test_process_message(self, message, expected, sink, worker):
worker.wid = message['wid']
sink.setup_workers2([worker])
result = sink._process_message(message)
assert result == expected
def test_run(self, sink, tworkers):
# sink = MameSink.MameSink("inproc://frommockworkers")
messages = ['some result', 'closing', 'threaddead']
for worker in tworkers:
worker.messages = [{'wid' : worker.wid, 'message': msg} for msg in messages]
worker.connect_push("inproc://frommockworkers")
sink.setup_workers2(tworkers)
sink.start()
#block and let the sink run
sink.join()
assert not sink.workers
assert sink._msgsrecv == len(tworkers) * len(messages)
|
import pytest
from cps2_zmq.gather import MameSink
+ @pytest.fixture(scope="module")
+ def sink():
- @pytest.mark.parametrize("message, expected",[
- ({'wid': 420, 'message': 'closing'}, 'worksink closing'),
- ({'wid': 420, 'message': 'threaddead'}, '420 is dead'),
- ({'wid': 420, 'message': 'some result'}, 'another message'),
- ])
- def test_process_message(message, expected, worker):
- sink = MameSink.MameSink("inproc://help")
? ^ ^^
+ sink = MameSink.MameSink("inproc://frommockworkers")
? ^^^^^^^^^^^^ ^^
- worker.wid = message['wid']
- sink.setup_workers2([worker])
+ yield sink
+ sink.cleanup()
- result = sink._process_message(message)
- assert result == expected
- sink._cleanup()
+ class TestSink(object):
+ @pytest.fixture(autouse=True)
+ def refresh(self, sink):
+ pass
+ yield
+ sink._msgsrecv = 0
- # @pytest.mark.parametrize("messages, expected", [
? ^ -
+ @pytest.mark.parametrize("message, expected", [
? ^^^
- # ([{'frame_number': 1141, 'sprites': [[420, 69, 300, 1], [1, 1, 1, 1]], 'palettes': [[]]},
- # {'frame_number': 0, 'sprites': [], 'palettes': []}], 1)
- # ])
- # @pytest.mark.timeout(timeout=10, method='thread')
- # def test_run(workers, messages, expected):
+ ({'wid': 420, 'message': 'closing'}, 'worksink closing'),
+ ({'wid': 420, 'message': 'threaddead'}, '420 is dead'),
+ ({'wid': 420, 'message': 'some result'}, 'another message'),
+ ])
+ def test_process_message(self, message, expected, sink, worker):
+ worker.wid = message['wid']
+ sink.setup_workers2([worker])
+
+ result = sink._process_message(message)
+ assert result == expected
+
+ def test_run(self, sink, tworkers):
- # sink = MameSink.MameSink("inproc://frommockworkers")
? -
+ # sink = MameSink.MameSink("inproc://frommockworkers")
? +++++
+
+ messages = ['some result', 'closing', 'threaddead']
+ for worker in tworkers:
+ worker.messages = [{'wid' : worker.wid, 'message': msg} for msg in messages]
+ worker.connect_push("inproc://frommockworkers")
+
- # sink.setup_workers2(workers)
? ^
+ sink.setup_workers2(tworkers)
? ^^^ +
- # pass
+
+ sink.start()
+ #block and let the sink run
+ sink.join()
+ assert not sink.workers
+ assert sink._msgsrecv == len(tworkers) * len(messages)
+
|
d9f388d2b486da3bd5e3209db70d3e691aec584d
|
clowder/clowder/cli/yaml_controller.py
|
clowder/clowder/cli/yaml_controller.py
|
from cement.ext.ext_argparse import expose
from clowder.cli.abstract_base_controller import AbstractBaseController
class YAMLController(AbstractBaseController):
class Meta:
label = 'yaml'
stacked_on = 'base'
stacked_type = 'nested'
description = 'Print clowder.yaml information'
arguments = [
(['--resolved', '-r'], dict(action='store_true', help='print resolved clowder.yaml'))
]
@expose(help="second-controller default command", hide=True)
def default(self):
print("Inside SecondController.default()")
|
from __future__ import print_function
import sys
from cement.ext.ext_argparse import expose
import clowder.util.formatting as fmt
from clowder.cli.abstract_base_controller import AbstractBaseController
from clowder.util.decorators import (
print_clowder_repo_status,
valid_clowder_yaml_required
)
from clowder.yaml.printing import print_yaml
class YAMLController(AbstractBaseController):
class Meta:
label = 'yaml'
stacked_on = 'base'
stacked_type = 'nested'
description = 'Print clowder.yaml information'
arguments = [
(['--resolved', '-r'], dict(action='store_true', help='print resolved clowder.yaml'))
]
@expose(help="second-controller default command", hide=True)
@valid_clowder_yaml_required
@print_clowder_repo_status
def default(self):
if self.app.pargs.resolved:
print(fmt.yaml_string(self.clowder.get_yaml_resolved()))
else:
print_yaml(self.clowder.root_directory)
sys.exit() # exit early to prevent printing extra newline
|
Add `clowder yaml` logic to Cement controller
|
Add `clowder yaml` logic to Cement controller
|
Python
|
mit
|
JrGoodle/clowder,JrGoodle/clowder,JrGoodle/clowder
|
+ from __future__ import print_function
+
+ import sys
+
from cement.ext.ext_argparse import expose
+ import clowder.util.formatting as fmt
from clowder.cli.abstract_base_controller import AbstractBaseController
+ from clowder.util.decorators import (
+ print_clowder_repo_status,
+ valid_clowder_yaml_required
+ )
+ from clowder.yaml.printing import print_yaml
class YAMLController(AbstractBaseController):
class Meta:
label = 'yaml'
stacked_on = 'base'
stacked_type = 'nested'
description = 'Print clowder.yaml information'
arguments = [
(['--resolved', '-r'], dict(action='store_true', help='print resolved clowder.yaml'))
]
@expose(help="second-controller default command", hide=True)
+ @valid_clowder_yaml_required
+ @print_clowder_repo_status
def default(self):
- print("Inside SecondController.default()")
+ if self.app.pargs.resolved:
+ print(fmt.yaml_string(self.clowder.get_yaml_resolved()))
+ else:
+ print_yaml(self.clowder.root_directory)
+ sys.exit() # exit early to prevent printing extra newline
|
Add `clowder yaml` logic to Cement controller
|
## Code Before:
from cement.ext.ext_argparse import expose
from clowder.cli.abstract_base_controller import AbstractBaseController
class YAMLController(AbstractBaseController):
class Meta:
label = 'yaml'
stacked_on = 'base'
stacked_type = 'nested'
description = 'Print clowder.yaml information'
arguments = [
(['--resolved', '-r'], dict(action='store_true', help='print resolved clowder.yaml'))
]
@expose(help="second-controller default command", hide=True)
def default(self):
print("Inside SecondController.default()")
## Instruction:
Add `clowder yaml` logic to Cement controller
## Code After:
from __future__ import print_function
import sys
from cement.ext.ext_argparse import expose
import clowder.util.formatting as fmt
from clowder.cli.abstract_base_controller import AbstractBaseController
from clowder.util.decorators import (
print_clowder_repo_status,
valid_clowder_yaml_required
)
from clowder.yaml.printing import print_yaml
class YAMLController(AbstractBaseController):
class Meta:
label = 'yaml'
stacked_on = 'base'
stacked_type = 'nested'
description = 'Print clowder.yaml information'
arguments = [
(['--resolved', '-r'], dict(action='store_true', help='print resolved clowder.yaml'))
]
@expose(help="second-controller default command", hide=True)
@valid_clowder_yaml_required
@print_clowder_repo_status
def default(self):
if self.app.pargs.resolved:
print(fmt.yaml_string(self.clowder.get_yaml_resolved()))
else:
print_yaml(self.clowder.root_directory)
sys.exit() # exit early to prevent printing extra newline
|
+ from __future__ import print_function
+
+ import sys
+
from cement.ext.ext_argparse import expose
+ import clowder.util.formatting as fmt
from clowder.cli.abstract_base_controller import AbstractBaseController
+ from clowder.util.decorators import (
+ print_clowder_repo_status,
+ valid_clowder_yaml_required
+ )
+ from clowder.yaml.printing import print_yaml
class YAMLController(AbstractBaseController):
class Meta:
label = 'yaml'
stacked_on = 'base'
stacked_type = 'nested'
description = 'Print clowder.yaml information'
arguments = [
(['--resolved', '-r'], dict(action='store_true', help='print resolved clowder.yaml'))
]
@expose(help="second-controller default command", hide=True)
+ @valid_clowder_yaml_required
+ @print_clowder_repo_status
def default(self):
- print("Inside SecondController.default()")
+ if self.app.pargs.resolved:
+ print(fmt.yaml_string(self.clowder.get_yaml_resolved()))
+ else:
+ print_yaml(self.clowder.root_directory)
+ sys.exit() # exit early to prevent printing extra newline
|
accca78e7d9dab841d4850f74795099d63854707
|
masters/master.client.v8.ports/master_site_config.py
|
masters/master.client.v8.ports/master_site_config.py
|
"""ActiveMaster definition."""
from config_bootstrap import Master
class V8Ports(Master.Master3):
base_app_url = 'https://v8-status.appspot.com'
tree_status_url = base_app_url + '/status'
store_revisions_url = base_app_url + '/revisions'
last_good_url = base_app_url + '/lkgr'
project_name = 'V8 Ports'
master_port_id = 17
project_url = 'https://developers.google.com/v8/'
buildbot_url = 'http://build.chromium.org/p/client.v8.ports/'
|
"""ActiveMaster definition."""
from config_bootstrap import Master
class V8Ports(Master.Master3):
base_app_url = 'https://v8-status.appspot.com'
tree_status_url = base_app_url + '/status'
store_revisions_url = base_app_url + '/revisions'
last_good_url = base_app_url + '/lkgr'
project_name = 'V8 Ports'
master_port_id = 17
project_url = 'https://developers.google.com/v8/'
buildbot_url = 'http://build.chromium.org/p/client.v8.ports/'
service_account_file = 'service-account-v8.json'
|
Add buildbucket integration to v8.ports
|
V8: Add buildbucket integration to v8.ports
BUG=595708
[email protected], [email protected]
Review URL: https://codereview.chromium.org/1810113002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@299357 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
eunchong/build,eunchong/build,eunchong/build,eunchong/build
|
"""ActiveMaster definition."""
from config_bootstrap import Master
class V8Ports(Master.Master3):
base_app_url = 'https://v8-status.appspot.com'
tree_status_url = base_app_url + '/status'
store_revisions_url = base_app_url + '/revisions'
last_good_url = base_app_url + '/lkgr'
project_name = 'V8 Ports'
master_port_id = 17
project_url = 'https://developers.google.com/v8/'
buildbot_url = 'http://build.chromium.org/p/client.v8.ports/'
-
+ service_account_file = 'service-account-v8.json'
|
Add buildbucket integration to v8.ports
|
## Code Before:
"""ActiveMaster definition."""
from config_bootstrap import Master
class V8Ports(Master.Master3):
base_app_url = 'https://v8-status.appspot.com'
tree_status_url = base_app_url + '/status'
store_revisions_url = base_app_url + '/revisions'
last_good_url = base_app_url + '/lkgr'
project_name = 'V8 Ports'
master_port_id = 17
project_url = 'https://developers.google.com/v8/'
buildbot_url = 'http://build.chromium.org/p/client.v8.ports/'
## Instruction:
Add buildbucket integration to v8.ports
## Code After:
"""ActiveMaster definition."""
from config_bootstrap import Master
class V8Ports(Master.Master3):
base_app_url = 'https://v8-status.appspot.com'
tree_status_url = base_app_url + '/status'
store_revisions_url = base_app_url + '/revisions'
last_good_url = base_app_url + '/lkgr'
project_name = 'V8 Ports'
master_port_id = 17
project_url = 'https://developers.google.com/v8/'
buildbot_url = 'http://build.chromium.org/p/client.v8.ports/'
service_account_file = 'service-account-v8.json'
|
"""ActiveMaster definition."""
from config_bootstrap import Master
class V8Ports(Master.Master3):
base_app_url = 'https://v8-status.appspot.com'
tree_status_url = base_app_url + '/status'
store_revisions_url = base_app_url + '/revisions'
last_good_url = base_app_url + '/lkgr'
project_name = 'V8 Ports'
master_port_id = 17
project_url = 'https://developers.google.com/v8/'
buildbot_url = 'http://build.chromium.org/p/client.v8.ports/'
+ service_account_file = 'service-account-v8.json'
|
b6ee793158d549f3d04d42ecbeb1c63605d6258f
|
src/setup.py
|
src/setup.py
|
import distutils.core
import distutils.extension
import Cython.Distutils
import numpy as np
compile_args = ['-O3', '-march=native', '-ffast-math', '-std=c++14', '-fopenmp']
ext_module = distutils.extension.Extension('spectrum_match', ['spectrum_match.pyx', 'SpectrumMatch.cpp'],
language='c++', extra_compile_args=compile_args, extra_link_args=compile_args)
distutils.core.setup(
name='ANN-SoLo', cmdclass={'build_ext': Cython.Distutils.build_ext},
ext_modules=[ext_module], include_dirs=[np.get_include()],
)
|
import distutils.core
import distutils.extension
import Cython.Distutils
import numpy as np
compile_args = ['-O3', '-march=native', '-ffast-math', '-fno-associative-math', '-std=c++14', '-fopenmp']
ext_module = distutils.extension.Extension('spectrum_match', ['spectrum_match.pyx', 'SpectrumMatch.cpp'],
language='c++', extra_compile_args=compile_args, extra_link_args=compile_args)
distutils.core.setup(
name='ANN-SoLo', cmdclass={'build_ext': Cython.Distutils.build_ext},
ext_modules=[ext_module], include_dirs=[np.get_include()],
)
|
Add C++ compilation flag to ensure deterministic behavior
|
Add C++ compilation flag to ensure deterministic behavior
More information: https://github.com/spotify/annoy/pull/205
|
Python
|
apache-2.0
|
bittremieux/ANN-SoLo,bittremieux/ANN-SoLo
|
import distutils.core
import distutils.extension
import Cython.Distutils
import numpy as np
- compile_args = ['-O3', '-march=native', '-ffast-math', '-std=c++14', '-fopenmp']
+ compile_args = ['-O3', '-march=native', '-ffast-math', '-fno-associative-math', '-std=c++14', '-fopenmp']
ext_module = distutils.extension.Extension('spectrum_match', ['spectrum_match.pyx', 'SpectrumMatch.cpp'],
language='c++', extra_compile_args=compile_args, extra_link_args=compile_args)
distutils.core.setup(
name='ANN-SoLo', cmdclass={'build_ext': Cython.Distutils.build_ext},
ext_modules=[ext_module], include_dirs=[np.get_include()],
)
|
Add C++ compilation flag to ensure deterministic behavior
|
## Code Before:
import distutils.core
import distutils.extension
import Cython.Distutils
import numpy as np
compile_args = ['-O3', '-march=native', '-ffast-math', '-std=c++14', '-fopenmp']
ext_module = distutils.extension.Extension('spectrum_match', ['spectrum_match.pyx', 'SpectrumMatch.cpp'],
language='c++', extra_compile_args=compile_args, extra_link_args=compile_args)
distutils.core.setup(
name='ANN-SoLo', cmdclass={'build_ext': Cython.Distutils.build_ext},
ext_modules=[ext_module], include_dirs=[np.get_include()],
)
## Instruction:
Add C++ compilation flag to ensure deterministic behavior
## Code After:
import distutils.core
import distutils.extension
import Cython.Distutils
import numpy as np
compile_args = ['-O3', '-march=native', '-ffast-math', '-fno-associative-math', '-std=c++14', '-fopenmp']
ext_module = distutils.extension.Extension('spectrum_match', ['spectrum_match.pyx', 'SpectrumMatch.cpp'],
language='c++', extra_compile_args=compile_args, extra_link_args=compile_args)
distutils.core.setup(
name='ANN-SoLo', cmdclass={'build_ext': Cython.Distutils.build_ext},
ext_modules=[ext_module], include_dirs=[np.get_include()],
)
|
import distutils.core
import distutils.extension
import Cython.Distutils
import numpy as np
- compile_args = ['-O3', '-march=native', '-ffast-math', '-std=c++14', '-fopenmp']
+ compile_args = ['-O3', '-march=native', '-ffast-math', '-fno-associative-math', '-std=c++14', '-fopenmp']
? +++++++++++++++++++++++++
ext_module = distutils.extension.Extension('spectrum_match', ['spectrum_match.pyx', 'SpectrumMatch.cpp'],
language='c++', extra_compile_args=compile_args, extra_link_args=compile_args)
distutils.core.setup(
name='ANN-SoLo', cmdclass={'build_ext': Cython.Distutils.build_ext},
ext_modules=[ext_module], include_dirs=[np.get_include()],
)
|
e95bcb1a2688a9b5a0c09728cdd0082b643de943
|
pcbot/config.py
|
pcbot/config.py
|
import json
from os.path import exists
from os import mkdir
class Config:
config_path = "config/"
def __init__(self, filename, data=None, load=True):
self.filepath = "{}{}.json".format(self.config_path, filename)
if not exists(self.config_path):
mkdir(self.config_path)
loaded_data = None
if load:
loaded_data = self.load()
if data and not loaded_data:
self.data = data
elif loaded_data:
self.data = loaded_data
else:
self.data = None
if not self.data == loaded_data:
self.save()
def save(self):
with open(self.filepath, "w") as f:
json.dump(self.data, f)
def load(self):
if exists(self.filepath):
with open(self.filepath, "r") as f:
return json.load(f)
return None
|
import json
from os.path import exists
from os import mkdir
class Config:
config_path = "config/"
def __init__(self, filename, data=None, load=True):
self.filepath = "{}{}.json".format(self.config_path, filename)
if not exists(self.config_path):
mkdir(self.config_path)
loaded_data = None
if load:
loaded_data = self.load()
if data is not None and not loaded_data:
self.data = data
elif loaded_data:
self.data = loaded_data
else:
self.data = None
if not self.data == loaded_data:
self.save()
def save(self):
with open(self.filepath, "w") as f:
json.dump(self.data, f)
def load(self):
if exists(self.filepath):
with open(self.filepath, "r") as f:
return json.load(f)
return None
|
Check if data is not None instead of if data is true
|
Check if data is not None instead of if data is true
|
Python
|
mit
|
pckv/pcbot,PcBoy111/PC-BOT-V2,PcBoy111/PCBOT
|
import json
from os.path import exists
from os import mkdir
class Config:
config_path = "config/"
def __init__(self, filename, data=None, load=True):
self.filepath = "{}{}.json".format(self.config_path, filename)
if not exists(self.config_path):
mkdir(self.config_path)
loaded_data = None
if load:
loaded_data = self.load()
- if data and not loaded_data:
+ if data is not None and not loaded_data:
self.data = data
elif loaded_data:
self.data = loaded_data
else:
self.data = None
if not self.data == loaded_data:
self.save()
def save(self):
with open(self.filepath, "w") as f:
json.dump(self.data, f)
def load(self):
if exists(self.filepath):
with open(self.filepath, "r") as f:
return json.load(f)
return None
|
Check if data is not None instead of if data is true
|
## Code Before:
import json
from os.path import exists
from os import mkdir
class Config:
config_path = "config/"
def __init__(self, filename, data=None, load=True):
self.filepath = "{}{}.json".format(self.config_path, filename)
if not exists(self.config_path):
mkdir(self.config_path)
loaded_data = None
if load:
loaded_data = self.load()
if data and not loaded_data:
self.data = data
elif loaded_data:
self.data = loaded_data
else:
self.data = None
if not self.data == loaded_data:
self.save()
def save(self):
with open(self.filepath, "w") as f:
json.dump(self.data, f)
def load(self):
if exists(self.filepath):
with open(self.filepath, "r") as f:
return json.load(f)
return None
## Instruction:
Check if data is not None instead of if data is true
## Code After:
import json
from os.path import exists
from os import mkdir
class Config:
config_path = "config/"
def __init__(self, filename, data=None, load=True):
self.filepath = "{}{}.json".format(self.config_path, filename)
if not exists(self.config_path):
mkdir(self.config_path)
loaded_data = None
if load:
loaded_data = self.load()
if data is not None and not loaded_data:
self.data = data
elif loaded_data:
self.data = loaded_data
else:
self.data = None
if not self.data == loaded_data:
self.save()
def save(self):
with open(self.filepath, "w") as f:
json.dump(self.data, f)
def load(self):
if exists(self.filepath):
with open(self.filepath, "r") as f:
return json.load(f)
return None
|
import json
from os.path import exists
from os import mkdir
class Config:
config_path = "config/"
def __init__(self, filename, data=None, load=True):
self.filepath = "{}{}.json".format(self.config_path, filename)
if not exists(self.config_path):
mkdir(self.config_path)
loaded_data = None
if load:
loaded_data = self.load()
- if data and not loaded_data:
+ if data is not None and not loaded_data:
? ++++++++++++
self.data = data
elif loaded_data:
self.data = loaded_data
else:
self.data = None
if not self.data == loaded_data:
self.save()
def save(self):
with open(self.filepath, "w") as f:
json.dump(self.data, f)
def load(self):
if exists(self.filepath):
with open(self.filepath, "r") as f:
return json.load(f)
return None
|
ea73a999ffbc936f7e072a310f05ee2cb26b6c21
|
openprocurement/tender/limited/adapters.py
|
openprocurement/tender/limited/adapters.py
|
from openprocurement.tender.core.adapters import TenderConfigurator
from openprocurement.tender.limited.models import (
ReportingTender, NegotiationTender, NegotiationQuickTender
)
class TenderReportingConfigurator(TenderConfigurator):
""" Reporting Tender configuration adapter """
name = "Reporting Tender configurator"
model = ReportingTender
@property
def edit_accreditation(self):
raise NotImplemented
class TenderNegotiationConfigurator(TenderConfigurator):
""" Negotiation Tender configuration adapter """
name = "Negotiation Tender configurator"
model = NegotiationTender
@property
def edit_accreditation(self):
raise NotImplemented
class TenderNegotiationQuickConfigurator(TenderNegotiationConfigurator):
""" Negotiation Quick Tender configuration adapter """
name = "Negotiation Quick Tender configurator"
model = NegotiationQuickTender
|
from openprocurement.tender.core.adapters import TenderConfigurator
from openprocurement.tender.openua.constants import STATUS4ROLE
from openprocurement.tender.limited.models import (
ReportingTender, NegotiationTender, NegotiationQuickTender
)
class TenderReportingConfigurator(TenderConfigurator):
""" Reporting Tender configuration adapter """
name = "Reporting Tender configurator"
model = ReportingTender
# Dictionary with allowed complaint statuses for operations for each role
allowed_statuses_for_complaint_operations_for_roles = STATUS4ROLE
@property
def edit_accreditation(self):
raise NotImplemented
class TenderNegotiationConfigurator(TenderConfigurator):
""" Negotiation Tender configuration adapter """
name = "Negotiation Tender configurator"
model = NegotiationTender
# Dictionary with allowed complaint statuses for operations for each role
allowed_statuses_for_complaint_operations_for_roles = STATUS4ROLE
@property
def edit_accreditation(self):
raise NotImplemented
class TenderNegotiationQuickConfigurator(TenderNegotiationConfigurator):
""" Negotiation Quick Tender configuration adapter """
name = "Negotiation Quick Tender configurator"
model = NegotiationQuickTender
|
Add import and constant in adapter
|
Add import and constant in adapter
|
Python
|
apache-2.0
|
openprocurement/openprocurement.tender.limited
|
from openprocurement.tender.core.adapters import TenderConfigurator
+ from openprocurement.tender.openua.constants import STATUS4ROLE
from openprocurement.tender.limited.models import (
ReportingTender, NegotiationTender, NegotiationQuickTender
)
class TenderReportingConfigurator(TenderConfigurator):
""" Reporting Tender configuration adapter """
name = "Reporting Tender configurator"
model = ReportingTender
+
+ # Dictionary with allowed complaint statuses for operations for each role
+ allowed_statuses_for_complaint_operations_for_roles = STATUS4ROLE
@property
def edit_accreditation(self):
raise NotImplemented
class TenderNegotiationConfigurator(TenderConfigurator):
""" Negotiation Tender configuration adapter """
name = "Negotiation Tender configurator"
model = NegotiationTender
+ # Dictionary with allowed complaint statuses for operations for each role
+ allowed_statuses_for_complaint_operations_for_roles = STATUS4ROLE
+
@property
def edit_accreditation(self):
raise NotImplemented
class TenderNegotiationQuickConfigurator(TenderNegotiationConfigurator):
""" Negotiation Quick Tender configuration adapter """
name = "Negotiation Quick Tender configurator"
model = NegotiationQuickTender
|
Add import and constant in adapter
|
## Code Before:
from openprocurement.tender.core.adapters import TenderConfigurator
from openprocurement.tender.limited.models import (
ReportingTender, NegotiationTender, NegotiationQuickTender
)
class TenderReportingConfigurator(TenderConfigurator):
""" Reporting Tender configuration adapter """
name = "Reporting Tender configurator"
model = ReportingTender
@property
def edit_accreditation(self):
raise NotImplemented
class TenderNegotiationConfigurator(TenderConfigurator):
""" Negotiation Tender configuration adapter """
name = "Negotiation Tender configurator"
model = NegotiationTender
@property
def edit_accreditation(self):
raise NotImplemented
class TenderNegotiationQuickConfigurator(TenderNegotiationConfigurator):
""" Negotiation Quick Tender configuration adapter """
name = "Negotiation Quick Tender configurator"
model = NegotiationQuickTender
## Instruction:
Add import and constant in adapter
## Code After:
from openprocurement.tender.core.adapters import TenderConfigurator
from openprocurement.tender.openua.constants import STATUS4ROLE
from openprocurement.tender.limited.models import (
ReportingTender, NegotiationTender, NegotiationQuickTender
)
class TenderReportingConfigurator(TenderConfigurator):
""" Reporting Tender configuration adapter """
name = "Reporting Tender configurator"
model = ReportingTender
# Dictionary with allowed complaint statuses for operations for each role
allowed_statuses_for_complaint_operations_for_roles = STATUS4ROLE
@property
def edit_accreditation(self):
raise NotImplemented
class TenderNegotiationConfigurator(TenderConfigurator):
""" Negotiation Tender configuration adapter """
name = "Negotiation Tender configurator"
model = NegotiationTender
# Dictionary with allowed complaint statuses for operations for each role
allowed_statuses_for_complaint_operations_for_roles = STATUS4ROLE
@property
def edit_accreditation(self):
raise NotImplemented
class TenderNegotiationQuickConfigurator(TenderNegotiationConfigurator):
""" Negotiation Quick Tender configuration adapter """
name = "Negotiation Quick Tender configurator"
model = NegotiationQuickTender
|
from openprocurement.tender.core.adapters import TenderConfigurator
+ from openprocurement.tender.openua.constants import STATUS4ROLE
from openprocurement.tender.limited.models import (
ReportingTender, NegotiationTender, NegotiationQuickTender
)
class TenderReportingConfigurator(TenderConfigurator):
""" Reporting Tender configuration adapter """
name = "Reporting Tender configurator"
model = ReportingTender
+
+ # Dictionary with allowed complaint statuses for operations for each role
+ allowed_statuses_for_complaint_operations_for_roles = STATUS4ROLE
@property
def edit_accreditation(self):
raise NotImplemented
class TenderNegotiationConfigurator(TenderConfigurator):
""" Negotiation Tender configuration adapter """
name = "Negotiation Tender configurator"
model = NegotiationTender
+ # Dictionary with allowed complaint statuses for operations for each role
+ allowed_statuses_for_complaint_operations_for_roles = STATUS4ROLE
+
@property
def edit_accreditation(self):
raise NotImplemented
class TenderNegotiationQuickConfigurator(TenderNegotiationConfigurator):
""" Negotiation Quick Tender configuration adapter """
name = "Negotiation Quick Tender configurator"
model = NegotiationQuickTender
|
0c1a0a70154ddf107a6174d49793e369d28f1beb
|
openstack_dashboard/views.py
|
openstack_dashboard/views.py
|
from django import shortcuts
from django.views.decorators import vary
import horizon
from openstack_auth import views
def get_user_home(user):
if user.is_superuser:
return horizon.get_dashboard('admin').get_absolute_url()
return horizon.get_dashboard('project').get_absolute_url()
@vary.vary_on_cookie
def splash(request):
if request.user.is_authenticated():
return shortcuts.redirect(horizon.get_user_home(request.user))
form = views.Login(request)
request.session.clear()
request.session.set_test_cookie()
return shortcuts.render(request, 'splash.html', {'form': form})
|
from django import shortcuts
from django.views.decorators import vary
import horizon
from horizon import base
from openstack_auth import views
def get_user_home(user):
dashboard = None
if user.is_superuser:
try:
dashboard = horizon.get_dashboard('admin')
except base.NotRegistered:
pass
if dashboard is None:
dashboard = horizon.get_default_dashboard()
return dashboard.get_absolute_url()
@vary.vary_on_cookie
def splash(request):
if request.user.is_authenticated():
return shortcuts.redirect(horizon.get_user_home(request.user))
form = views.Login(request)
request.session.clear()
request.session.set_test_cookie()
return shortcuts.render(request, 'splash.html', {'form': form})
|
Fix default get_user_home with dynamic dashboards
|
Fix default get_user_home with dynamic dashboards
The existing get_user_home implementation expects both the 'admin'
and 'project' dashboards to exist and throws an exception if they
are missing. With the inclusion of configurable dashboard loading,
we can no longer count on certain dashboards being loaded.
Closes-Bug: #1293727
Change-Id: I4ee0b7b313f4e1b27c0daea829c8b38282fa78d9
|
Python
|
apache-2.0
|
bigswitch/horizon,tsufiev/horizon,froyobin/horizon,NeCTAR-RC/horizon,kfox1111/horizon,NeCTAR-RC/horizon,yjxtogo/horizon,RudoCris/horizon,watonyweng/horizon,philoniare/horizon,philoniare/horizon,noironetworks/horizon,eayunstack/horizon,CiscoSystems/avos,Dark-Hacker/horizon,zouyapeng/horizon,nvoron23/avos,mrunge/horizon,xinwu/horizon,agileblaze/OpenStackTwoFactorAuthentication,VaneCloud/horizon,mdavid/horizon,gerrive/horizon,mrunge/openstack_horizon,Daniex/horizon,openstack/horizon,orbitfp7/horizon,yeming233/horizon,kfox1111/horizon,Solinea/horizon,xinwu/horizon,damien-dg/horizon,tqtran7/horizon,mrunge/openstack_horizon,flochaz/horizon,tanglei528/horizon,doug-fish/horizon,Daniex/horizon,wolverineav/horizon,dan1/horizon-x509,saydulk/horizon,ChameleonCloud/horizon,Solinea/horizon,yeming233/horizon,anthonydillon/horizon,tellesnobrega/horizon,Tesora/tesora-horizon,wolverineav/horizon,VaneCloud/horizon,Mirantis/mos-horizon,henaras/horizon,pranavtendolkr/horizon,NCI-Cloud/horizon,eayunstack/horizon,blueboxgroup/horizon,yjxtogo/horizon,takeshineshiro/horizon,tellesnobrega/horizon,pranavtendolkr/horizon,endorphinl/horizon,tanglei528/horizon,davidcusatis/horizon,endorphinl/horizon,yjxtogo/horizon,endorphinl/horizon,anthonydillon/horizon,CiscoSystems/horizon,saydulk/horizon,Metaswitch/horizon,mrunge/openstack_horizon,mdavid/horizon,idjaw/horizon,watonyweng/horizon,sandvine/horizon,maestro-hybrid-cloud/horizon,django-leonardo/horizon,pranavtendolkr/horizon,VaneCloud/horizon,izadorozhna/dashboard_integration_tests,henaras/horizon,karthik-suresh/horizon,kfox1111/horizon,CiscoSystems/avos,saydulk/horizon,coreycb/horizon,eayunstack/horizon,dan1/horizon-x509,j4/horizon,maestro-hybrid-cloud/horizon,FNST-OpenStack/horizon,aaronorosen/horizon-congress,xinwu/horizon,VaneCloud/horizon,Hodorable/0602,mandeepdhami/horizon,newrocknj/horizon,NCI-Cloud/horizon,aaronorosen/horizon-congress,kfox1111/horizon,endorphinl/horizon-fork,redhat-cip/horizon,CiscoSystems/avos,j4/horizon,watonyweng/horizo
n,FNST-OpenStack/horizon,bac/horizon,NCI-Cloud/horizon,endorphinl/horizon,RudoCris/horizon,orbitfp7/horizon,coreycb/horizon,saydulk/horizon,luhanhan/horizon,dan1/horizon-proto,henaras/horizon,luhanhan/horizon,tqtran7/horizon,tellesnobrega/horizon,doug-fish/horizon,ging/horizon,mrunge/horizon_lib,vladryk/horizon,Dark-Hacker/horizon,Metaswitch/horizon,CiscoSystems/horizon,icloudrnd/automation_tools,sandvine/horizon,luhanhan/horizon,idjaw/horizon,flochaz/horizon,NCI-Cloud/horizon,JioCloud/horizon,damien-dg/horizon,django-leonardo/horizon,luhanhan/horizon,flochaz/horizon,wolverineav/horizon,FNST-OpenStack/horizon,takeshineshiro/horizon,Tesora/tesora-horizon,CiscoSystems/horizon,promptworks/horizon,xme1226/horizon,zouyapeng/horizon,django-leonardo/horizon,nvoron23/avos,Dark-Hacker/horizon,endorphinl/horizon-fork,BiznetGIO/horizon,mandeepdhami/horizon,Metaswitch/horizon,redhat-cip/horizon,coreycb/horizon,noironetworks/horizon,Daniex/horizon,Dark-Hacker/horizon,ChameleonCloud/horizon,mdavid/horizon,tsufiev/horizon,yeming233/horizon,CiscoSystems/horizon,JioCloud/horizon,xme1226/horizon,idjaw/horizon,bigswitch/horizon,ging/horizon,Metaswitch/horizon,BiznetGIO/horizon,karthik-suresh/horizon,mdavid/horizon,Daniex/horizon,philoniare/horizon,vladryk/horizon,froyobin/horizon,django-leonardo/horizon,agileblaze/OpenStackTwoFactorAuthentication,Solinea/horizon,mandeepdhami/horizon,newrocknj/horizon,wangxiangyu/horizon,philoniare/horizon,anthonydillon/horizon,gerrive/horizon,openstack/horizon,redhat-openstack/horizon,doug-fish/horizon,Mirantis/mos-horizon,nvoron23/avos,vladryk/horizon,endorphinl/horizon-fork,liyitest/rr,zouyapeng/horizon,openstack/horizon,icloudrnd/automation_tools,liyitest/rr,tqtran7/horizon,Mirantis/mos-horizon,j4/horizon,yjxtogo/horizon,wolverineav/horizon,ging/horizon,zouyapeng/horizon,henaras/horizon,promptworks/horizon,promptworks/horizon,endorphinl/horizon-fork,promptworks/horizon,blueboxgroup/horizon,blueboxgroup/horizon,tsufiev/horizon,blueboxgroup/horizon,B
iznetGIO/horizon,ChameleonCloud/horizon,yeming233/horizon,Mirantis/mos-horizon,noironetworks/horizon,tqtran7/horizon,davidcusatis/horizon,bac/horizon,redhat-cip/horizon,froyobin/horizon,pranavtendolkr/horizon,liyitest/rr,newrocknj/horizon,redhat-openstack/horizon,tsufiev/horizon,Tesora/tesora-horizon,anthonydillon/horizon,wangxiangyu/horizon,dan1/horizon-x509,davidcusatis/horizon,bigswitch/horizon,redhat-openstack/horizon,dan1/horizon-proto,karthik-suresh/horizon,wangxiangyu/horizon,doug-fish/horizon,ging/horizon,JioCloud/horizon,watonyweng/horizon,takeshineshiro/horizon,sandvine/horizon,damien-dg/horizon,NeCTAR-RC/horizon,davidcusatis/horizon,mandeepdhami/horizon,bigswitch/horizon,aaronorosen/horizon-congress,karthik-suresh/horizon,idjaw/horizon,izadorozhna/dashboard_integration_tests,mrunge/horizon_lib,vladryk/horizon,Hodorable/0602,agileblaze/OpenStackTwoFactorAuthentication,xme1226/horizon,orbitfp7/horizon,BiznetGIO/horizon,mrunge/horizon_lib,noironetworks/horizon,gerrive/horizon,Hodorable/0602,agileblaze/OpenStackTwoFactorAuthentication,takeshineshiro/horizon,dan1/horizon-proto,maestro-hybrid-cloud/horizon,CiscoSystems/avos,damien-dg/horizon,sandvine/horizon,mrunge/horizon,FNST-OpenStack/horizon,wangxiangyu/horizon,Solinea/horizon,newrocknj/horizon,dan1/horizon-x509,j4/horizon,RudoCris/horizon,redhat-openstack/horizon,coreycb/horizon,orbitfp7/horizon,gerrive/horizon,mrunge/horizon,RudoCris/horizon,dan1/horizon-proto,tanglei528/horizon,openstack/horizon,icloudrnd/automation_tools,flochaz/horizon,nvoron23/avos,icloudrnd/automation_tools,NeCTAR-RC/horizon,bac/horizon,ChameleonCloud/horizon,redhat-cip/horizon,Hodorable/0602,maestro-hybrid-cloud/horizon,liyitest/rr,bac/horizon,Tesora/tesora-horizon,tellesnobrega/horizon,xinwu/horizon
|
from django import shortcuts
from django.views.decorators import vary
import horizon
+ from horizon import base
from openstack_auth import views
def get_user_home(user):
+ dashboard = None
if user.is_superuser:
- return horizon.get_dashboard('admin').get_absolute_url()
+ try:
+ dashboard = horizon.get_dashboard('admin')
+ except base.NotRegistered:
+ pass
+
+ if dashboard is None:
+ dashboard = horizon.get_default_dashboard()
+
- return horizon.get_dashboard('project').get_absolute_url()
+ return dashboard.get_absolute_url()
@vary.vary_on_cookie
def splash(request):
if request.user.is_authenticated():
return shortcuts.redirect(horizon.get_user_home(request.user))
form = views.Login(request)
request.session.clear()
request.session.set_test_cookie()
return shortcuts.render(request, 'splash.html', {'form': form})
|
Fix default get_user_home with dynamic dashboards
|
## Code Before:
from django import shortcuts
from django.views.decorators import vary
import horizon
from openstack_auth import views
def get_user_home(user):
if user.is_superuser:
return horizon.get_dashboard('admin').get_absolute_url()
return horizon.get_dashboard('project').get_absolute_url()
@vary.vary_on_cookie
def splash(request):
if request.user.is_authenticated():
return shortcuts.redirect(horizon.get_user_home(request.user))
form = views.Login(request)
request.session.clear()
request.session.set_test_cookie()
return shortcuts.render(request, 'splash.html', {'form': form})
## Instruction:
Fix default get_user_home with dynamic dashboards
## Code After:
from django import shortcuts
from django.views.decorators import vary
import horizon
from horizon import base
from openstack_auth import views
def get_user_home(user):
dashboard = None
if user.is_superuser:
try:
dashboard = horizon.get_dashboard('admin')
except base.NotRegistered:
pass
if dashboard is None:
dashboard = horizon.get_default_dashboard()
return dashboard.get_absolute_url()
@vary.vary_on_cookie
def splash(request):
if request.user.is_authenticated():
return shortcuts.redirect(horizon.get_user_home(request.user))
form = views.Login(request)
request.session.clear()
request.session.set_test_cookie()
return shortcuts.render(request, 'splash.html', {'form': form})
|
from django import shortcuts
from django.views.decorators import vary
import horizon
+ from horizon import base
from openstack_auth import views
def get_user_home(user):
+ dashboard = None
if user.is_superuser:
- return horizon.get_dashboard('admin').get_absolute_url()
+ try:
+ dashboard = horizon.get_dashboard('admin')
+ except base.NotRegistered:
+ pass
+
+ if dashboard is None:
+ dashboard = horizon.get_default_dashboard()
+
- return horizon.get_dashboard('project').get_absolute_url()
? ------------ -----------
+ return dashboard.get_absolute_url()
@vary.vary_on_cookie
def splash(request):
if request.user.is_authenticated():
return shortcuts.redirect(horizon.get_user_home(request.user))
form = views.Login(request)
request.session.clear()
request.session.set_test_cookie()
return shortcuts.render(request, 'splash.html', {'form': form})
|
24788b106b9cdd70e7240dc3eccac82fba290c85
|
tests/util/test_yaml.py
|
tests/util/test_yaml.py
|
"""Test Home Assistant yaml loader."""
import io
import unittest
from homeassistant.util import yaml
class TestYaml(unittest.TestCase):
"""Test util.yaml loader."""
def test_simple_list(self):
"""Test simple list."""
conf = "config:\n - simple\n - list"
with io.StringIO(conf) as f:
doc = yaml.yaml.safe_load(f)
assert doc['config'] == ["simple", "list"]
def test_simple_dict(self):
"""Test simple dict."""
conf = "key: value"
with io.StringIO(conf) as f:
doc = yaml.yaml.safe_load(f)
assert doc['key'] == 'value'
def test_duplicate_key(self):
"""Test simple dict."""
conf = "key: thing1\nkey: thing2"
try:
with io.StringIO(conf) as f:
yaml.yaml.safe_load(f)
except Exception:
pass
else:
assert 0
|
"""Test Home Assistant yaml loader."""
import io
import unittest
import os
from homeassistant.util import yaml
class TestYaml(unittest.TestCase):
"""Test util.yaml loader."""
def test_simple_list(self):
"""Test simple list."""
conf = "config:\n - simple\n - list"
with io.StringIO(conf) as f:
doc = yaml.yaml.safe_load(f)
assert doc['config'] == ["simple", "list"]
def test_simple_dict(self):
"""Test simple dict."""
conf = "key: value"
with io.StringIO(conf) as f:
doc = yaml.yaml.safe_load(f)
assert doc['key'] == 'value'
def test_duplicate_key(self):
"""Test simple dict."""
conf = "key: thing1\nkey: thing2"
try:
with io.StringIO(conf) as f:
yaml.yaml.safe_load(f)
except Exception:
pass
else:
assert 0
def test_enviroment_variable(self):
"""Test config file with enviroment variable."""
os.environ["PASSWORD"] = "secret_password"
conf = "password: !env_var PASSWORD"
with io.StringIO(conf) as f:
doc = yaml.yaml.safe_load(f)
assert doc['password'] == "secret_password"
del os.environ["PASSWORD"]
def test_invalid_enviroment_variable(self):
"""Test config file with no enviroment variable sat."""
conf = "password: !env_var PASSWORD"
try:
with io.StringIO(conf) as f:
yaml.yaml.safe_load(f)
except Exception:
pass
else:
assert 0
|
Add test for yaml enviroment
|
Add test for yaml enviroment
|
Python
|
mit
|
lukas-hetzenecker/home-assistant,LinuxChristian/home-assistant,molobrakos/home-assistant,sffjunkie/home-assistant,titilambert/home-assistant,ewandor/home-assistant,emilhetty/home-assistant,mikaelboman/home-assistant,nkgilley/home-assistant,robbiet480/home-assistant,jawilson/home-assistant,molobrakos/home-assistant,devdelay/home-assistant,florianholzapfel/home-assistant,deisi/home-assistant,betrisey/home-assistant,jaharkes/home-assistant,qedi-r/home-assistant,postlund/home-assistant,mezz64/home-assistant,eagleamon/home-assistant,w1ll1am23/home-assistant,varunr047/homefile,balloob/home-assistant,leoc/home-assistant,jabesq/home-assistant,MungoRae/home-assistant,Cinntax/home-assistant,nugget/home-assistant,soldag/home-assistant,dmeulen/home-assistant,happyleavesaoc/home-assistant,devdelay/home-assistant,sffjunkie/home-assistant,kennedyshead/home-assistant,sffjunkie/home-assistant,sander76/home-assistant,hexxter/home-assistant,balloob/home-assistant,Teagan42/home-assistant,DavidLP/home-assistant,Danielhiversen/home-assistant,robjohnson189/home-assistant,morphis/home-assistant,Zac-HD/home-assistant,ct-23/home-assistant,w1ll1am23/home-assistant,Danielhiversen/home-assistant,aronsky/home-assistant,kyvinh/home-assistant,emilhetty/home-assistant,hexxter/home-assistant,hmronline/home-assistant,varunr047/homefile,shaftoe/home-assistant,MartinHjelmare/home-assistant,shaftoe/home-assistant,deisi/home-assistant,joopert/home-assistant,open-homeautomation/home-assistant,open-homeautomation/home-assistant,dmeulen/home-assistant,oandrew/home-assistant,HydrelioxGitHub/home-assistant,deisi/home-assistant,leoc/home-assistant,emilhetty/home-assistant,postlund/home-assistant,ct-23/home-assistant,ct-23/home-assistant,auduny/home-assistant,betrisey/home-assistant,HydrelioxGitHub/home-assistant,PetePriority/home-assistant,leppa/home-assistant,home-assistant/home-assistant,PetePriority/home-assistant,sffjunkie/home-assistant,persandstrom/home-assistant,jabesq/home-assistant,robjohnson189/home-
assistant,jamespcole/home-assistant,alexmogavero/home-assistant,oandrew/home-assistant,Smart-Torvy/torvy-home-assistant,philipbl/home-assistant,tchellomello/home-assistant,GenericStudent/home-assistant,rohitranjan1991/home-assistant,stefan-jonasson/home-assistant,xifle/home-assistant,philipbl/home-assistant,auduny/home-assistant,MungoRae/home-assistant,jaharkes/home-assistant,Zac-HD/home-assistant,soldag/home-assistant,tinloaf/home-assistant,bdfoster/blumate,morphis/home-assistant,keerts/home-assistant,morphis/home-assistant,jnewland/home-assistant,Smart-Torvy/torvy-home-assistant,partofthething/home-assistant,alexmogavero/home-assistant,eagleamon/home-assistant,PetePriority/home-assistant,mKeRix/home-assistant,florianholzapfel/home-assistant,tinloaf/home-assistant,kyvinh/home-assistant,nkgilley/home-assistant,persandstrom/home-assistant,kyvinh/home-assistant,Zac-HD/home-assistant,turbokongen/home-assistant,turbokongen/home-assistant,fbradyirl/home-assistant,JshWright/home-assistant,bdfoster/blumate,sffjunkie/home-assistant,partofthething/home-assistant,emilhetty/home-assistant,kennedyshead/home-assistant,aequitas/home-assistant,emilhetty/home-assistant,pschmitt/home-assistant,jabesq/home-assistant,varunr047/homefile,srcLurker/home-assistant,Smart-Torvy/torvy-home-assistant,Julian/home-assistant,toddeye/home-assistant,GenericStudent/home-assistant,mKeRix/home-assistant,srcLurker/home-assistant,rohitranjan1991/home-assistant,robjohnson189/home-assistant,robbiet480/home-assistant,stefan-jonasson/home-assistant,HydrelioxGitHub/home-assistant,jnewland/home-assistant,happyleavesaoc/home-assistant,adrienbrault/home-assistant,alexmogavero/home-assistant,DavidLP/home-assistant,philipbl/home-assistant,tboyce1/home-assistant,LinuxChristian/home-assistant,hmronline/home-assistant,MungoRae/home-assistant,mikaelboman/home-assistant,Duoxilian/home-assistant,hexxter/home-assistant,JshWright/home-assistant,bdfoster/blumate,fbradyirl/home-assistant,miniconfig/home-assistant,robjohns
on189/home-assistant,nugget/home-assistant,Cinntax/home-assistant,betrisey/home-assistant,sdague/home-assistant,adrienbrault/home-assistant,tboyce1/home-assistant,home-assistant/home-assistant,lukas-hetzenecker/home-assistant,sdague/home-assistant,tchellomello/home-assistant,rohitranjan1991/home-assistant,toddeye/home-assistant,mKeRix/home-assistant,mKeRix/home-assistant,eagleamon/home-assistant,mikaelboman/home-assistant,aronsky/home-assistant,oandrew/home-assistant,eagleamon/home-assistant,morphis/home-assistant,ewandor/home-assistant,betrisey/home-assistant,ma314smith/home-assistant,sander76/home-assistant,balloob/home-assistant,jaharkes/home-assistant,dmeulen/home-assistant,miniconfig/home-assistant,keerts/home-assistant,aequitas/home-assistant,FreekingDean/home-assistant,Julian/home-assistant,Zac-HD/home-assistant,oandrew/home-assistant,ma314smith/home-assistant,MungoRae/home-assistant,nugget/home-assistant,open-homeautomation/home-assistant,ewandor/home-assistant,deisi/home-assistant,florianholzapfel/home-assistant,mikaelboman/home-assistant,titilambert/home-assistant,Teagan42/home-assistant,jaharkes/home-assistant,Duoxilian/home-assistant,shaftoe/home-assistant,leppa/home-assistant,keerts/home-assistant,mezz64/home-assistant,jawilson/home-assistant,deisi/home-assistant,jamespcole/home-assistant,Smart-Torvy/torvy-home-assistant,tboyce021/home-assistant,stefan-jonasson/home-assistant,stefan-jonasson/home-assistant,ct-23/home-assistant,DavidLP/home-assistant,auduny/home-assistant,pschmitt/home-assistant,ma314smith/home-assistant,persandstrom/home-assistant,varunr047/homefile,philipbl/home-assistant,joopert/home-assistant,devdelay/home-assistant,FreekingDean/home-assistant,happyleavesaoc/home-assistant,hmronline/home-assistant,xifle/home-assistant,tboyce021/home-assistant,xifle/home-assistant,miniconfig/home-assistant,aequitas/home-assistant,Duoxilian/home-assistant,bdfoster/blumate,JshWright/home-assistant,open-homeautomation/home-assistant,keerts/home-assistant
,leoc/home-assistant,ma314smith/home-assistant,srcLurker/home-assistant,jamespcole/home-assistant,LinuxChristian/home-assistant,jnewland/home-assistant,varunr047/homefile,shaftoe/home-assistant,miniconfig/home-assistant,tboyce1/home-assistant,devdelay/home-assistant,happyleavesaoc/home-assistant,MungoRae/home-assistant,MartinHjelmare/home-assistant,hmronline/home-assistant,LinuxChristian/home-assistant,bdfoster/blumate,tinloaf/home-assistant,xifle/home-assistant,srcLurker/home-assistant,Julian/home-assistant,florianholzapfel/home-assistant,molobrakos/home-assistant,LinuxChristian/home-assistant,Julian/home-assistant,tboyce1/home-assistant,mikaelboman/home-assistant,leoc/home-assistant,kyvinh/home-assistant,hexxter/home-assistant,Duoxilian/home-assistant,hmronline/home-assistant,JshWright/home-assistant,MartinHjelmare/home-assistant,alexmogavero/home-assistant,qedi-r/home-assistant,dmeulen/home-assistant,fbradyirl/home-assistant,ct-23/home-assistant
|
"""Test Home Assistant yaml loader."""
import io
import unittest
+ import os
from homeassistant.util import yaml
class TestYaml(unittest.TestCase):
"""Test util.yaml loader."""
def test_simple_list(self):
"""Test simple list."""
conf = "config:\n - simple\n - list"
with io.StringIO(conf) as f:
doc = yaml.yaml.safe_load(f)
assert doc['config'] == ["simple", "list"]
def test_simple_dict(self):
"""Test simple dict."""
conf = "key: value"
with io.StringIO(conf) as f:
doc = yaml.yaml.safe_load(f)
assert doc['key'] == 'value'
def test_duplicate_key(self):
"""Test simple dict."""
conf = "key: thing1\nkey: thing2"
try:
with io.StringIO(conf) as f:
yaml.yaml.safe_load(f)
except Exception:
pass
else:
assert 0
+ def test_enviroment_variable(self):
+ """Test config file with enviroment variable."""
+ os.environ["PASSWORD"] = "secret_password"
+ conf = "password: !env_var PASSWORD"
+ with io.StringIO(conf) as f:
+ doc = yaml.yaml.safe_load(f)
+ assert doc['password'] == "secret_password"
+ del os.environ["PASSWORD"]
+
+ def test_invalid_enviroment_variable(self):
+ """Test config file with no enviroment variable sat."""
+ conf = "password: !env_var PASSWORD"
+ try:
+ with io.StringIO(conf) as f:
+ yaml.yaml.safe_load(f)
+ except Exception:
+ pass
+ else:
+ assert 0
+
|
Add test for yaml enviroment
|
## Code Before:
"""Test Home Assistant yaml loader."""
import io
import unittest
from homeassistant.util import yaml
class TestYaml(unittest.TestCase):
"""Test util.yaml loader."""
def test_simple_list(self):
"""Test simple list."""
conf = "config:\n - simple\n - list"
with io.StringIO(conf) as f:
doc = yaml.yaml.safe_load(f)
assert doc['config'] == ["simple", "list"]
def test_simple_dict(self):
"""Test simple dict."""
conf = "key: value"
with io.StringIO(conf) as f:
doc = yaml.yaml.safe_load(f)
assert doc['key'] == 'value'
def test_duplicate_key(self):
"""Test simple dict."""
conf = "key: thing1\nkey: thing2"
try:
with io.StringIO(conf) as f:
yaml.yaml.safe_load(f)
except Exception:
pass
else:
assert 0
## Instruction:
Add test for yaml enviroment
## Code After:
"""Test Home Assistant yaml loader."""
import io
import unittest
import os
from homeassistant.util import yaml
class TestYaml(unittest.TestCase):
"""Test util.yaml loader."""
def test_simple_list(self):
"""Test simple list."""
conf = "config:\n - simple\n - list"
with io.StringIO(conf) as f:
doc = yaml.yaml.safe_load(f)
assert doc['config'] == ["simple", "list"]
def test_simple_dict(self):
"""Test simple dict."""
conf = "key: value"
with io.StringIO(conf) as f:
doc = yaml.yaml.safe_load(f)
assert doc['key'] == 'value'
def test_duplicate_key(self):
"""Test simple dict."""
conf = "key: thing1\nkey: thing2"
try:
with io.StringIO(conf) as f:
yaml.yaml.safe_load(f)
except Exception:
pass
else:
assert 0
def test_enviroment_variable(self):
"""Test config file with enviroment variable."""
os.environ["PASSWORD"] = "secret_password"
conf = "password: !env_var PASSWORD"
with io.StringIO(conf) as f:
doc = yaml.yaml.safe_load(f)
assert doc['password'] == "secret_password"
del os.environ["PASSWORD"]
def test_invalid_enviroment_variable(self):
"""Test config file with no enviroment variable sat."""
conf = "password: !env_var PASSWORD"
try:
with io.StringIO(conf) as f:
yaml.yaml.safe_load(f)
except Exception:
pass
else:
assert 0
|
"""Test Home Assistant yaml loader."""
import io
import unittest
+ import os
from homeassistant.util import yaml
class TestYaml(unittest.TestCase):
"""Test util.yaml loader."""
def test_simple_list(self):
"""Test simple list."""
conf = "config:\n - simple\n - list"
with io.StringIO(conf) as f:
doc = yaml.yaml.safe_load(f)
assert doc['config'] == ["simple", "list"]
def test_simple_dict(self):
"""Test simple dict."""
conf = "key: value"
with io.StringIO(conf) as f:
doc = yaml.yaml.safe_load(f)
assert doc['key'] == 'value'
def test_duplicate_key(self):
"""Test simple dict."""
conf = "key: thing1\nkey: thing2"
try:
with io.StringIO(conf) as f:
yaml.yaml.safe_load(f)
except Exception:
pass
else:
assert 0
+
+ def test_enviroment_variable(self):
+ """Test config file with enviroment variable."""
+ os.environ["PASSWORD"] = "secret_password"
+ conf = "password: !env_var PASSWORD"
+ with io.StringIO(conf) as f:
+ doc = yaml.yaml.safe_load(f)
+ assert doc['password'] == "secret_password"
+ del os.environ["PASSWORD"]
+
+ def test_invalid_enviroment_variable(self):
+ """Test config file with no enviroment variable sat."""
+ conf = "password: !env_var PASSWORD"
+ try:
+ with io.StringIO(conf) as f:
+ yaml.yaml.safe_load(f)
+ except Exception:
+ pass
+ else:
+ assert 0
|
34a2b3a93bd96643d74fcb3c8d2f8db52d18253f
|
desubot.py
|
desubot.py
|
from motobot.irc_bot import IRCBot, IRCLevel
import desubot
import threading
def worker():
desubot.bot.run()
def main():
desubot.bot.load_plugins('plugins')
desubot.bot.join('#Moto-chan')
thread = threading.Thread(target=worker)
thread.start()
while True:
msg = input()
if msg.startswith(':'):
desubot.bot.load_plugins('plugins')
else:
desubot.bot.send(msg)
if __name__ == '__main__':
main()
else:
bot = IRCBot('desutest', 'irc.rizon.net', command_prefix='!')
|
from motobot.irc_bot import IRCBot, IRCLevel
import desubot
import threading
import traceback
def worker():
desubot.bot.run()
def main():
desubot.bot.load_plugins('plugins')
desubot.bot.join('#Moto-chan')
desubot.bot.join('#animu')
desubot.bot.join('#anime-planet.com')
thread = threading.Thread(target=worker)
thread.start()
while True:
try:
msg = input()
if msg.startswith(':'):
desubot.bot.load_plugins('plugins')
else:
desubot.bot.send(msg)
except:
traceback.print_exc()
if __name__ == '__main__':
main()
else:
bot = IRCBot('desubot', 'irc.rizon.net', command_prefix='!')
|
Make exception on reload not crash input
|
Make exception on reload not crash input
|
Python
|
mit
|
Motoko11/MotoBot
|
from motobot.irc_bot import IRCBot, IRCLevel
import desubot
import threading
+ import traceback
def worker():
desubot.bot.run()
def main():
desubot.bot.load_plugins('plugins')
desubot.bot.join('#Moto-chan')
+ desubot.bot.join('#animu')
+ desubot.bot.join('#anime-planet.com')
thread = threading.Thread(target=worker)
thread.start()
while True:
+ try:
- msg = input()
+ msg = input()
- if msg.startswith(':'):
+ if msg.startswith(':'):
- desubot.bot.load_plugins('plugins')
+ desubot.bot.load_plugins('plugins')
- else:
+ else:
- desubot.bot.send(msg)
+ desubot.bot.send(msg)
+ except:
+ traceback.print_exc()
if __name__ == '__main__':
main()
else:
- bot = IRCBot('desutest', 'irc.rizon.net', command_prefix='!')
+ bot = IRCBot('desubot', 'irc.rizon.net', command_prefix='!')
|
Make exception on reload not crash input
|
## Code Before:
from motobot.irc_bot import IRCBot, IRCLevel
import desubot
import threading
def worker():
desubot.bot.run()
def main():
desubot.bot.load_plugins('plugins')
desubot.bot.join('#Moto-chan')
thread = threading.Thread(target=worker)
thread.start()
while True:
msg = input()
if msg.startswith(':'):
desubot.bot.load_plugins('plugins')
else:
desubot.bot.send(msg)
if __name__ == '__main__':
main()
else:
bot = IRCBot('desutest', 'irc.rizon.net', command_prefix='!')
## Instruction:
Make exception on reload not crash input
## Code After:
from motobot.irc_bot import IRCBot, IRCLevel
import desubot
import threading
import traceback
def worker():
desubot.bot.run()
def main():
desubot.bot.load_plugins('plugins')
desubot.bot.join('#Moto-chan')
desubot.bot.join('#animu')
desubot.bot.join('#anime-planet.com')
thread = threading.Thread(target=worker)
thread.start()
while True:
try:
msg = input()
if msg.startswith(':'):
desubot.bot.load_plugins('plugins')
else:
desubot.bot.send(msg)
except:
traceback.print_exc()
if __name__ == '__main__':
main()
else:
bot = IRCBot('desubot', 'irc.rizon.net', command_prefix='!')
|
from motobot.irc_bot import IRCBot, IRCLevel
import desubot
import threading
+ import traceback
def worker():
desubot.bot.run()
def main():
desubot.bot.load_plugins('plugins')
desubot.bot.join('#Moto-chan')
+ desubot.bot.join('#animu')
+ desubot.bot.join('#anime-planet.com')
thread = threading.Thread(target=worker)
thread.start()
while True:
+ try:
- msg = input()
+ msg = input()
? ++++
- if msg.startswith(':'):
+ if msg.startswith(':'):
? ++++
- desubot.bot.load_plugins('plugins')
+ desubot.bot.load_plugins('plugins')
? ++++
- else:
+ else:
? ++++
- desubot.bot.send(msg)
+ desubot.bot.send(msg)
? ++++
+ except:
+ traceback.print_exc()
if __name__ == '__main__':
main()
else:
- bot = IRCBot('desutest', 'irc.rizon.net', command_prefix='!')
? ^^^
+ bot = IRCBot('desubot', 'irc.rizon.net', command_prefix='!')
? ^^
|
2404e11c06418cc72b1a486d7d62d9d719cfe263
|
regression/tests/studio/test_studio_login.py
|
regression/tests/studio/test_studio_login.py
|
import os
from flaky import flaky
from bok_choy.web_app_test import WebAppTest
from regression.pages.studio.studio_home import DashboardPageExtended
from regression.pages.studio.login_studio import StudioLogin
from regression.pages.studio.logout_studio import StudioLogout
class StudioUserLogin(WebAppTest):
"""
Test for logging in and out to Studio
"""
DEMO_COURSE_USER = os.environ.get('USER_LOGIN_EMAIL')
DEMO_COURSE_PASSWORD = os.environ.get('USER_LOGIN_PASSWORD')
def setUp(self):
"""
Initialize the page object
"""
super(StudioUserLogin, self).setUp()
self.studio_login_page = StudioLogin(self.browser)
self.studio_home_page = DashboardPageExtended(self.browser)
self.studio_logout_page = StudioLogout(self.browser)
@flaky # TODO: See https://openedx.atlassian.net/browse/LT-65
def test_studio_login_logout(self):
"""
Verifies that user can login and logout successfully
"""
self.studio_login_page.visit()
self.studio_login_page.login(self.DEMO_COURSE_USER,
self.DEMO_COURSE_PASSWORD)
self.studio_home_page.wait_for_page()
self.studio_home_page.click_logout_button()
self.studio_logout_page.wait_for_page()
|
import os
from bok_choy.web_app_test import WebAppTest
from regression.pages.studio.studio_home import DashboardPageExtended
from regression.pages.studio.login_studio import StudioLogin
from regression.pages.studio.logout_studio import StudioLogout
class StudioUserLogin(WebAppTest):
"""
Test for logging in and out to Studio
"""
DEMO_COURSE_USER = os.environ.get('USER_LOGIN_EMAIL')
DEMO_COURSE_PASSWORD = os.environ.get('USER_LOGIN_PASSWORD')
def setUp(self):
"""
Initialize the page object
"""
super(StudioUserLogin, self).setUp()
self.studio_login_page = StudioLogin(self.browser)
self.studio_home_page = DashboardPageExtended(self.browser)
self.studio_logout_page = StudioLogout(self.browser)
def test_studio_login_logout(self):
"""
Verifies that user can login and logout successfully
"""
self.studio_login_page.visit()
self.studio_login_page.login(self.DEMO_COURSE_USER,
self.DEMO_COURSE_PASSWORD)
self.studio_home_page.wait_for_page()
self.studio_home_page.click_logout_button()
self.studio_logout_page.wait_for_page()
|
Fix flaky logout on FF 45
|
Fix flaky logout on FF 45
|
Python
|
agpl-3.0
|
edx/edx-e2e-tests,edx/edx-e2e-tests
|
import os
- from flaky import flaky
+
from bok_choy.web_app_test import WebAppTest
+
from regression.pages.studio.studio_home import DashboardPageExtended
from regression.pages.studio.login_studio import StudioLogin
from regression.pages.studio.logout_studio import StudioLogout
class StudioUserLogin(WebAppTest):
"""
Test for logging in and out to Studio
"""
DEMO_COURSE_USER = os.environ.get('USER_LOGIN_EMAIL')
DEMO_COURSE_PASSWORD = os.environ.get('USER_LOGIN_PASSWORD')
def setUp(self):
"""
Initialize the page object
"""
super(StudioUserLogin, self).setUp()
self.studio_login_page = StudioLogin(self.browser)
self.studio_home_page = DashboardPageExtended(self.browser)
self.studio_logout_page = StudioLogout(self.browser)
- @flaky # TODO: See https://openedx.atlassian.net/browse/LT-65
def test_studio_login_logout(self):
"""
Verifies that user can login and logout successfully
"""
self.studio_login_page.visit()
self.studio_login_page.login(self.DEMO_COURSE_USER,
self.DEMO_COURSE_PASSWORD)
self.studio_home_page.wait_for_page()
self.studio_home_page.click_logout_button()
self.studio_logout_page.wait_for_page()
|
Fix flaky logout on FF 45
|
## Code Before:
import os
from flaky import flaky
from bok_choy.web_app_test import WebAppTest
from regression.pages.studio.studio_home import DashboardPageExtended
from regression.pages.studio.login_studio import StudioLogin
from regression.pages.studio.logout_studio import StudioLogout
class StudioUserLogin(WebAppTest):
"""
Test for logging in and out to Studio
"""
DEMO_COURSE_USER = os.environ.get('USER_LOGIN_EMAIL')
DEMO_COURSE_PASSWORD = os.environ.get('USER_LOGIN_PASSWORD')
def setUp(self):
"""
Initialize the page object
"""
super(StudioUserLogin, self).setUp()
self.studio_login_page = StudioLogin(self.browser)
self.studio_home_page = DashboardPageExtended(self.browser)
self.studio_logout_page = StudioLogout(self.browser)
@flaky # TODO: See https://openedx.atlassian.net/browse/LT-65
def test_studio_login_logout(self):
"""
Verifies that user can login and logout successfully
"""
self.studio_login_page.visit()
self.studio_login_page.login(self.DEMO_COURSE_USER,
self.DEMO_COURSE_PASSWORD)
self.studio_home_page.wait_for_page()
self.studio_home_page.click_logout_button()
self.studio_logout_page.wait_for_page()
## Instruction:
Fix flaky logout on FF 45
## Code After:
import os
from bok_choy.web_app_test import WebAppTest
from regression.pages.studio.studio_home import DashboardPageExtended
from regression.pages.studio.login_studio import StudioLogin
from regression.pages.studio.logout_studio import StudioLogout
class StudioUserLogin(WebAppTest):
"""
Test for logging in and out to Studio
"""
DEMO_COURSE_USER = os.environ.get('USER_LOGIN_EMAIL')
DEMO_COURSE_PASSWORD = os.environ.get('USER_LOGIN_PASSWORD')
def setUp(self):
"""
Initialize the page object
"""
super(StudioUserLogin, self).setUp()
self.studio_login_page = StudioLogin(self.browser)
self.studio_home_page = DashboardPageExtended(self.browser)
self.studio_logout_page = StudioLogout(self.browser)
def test_studio_login_logout(self):
"""
Verifies that user can login and logout successfully
"""
self.studio_login_page.visit()
self.studio_login_page.login(self.DEMO_COURSE_USER,
self.DEMO_COURSE_PASSWORD)
self.studio_home_page.wait_for_page()
self.studio_home_page.click_logout_button()
self.studio_logout_page.wait_for_page()
|
import os
- from flaky import flaky
+
from bok_choy.web_app_test import WebAppTest
+
from regression.pages.studio.studio_home import DashboardPageExtended
from regression.pages.studio.login_studio import StudioLogin
from regression.pages.studio.logout_studio import StudioLogout
class StudioUserLogin(WebAppTest):
"""
Test for logging in and out to Studio
"""
DEMO_COURSE_USER = os.environ.get('USER_LOGIN_EMAIL')
DEMO_COURSE_PASSWORD = os.environ.get('USER_LOGIN_PASSWORD')
def setUp(self):
"""
Initialize the page object
"""
super(StudioUserLogin, self).setUp()
self.studio_login_page = StudioLogin(self.browser)
self.studio_home_page = DashboardPageExtended(self.browser)
self.studio_logout_page = StudioLogout(self.browser)
- @flaky # TODO: See https://openedx.atlassian.net/browse/LT-65
def test_studio_login_logout(self):
"""
Verifies that user can login and logout successfully
"""
self.studio_login_page.visit()
self.studio_login_page.login(self.DEMO_COURSE_USER,
self.DEMO_COURSE_PASSWORD)
self.studio_home_page.wait_for_page()
self.studio_home_page.click_logout_button()
self.studio_logout_page.wait_for_page()
|
5fd3bed281018a556cfd6305670317bf5acb2a16
|
tests/test_auth.py
|
tests/test_auth.py
|
import random
import unittest
from .config import *
from tweepy import API, OAuthHandler
class TweepyAuthTests(unittest.TestCase):
def testoauth(self):
auth = OAuthHandler(oauth_consumer_key, oauth_consumer_secret)
# test getting access token
auth_url = auth.get_authorization_url()
print('Please authorize: ' + auth_url)
verifier = input('PIN: ').strip()
self.assertTrue(len(verifier) > 0)
access_token = auth.get_access_token(verifier)
self.assertTrue(access_token is not None)
# build api object test using oauth
api = API(auth)
s = api.update_status(f'test {random.randint(0, 1000)}')
api.destroy_status(s.id)
def testaccesstype(self):
auth = OAuthHandler(oauth_consumer_key, oauth_consumer_secret)
auth_url = auth.get_authorization_url(access_type='read')
print('Please open: ' + auth_url)
answer = input('Did Twitter only request read permissions? (y/n) ')
self.assertEqual('y', answer.lower())
|
import random
import unittest
from .config import *
from tweepy import API, OAuthHandler
class TweepyAuthTests(unittest.TestCase):
def testoauth(self):
auth = OAuthHandler(consumer_key, consumer_secret)
# test getting access token
auth_url = auth.get_authorization_url()
print('Please authorize: ' + auth_url)
verifier = input('PIN: ').strip()
self.assertTrue(len(verifier) > 0)
access_token = auth.get_access_token(verifier)
self.assertTrue(access_token is not None)
# build api object test using oauth
api = API(auth)
s = api.update_status(f'test {random.randint(0, 1000)}')
api.destroy_status(s.id)
def testaccesstype(self):
auth = OAuthHandler(consumer_key, consumer_secret)
auth_url = auth.get_authorization_url(access_type='read')
print('Please open: ' + auth_url)
answer = input('Did Twitter only request read permissions? (y/n) ')
self.assertEqual('y', answer.lower())
|
Update consumer key and secret usage in auth tests
|
Update consumer key and secret usage in auth tests
|
Python
|
mit
|
svven/tweepy,tweepy/tweepy
|
import random
import unittest
from .config import *
from tweepy import API, OAuthHandler
class TweepyAuthTests(unittest.TestCase):
def testoauth(self):
- auth = OAuthHandler(oauth_consumer_key, oauth_consumer_secret)
+ auth = OAuthHandler(consumer_key, consumer_secret)
# test getting access token
auth_url = auth.get_authorization_url()
print('Please authorize: ' + auth_url)
verifier = input('PIN: ').strip()
self.assertTrue(len(verifier) > 0)
access_token = auth.get_access_token(verifier)
self.assertTrue(access_token is not None)
# build api object test using oauth
api = API(auth)
s = api.update_status(f'test {random.randint(0, 1000)}')
api.destroy_status(s.id)
def testaccesstype(self):
- auth = OAuthHandler(oauth_consumer_key, oauth_consumer_secret)
+ auth = OAuthHandler(consumer_key, consumer_secret)
auth_url = auth.get_authorization_url(access_type='read')
print('Please open: ' + auth_url)
answer = input('Did Twitter only request read permissions? (y/n) ')
self.assertEqual('y', answer.lower())
|
Update consumer key and secret usage in auth tests
|
## Code Before:
import random
import unittest
from .config import *
from tweepy import API, OAuthHandler
class TweepyAuthTests(unittest.TestCase):
def testoauth(self):
auth = OAuthHandler(oauth_consumer_key, oauth_consumer_secret)
# test getting access token
auth_url = auth.get_authorization_url()
print('Please authorize: ' + auth_url)
verifier = input('PIN: ').strip()
self.assertTrue(len(verifier) > 0)
access_token = auth.get_access_token(verifier)
self.assertTrue(access_token is not None)
# build api object test using oauth
api = API(auth)
s = api.update_status(f'test {random.randint(0, 1000)}')
api.destroy_status(s.id)
def testaccesstype(self):
auth = OAuthHandler(oauth_consumer_key, oauth_consumer_secret)
auth_url = auth.get_authorization_url(access_type='read')
print('Please open: ' + auth_url)
answer = input('Did Twitter only request read permissions? (y/n) ')
self.assertEqual('y', answer.lower())
## Instruction:
Update consumer key and secret usage in auth tests
## Code After:
import random
import unittest
from .config import *
from tweepy import API, OAuthHandler
class TweepyAuthTests(unittest.TestCase):
def testoauth(self):
auth = OAuthHandler(consumer_key, consumer_secret)
# test getting access token
auth_url = auth.get_authorization_url()
print('Please authorize: ' + auth_url)
verifier = input('PIN: ').strip()
self.assertTrue(len(verifier) > 0)
access_token = auth.get_access_token(verifier)
self.assertTrue(access_token is not None)
# build api object test using oauth
api = API(auth)
s = api.update_status(f'test {random.randint(0, 1000)}')
api.destroy_status(s.id)
def testaccesstype(self):
auth = OAuthHandler(consumer_key, consumer_secret)
auth_url = auth.get_authorization_url(access_type='read')
print('Please open: ' + auth_url)
answer = input('Did Twitter only request read permissions? (y/n) ')
self.assertEqual('y', answer.lower())
|
import random
import unittest
from .config import *
from tweepy import API, OAuthHandler
class TweepyAuthTests(unittest.TestCase):
def testoauth(self):
- auth = OAuthHandler(oauth_consumer_key, oauth_consumer_secret)
? ------ ------
+ auth = OAuthHandler(consumer_key, consumer_secret)
# test getting access token
auth_url = auth.get_authorization_url()
print('Please authorize: ' + auth_url)
verifier = input('PIN: ').strip()
self.assertTrue(len(verifier) > 0)
access_token = auth.get_access_token(verifier)
self.assertTrue(access_token is not None)
# build api object test using oauth
api = API(auth)
s = api.update_status(f'test {random.randint(0, 1000)}')
api.destroy_status(s.id)
def testaccesstype(self):
- auth = OAuthHandler(oauth_consumer_key, oauth_consumer_secret)
? ------ ------
+ auth = OAuthHandler(consumer_key, consumer_secret)
auth_url = auth.get_authorization_url(access_type='read')
print('Please open: ' + auth_url)
answer = input('Did Twitter only request read permissions? (y/n) ')
self.assertEqual('y', answer.lower())
|
27e573d55b37869e09b8cf9809ea41e9b2ce1567
|
tests/data_test.py
|
tests/data_test.py
|
from pork.data import Data
from mock import Mock, patch
from StringIO import StringIO
patch.TEST_PREFIX = 'it'
class TestData:
def it_sets_and_gets_keys(self):
with patch("__builtin__.open", side_effect=IOError):
data = Data()
with patch("__builtin__.open"):
data.set('foo', 'bar')
assert data.get('foo') == 'bar'
def it_deletes_existing_keys(self):
with patch("__builtin__.open", side_effect=IOError):
data = Data()
with patch("__builtin__.open"):
data.set('foo', 'bar')
data.delete('foo')
assert data.get('foo') is None
def it_is_empty_if_there_are_no_keys(self):
with patch("__builtin__.open", side_effect=IOError):
data = Data()
assert data.is_empty()
def it_returns_the_data_dict(self):
with patch("__builtin__.open", side_effect=IOError):
data = Data()
data.set('foo', 'bar')
assert data.list() == { 'foo': 'bar' }
def it_fails_silently_if_it_cannot_save(self):
with patch("__builtin__.open", side_effect=IOError):
data = Data()
with patch("__builtin__.open", side_effect=ValueError):
data.set('foo', 'bar')
assert True
|
from pork.data import Data
from mock import Mock, patch, mock_open
from StringIO import StringIO
patch.TEST_PREFIX = 'it'
class TestData:
def it_loads_json_data_from_file(self):
with patch("__builtin__.open", mock_open(read_data='{"foo":"bar"}'),
create=True) as m:
data = Data()
assert data.get('foo') == 'bar'
def it_sets_and_gets_keys(self):
data = Data()
data.set('foo', 'bar')
assert data.get('foo') == 'bar'
def it_deletes_existing_keys(self):
data = Data()
data.set('foo', 'bar')
data.delete('foo')
assert data.get('foo') is None
def it_is_empty_if_there_are_no_keys(self):
data = Data()
assert data.is_empty()
def it_returns_the_data_dict(self):
data = Data()
data.set('foo', 'bar')
assert data.list() == { 'foo': 'bar' }
def it_fails_silently_if_it_cannot_save(self):
data = Data()
with patch("__builtin__.open", side_effect=ValueError):
data.set('foo', 'bar')
assert True
|
Use mock_open and remove unnecessary stubbing of open.
|
Use mock_open and remove unnecessary stubbing of open.
|
Python
|
mit
|
jimmycuadra/pork,jimmycuadra/pork
|
from pork.data import Data
- from mock import Mock, patch
+ from mock import Mock, patch, mock_open
from StringIO import StringIO
patch.TEST_PREFIX = 'it'
class TestData:
- def it_sets_and_gets_keys(self):
- with patch("__builtin__.open", side_effect=IOError):
+ def it_loads_json_data_from_file(self):
+ with patch("__builtin__.open", mock_open(read_data='{"foo":"bar"}'),
+ create=True) as m:
data = Data()
- with patch("__builtin__.open"):
- data.set('foo', 'bar')
assert data.get('foo') == 'bar'
+ def it_sets_and_gets_keys(self):
+ data = Data()
+ data.set('foo', 'bar')
+ assert data.get('foo') == 'bar'
+
def it_deletes_existing_keys(self):
- with patch("__builtin__.open", side_effect=IOError):
- data = Data()
+ data = Data()
- with patch("__builtin__.open"):
- data.set('foo', 'bar')
+ data.set('foo', 'bar')
- data.delete('foo')
+ data.delete('foo')
- assert data.get('foo') is None
+ assert data.get('foo') is None
def it_is_empty_if_there_are_no_keys(self):
- with patch("__builtin__.open", side_effect=IOError):
- data = Data()
+ data = Data()
assert data.is_empty()
def it_returns_the_data_dict(self):
- with patch("__builtin__.open", side_effect=IOError):
- data = Data()
+ data = Data()
data.set('foo', 'bar')
assert data.list() == { 'foo': 'bar' }
def it_fails_silently_if_it_cannot_save(self):
- with patch("__builtin__.open", side_effect=IOError):
- data = Data()
+ data = Data()
with patch("__builtin__.open", side_effect=ValueError):
data.set('foo', 'bar')
assert True
|
Use mock_open and remove unnecessary stubbing of open.
|
## Code Before:
from pork.data import Data
from mock import Mock, patch
from StringIO import StringIO
patch.TEST_PREFIX = 'it'
class TestData:
def it_sets_and_gets_keys(self):
with patch("__builtin__.open", side_effect=IOError):
data = Data()
with patch("__builtin__.open"):
data.set('foo', 'bar')
assert data.get('foo') == 'bar'
def it_deletes_existing_keys(self):
with patch("__builtin__.open", side_effect=IOError):
data = Data()
with patch("__builtin__.open"):
data.set('foo', 'bar')
data.delete('foo')
assert data.get('foo') is None
def it_is_empty_if_there_are_no_keys(self):
with patch("__builtin__.open", side_effect=IOError):
data = Data()
assert data.is_empty()
def it_returns_the_data_dict(self):
with patch("__builtin__.open", side_effect=IOError):
data = Data()
data.set('foo', 'bar')
assert data.list() == { 'foo': 'bar' }
def it_fails_silently_if_it_cannot_save(self):
with patch("__builtin__.open", side_effect=IOError):
data = Data()
with patch("__builtin__.open", side_effect=ValueError):
data.set('foo', 'bar')
assert True
## Instruction:
Use mock_open and remove unnecessary stubbing of open.
## Code After:
from pork.data import Data
from mock import Mock, patch, mock_open
from StringIO import StringIO
patch.TEST_PREFIX = 'it'
class TestData:
def it_loads_json_data_from_file(self):
with patch("__builtin__.open", mock_open(read_data='{"foo":"bar"}'),
create=True) as m:
data = Data()
assert data.get('foo') == 'bar'
def it_sets_and_gets_keys(self):
data = Data()
data.set('foo', 'bar')
assert data.get('foo') == 'bar'
def it_deletes_existing_keys(self):
data = Data()
data.set('foo', 'bar')
data.delete('foo')
assert data.get('foo') is None
def it_is_empty_if_there_are_no_keys(self):
data = Data()
assert data.is_empty()
def it_returns_the_data_dict(self):
data = Data()
data.set('foo', 'bar')
assert data.list() == { 'foo': 'bar' }
def it_fails_silently_if_it_cannot_save(self):
data = Data()
with patch("__builtin__.open", side_effect=ValueError):
data.set('foo', 'bar')
assert True
|
from pork.data import Data
- from mock import Mock, patch
+ from mock import Mock, patch, mock_open
? +++++++++++
from StringIO import StringIO
patch.TEST_PREFIX = 'it'
class TestData:
- def it_sets_and_gets_keys(self):
- with patch("__builtin__.open", side_effect=IOError):
+ def it_loads_json_data_from_file(self):
+ with patch("__builtin__.open", mock_open(read_data='{"foo":"bar"}'),
+ create=True) as m:
data = Data()
- with patch("__builtin__.open"):
- data.set('foo', 'bar')
assert data.get('foo') == 'bar'
+ def it_sets_and_gets_keys(self):
+ data = Data()
+ data.set('foo', 'bar')
+ assert data.get('foo') == 'bar'
+
def it_deletes_existing_keys(self):
- with patch("__builtin__.open", side_effect=IOError):
- data = Data()
? ----
+ data = Data()
- with patch("__builtin__.open"):
- data.set('foo', 'bar')
? ----
+ data.set('foo', 'bar')
- data.delete('foo')
? ----
+ data.delete('foo')
- assert data.get('foo') is None
? ----
+ assert data.get('foo') is None
def it_is_empty_if_there_are_no_keys(self):
- with patch("__builtin__.open", side_effect=IOError):
- data = Data()
? ----
+ data = Data()
assert data.is_empty()
def it_returns_the_data_dict(self):
- with patch("__builtin__.open", side_effect=IOError):
- data = Data()
? ----
+ data = Data()
data.set('foo', 'bar')
assert data.list() == { 'foo': 'bar' }
def it_fails_silently_if_it_cannot_save(self):
- with patch("__builtin__.open", side_effect=IOError):
- data = Data()
? ----
+ data = Data()
with patch("__builtin__.open", side_effect=ValueError):
data.set('foo', 'bar')
assert True
|
d8d01d89710cd1d752809b8cd91d934092e99adf
|
pythonforandroid/recipes/ruamel.yaml/__init__.py
|
pythonforandroid/recipes/ruamel.yaml/__init__.py
|
from pythonforandroid.toolchain import PythonRecipe
class RuamelYamlRecipe(PythonRecipe):
version = '0.14.5'
url = 'https://pypi.python.org/packages/5c/13/c120a06b3add0f9763ca9190e5f6edb9faf9d34b158dd3cff7cc9097be03/ruamel.yaml-{version}.tar.gz'
depends = [ ('python2', 'python3crystax') ]
site_packages_name = 'ruamel'
call_hostpython_via_targetpython = False
patches = ['disable-pip-req.patch']
recipe = RuamelYamlRecipe()
|
from pythonforandroid.recipe import PythonRecipe
class RuamelYamlRecipe(PythonRecipe):
version = '0.15.77'
url = 'https://pypi.python.org/packages/source/r/ruamel.yaml/ruamel.yaml-{version}.tar.gz'
depends = [('python2', 'python3crystax'), 'setuptools']
site_packages_name = 'ruamel'
call_hostpython_via_targetpython = False
patches = ['disable-pip-req.patch']
recipe = RuamelYamlRecipe()
|
Update to last version, fixes import and deps
|
Update to last version, fixes import and deps
|
Python
|
mit
|
kronenpj/python-for-android,kronenpj/python-for-android,germn/python-for-android,kivy/python-for-android,kronenpj/python-for-android,kivy/python-for-android,rnixx/python-for-android,rnixx/python-for-android,germn/python-for-android,kivy/python-for-android,PKRoma/python-for-android,germn/python-for-android,germn/python-for-android,kivy/python-for-android,PKRoma/python-for-android,rnixx/python-for-android,germn/python-for-android,germn/python-for-android,PKRoma/python-for-android,rnixx/python-for-android,rnixx/python-for-android,PKRoma/python-for-android,kivy/python-for-android,kronenpj/python-for-android,PKRoma/python-for-android,kronenpj/python-for-android,rnixx/python-for-android
|
- from pythonforandroid.toolchain import PythonRecipe
+ from pythonforandroid.recipe import PythonRecipe
class RuamelYamlRecipe(PythonRecipe):
- version = '0.14.5'
+ version = '0.15.77'
+ url = 'https://pypi.python.org/packages/source/r/ruamel.yaml/ruamel.yaml-{version}.tar.gz'
- url = 'https://pypi.python.org/packages/5c/13/c120a06b3add0f9763ca9190e5f6edb9faf9d34b158dd3cff7cc9097be03/ruamel.yaml-{version}.tar.gz'
-
- depends = [ ('python2', 'python3crystax') ]
+ depends = [('python2', 'python3crystax'), 'setuptools']
site_packages_name = 'ruamel'
call_hostpython_via_targetpython = False
+ patches = ['disable-pip-req.patch']
- patches = ['disable-pip-req.patch']
recipe = RuamelYamlRecipe()
|
Update to last version, fixes import and deps
|
## Code Before:
from pythonforandroid.toolchain import PythonRecipe
class RuamelYamlRecipe(PythonRecipe):
version = '0.14.5'
url = 'https://pypi.python.org/packages/5c/13/c120a06b3add0f9763ca9190e5f6edb9faf9d34b158dd3cff7cc9097be03/ruamel.yaml-{version}.tar.gz'
depends = [ ('python2', 'python3crystax') ]
site_packages_name = 'ruamel'
call_hostpython_via_targetpython = False
patches = ['disable-pip-req.patch']
recipe = RuamelYamlRecipe()
## Instruction:
Update to last version, fixes import and deps
## Code After:
from pythonforandroid.recipe import PythonRecipe
class RuamelYamlRecipe(PythonRecipe):
version = '0.15.77'
url = 'https://pypi.python.org/packages/source/r/ruamel.yaml/ruamel.yaml-{version}.tar.gz'
depends = [('python2', 'python3crystax'), 'setuptools']
site_packages_name = 'ruamel'
call_hostpython_via_targetpython = False
patches = ['disable-pip-req.patch']
recipe = RuamelYamlRecipe()
|
- from pythonforandroid.toolchain import PythonRecipe
? ^^^^ -- ^
+ from pythonforandroid.recipe import PythonRecipe
? ^^ ^^
class RuamelYamlRecipe(PythonRecipe):
- version = '0.14.5'
? ^ ^
+ version = '0.15.77'
? ^ ^^
+ url = 'https://pypi.python.org/packages/source/r/ruamel.yaml/ruamel.yaml-{version}.tar.gz'
- url = 'https://pypi.python.org/packages/5c/13/c120a06b3add0f9763ca9190e5f6edb9faf9d34b158dd3cff7cc9097be03/ruamel.yaml-{version}.tar.gz'
-
- depends = [ ('python2', 'python3crystax') ]
? -
+ depends = [('python2', 'python3crystax'), 'setuptools']
? + ++++++++++++
site_packages_name = 'ruamel'
call_hostpython_via_targetpython = False
-
patches = ['disable-pip-req.patch']
+
recipe = RuamelYamlRecipe()
|
b43e06dd5a80814e15ce20f50d683f0daaa19a93
|
addons/hr/models/hr_employee_base.py
|
addons/hr/models/hr_employee_base.py
|
from odoo import fields, models
class HrEmployeeBase(models.AbstractModel):
_name = "hr.employee.base"
_description = "Basic Employee"
_order = 'name'
name = fields.Char()
active = fields.Boolean("Active")
department_id = fields.Many2one('hr.department', 'Department')
job_id = fields.Many2one('hr.job', 'Job Position')
job_title = fields.Char("Job Title")
company_id = fields.Many2one('res.company', 'Company')
address_id = fields.Many2one('res.partner', 'Work Address')
work_phone = fields.Char('Work Phone')
mobile_phone = fields.Char('Work Mobile')
work_email = fields.Char('Work Email')
work_location = fields.Char('Work Location')
user_id = fields.Many2one('res.users')
resource_id = fields.Many2one('resource.resource')
resource_calendar_id = fields.Many2one('resource.calendar')
|
from odoo import fields, models
class HrEmployeeBase(models.AbstractModel):
_name = "hr.employee.base"
_description = "Basic Employee"
_order = 'name'
name = fields.Char()
active = fields.Boolean("Active")
color = fields.Integer('Color Index', default=0)
department_id = fields.Many2one('hr.department', 'Department')
job_id = fields.Many2one('hr.job', 'Job Position')
job_title = fields.Char("Job Title")
company_id = fields.Many2one('res.company', 'Company')
address_id = fields.Many2one('res.partner', 'Work Address')
work_phone = fields.Char('Work Phone')
mobile_phone = fields.Char('Work Mobile')
work_email = fields.Char('Work Email')
work_location = fields.Char('Work Location')
user_id = fields.Many2one('res.users')
resource_id = fields.Many2one('resource.resource')
resource_calendar_id = fields.Many2one('resource.calendar')
|
Add the color field to public employee
|
[FIX] hr: Add the color field to public employee
The color field is necessary to be able to display some fields
(many2many_tags) and used in the kanban views
closes odoo/odoo#35216
Signed-off-by: Yannick Tivisse (yti) <[email protected]>
closes odoo/odoo#35462
Signed-off-by: Romain Libert (rli) <[email protected]>
|
Python
|
agpl-3.0
|
ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo
|
from odoo import fields, models
class HrEmployeeBase(models.AbstractModel):
_name = "hr.employee.base"
_description = "Basic Employee"
_order = 'name'
name = fields.Char()
active = fields.Boolean("Active")
+ color = fields.Integer('Color Index', default=0)
department_id = fields.Many2one('hr.department', 'Department')
job_id = fields.Many2one('hr.job', 'Job Position')
job_title = fields.Char("Job Title")
company_id = fields.Many2one('res.company', 'Company')
address_id = fields.Many2one('res.partner', 'Work Address')
work_phone = fields.Char('Work Phone')
mobile_phone = fields.Char('Work Mobile')
work_email = fields.Char('Work Email')
work_location = fields.Char('Work Location')
user_id = fields.Many2one('res.users')
resource_id = fields.Many2one('resource.resource')
resource_calendar_id = fields.Many2one('resource.calendar')
|
Add the color field to public employee
|
## Code Before:
from odoo import fields, models
class HrEmployeeBase(models.AbstractModel):
_name = "hr.employee.base"
_description = "Basic Employee"
_order = 'name'
name = fields.Char()
active = fields.Boolean("Active")
department_id = fields.Many2one('hr.department', 'Department')
job_id = fields.Many2one('hr.job', 'Job Position')
job_title = fields.Char("Job Title")
company_id = fields.Many2one('res.company', 'Company')
address_id = fields.Many2one('res.partner', 'Work Address')
work_phone = fields.Char('Work Phone')
mobile_phone = fields.Char('Work Mobile')
work_email = fields.Char('Work Email')
work_location = fields.Char('Work Location')
user_id = fields.Many2one('res.users')
resource_id = fields.Many2one('resource.resource')
resource_calendar_id = fields.Many2one('resource.calendar')
## Instruction:
Add the color field to public employee
## Code After:
from odoo import fields, models
class HrEmployeeBase(models.AbstractModel):
_name = "hr.employee.base"
_description = "Basic Employee"
_order = 'name'
name = fields.Char()
active = fields.Boolean("Active")
color = fields.Integer('Color Index', default=0)
department_id = fields.Many2one('hr.department', 'Department')
job_id = fields.Many2one('hr.job', 'Job Position')
job_title = fields.Char("Job Title")
company_id = fields.Many2one('res.company', 'Company')
address_id = fields.Many2one('res.partner', 'Work Address')
work_phone = fields.Char('Work Phone')
mobile_phone = fields.Char('Work Mobile')
work_email = fields.Char('Work Email')
work_location = fields.Char('Work Location')
user_id = fields.Many2one('res.users')
resource_id = fields.Many2one('resource.resource')
resource_calendar_id = fields.Many2one('resource.calendar')
|
from odoo import fields, models
class HrEmployeeBase(models.AbstractModel):
_name = "hr.employee.base"
_description = "Basic Employee"
_order = 'name'
name = fields.Char()
active = fields.Boolean("Active")
+ color = fields.Integer('Color Index', default=0)
department_id = fields.Many2one('hr.department', 'Department')
job_id = fields.Many2one('hr.job', 'Job Position')
job_title = fields.Char("Job Title")
company_id = fields.Many2one('res.company', 'Company')
address_id = fields.Many2one('res.partner', 'Work Address')
work_phone = fields.Char('Work Phone')
mobile_phone = fields.Char('Work Mobile')
work_email = fields.Char('Work Email')
work_location = fields.Char('Work Location')
user_id = fields.Many2one('res.users')
resource_id = fields.Many2one('resource.resource')
resource_calendar_id = fields.Many2one('resource.calendar')
|
4076fb322814848d802d1f925d163e90b3d629a9
|
selenium_testcase/testcases/forms.py
|
selenium_testcase/testcases/forms.py
|
from __future__ import absolute_import
from selenium.webdriver.common.by import By
from .utils import wait_for
class FormTestMixin:
# default search element
form_search_list = (
(By.ID, '{}',),
(By.NAME, '{}',),
(By.XPATH, '//form[@action="{}"]',),
(By.XPATH, '//form[@name="{}"]',),
(By.XPATH, '//form/*',),
)
@wait_for
def get_form(self, *args, **kwargs):
""" Return form element or None. """
return self.find_element(
self.form_search_list, *args, **kwargs)
input_search_list = (
(By.ID, '{}',),
(By.NAME, '{}',),
)
@wait_for
def set_input(self, field, value, **kwargs):
input = self.find_element(
self.input_search_list, field, **kwargs)
input.clear()
input.send_keys(value)
return input
|
from __future__ import absolute_import
from selenium.webdriver.common.by import By
from .utils import wait_for
class FormTestMixin:
# default search element
form_search_list = (
(By.ID, '{}',),
(By.NAME, '{}',),
(By.XPATH, '//form[@action="{}"]',),
(By.XPATH, '//form[@name="{}"]',),
(By.XPATH, '//form',),
)
@wait_for
def get_form(self, *args, **kwargs):
""" Return form element or None. """
return self.find_element(
self.form_search_list, *args, **kwargs)
input_search_list = (
(By.ID, '{}',),
(By.NAME, '{}',),
)
@wait_for
def get_input(self, field, **kwargs):
""" Return matching input field. """
return self.find_element(
self.input_search_list, field, **kwargs)
def set_input(self, field, value, **kwargs):
""" Clear the field and enter value. """
element = self.get_input(field, **kwargs)
element.clear()
element.send_keys(value)
return element
|
Split get_input from set_input in FormTestMixin.
|
Split get_input from set_input in FormTestMixin.
In order to reduce side-effects, this commit moves the @wait_for to
a get_input method and set_input operates immediately.
|
Python
|
bsd-3-clause
|
nimbis/django-selenium-testcase,nimbis/django-selenium-testcase
|
from __future__ import absolute_import
from selenium.webdriver.common.by import By
from .utils import wait_for
class FormTestMixin:
# default search element
form_search_list = (
(By.ID, '{}',),
(By.NAME, '{}',),
(By.XPATH, '//form[@action="{}"]',),
(By.XPATH, '//form[@name="{}"]',),
- (By.XPATH, '//form/*',),
+ (By.XPATH, '//form',),
)
@wait_for
def get_form(self, *args, **kwargs):
""" Return form element or None. """
return self.find_element(
self.form_search_list, *args, **kwargs)
input_search_list = (
(By.ID, '{}',),
(By.NAME, '{}',),
)
@wait_for
+ def get_input(self, field, **kwargs):
+ """ Return matching input field. """
+ return self.find_element(
+ self.input_search_list, field, **kwargs)
+
def set_input(self, field, value, **kwargs):
- input = self.find_element(
- self.input_search_list, field, **kwargs)
+ """ Clear the field and enter value. """
+ element = self.get_input(field, **kwargs)
- input.clear()
+ element.clear()
- input.send_keys(value)
+ element.send_keys(value)
- return input
+ return element
|
Split get_input from set_input in FormTestMixin.
|
## Code Before:
from __future__ import absolute_import
from selenium.webdriver.common.by import By
from .utils import wait_for
class FormTestMixin:
# default search element
form_search_list = (
(By.ID, '{}',),
(By.NAME, '{}',),
(By.XPATH, '//form[@action="{}"]',),
(By.XPATH, '//form[@name="{}"]',),
(By.XPATH, '//form/*',),
)
@wait_for
def get_form(self, *args, **kwargs):
""" Return form element or None. """
return self.find_element(
self.form_search_list, *args, **kwargs)
input_search_list = (
(By.ID, '{}',),
(By.NAME, '{}',),
)
@wait_for
def set_input(self, field, value, **kwargs):
input = self.find_element(
self.input_search_list, field, **kwargs)
input.clear()
input.send_keys(value)
return input
## Instruction:
Split get_input from set_input in FormTestMixin.
## Code After:
from __future__ import absolute_import
from selenium.webdriver.common.by import By
from .utils import wait_for
class FormTestMixin:
# default search element
form_search_list = (
(By.ID, '{}',),
(By.NAME, '{}',),
(By.XPATH, '//form[@action="{}"]',),
(By.XPATH, '//form[@name="{}"]',),
(By.XPATH, '//form',),
)
@wait_for
def get_form(self, *args, **kwargs):
""" Return form element or None. """
return self.find_element(
self.form_search_list, *args, **kwargs)
input_search_list = (
(By.ID, '{}',),
(By.NAME, '{}',),
)
@wait_for
def get_input(self, field, **kwargs):
""" Return matching input field. """
return self.find_element(
self.input_search_list, field, **kwargs)
def set_input(self, field, value, **kwargs):
""" Clear the field and enter value. """
element = self.get_input(field, **kwargs)
element.clear()
element.send_keys(value)
return element
|
from __future__ import absolute_import
from selenium.webdriver.common.by import By
from .utils import wait_for
class FormTestMixin:
# default search element
form_search_list = (
(By.ID, '{}',),
(By.NAME, '{}',),
(By.XPATH, '//form[@action="{}"]',),
(By.XPATH, '//form[@name="{}"]',),
- (By.XPATH, '//form/*',),
? --
+ (By.XPATH, '//form',),
)
@wait_for
def get_form(self, *args, **kwargs):
""" Return form element or None. """
return self.find_element(
self.form_search_list, *args, **kwargs)
input_search_list = (
(By.ID, '{}',),
(By.NAME, '{}',),
)
@wait_for
+ def get_input(self, field, **kwargs):
+ """ Return matching input field. """
+ return self.find_element(
+ self.input_search_list, field, **kwargs)
+
def set_input(self, field, value, **kwargs):
- input = self.find_element(
- self.input_search_list, field, **kwargs)
+ """ Clear the field and enter value. """
+ element = self.get_input(field, **kwargs)
- input.clear()
? ^ --
+ element.clear()
? ^^^^^
- input.send_keys(value)
? ^ --
+ element.send_keys(value)
? ^^^^^
- return input
? ^ --
+ return element
? ^^^^^
|
5ddaf8f653df98752ca67b8bfec9b8adfcf168b1
|
pgcli/pgtoolbar.py
|
pgcli/pgtoolbar.py
|
from prompt_toolkit.layout.toolbars import Toolbar
from prompt_toolkit.layout.utils import TokenList
from pygments.token import Token
class PGToolbar(Toolbar):
def __init__(self, token=None):
token = token or Token.Toolbar.Status
super(self.__class__, self).__init__(token=token)
def get_tokens(self, cli, width):
result = TokenList()
result.append((self.token, ' '))
if cli.current_buffer.completer.smart_completion:
result.append((self.token.On, '[F2] Smart Completion: ON '))
else:
result.append((self.token.Off, '[F2] Smart Completion: OFF '))
if cli.current_buffer.always_multiline:
result.append((self.token.On, '[F3] Multiline: ON'))
else:
result.append((self.token.Off, '[F3] Multiline: OFF'))
if cli.current_buffer.always_multiline:
result.append((self.token,
' (Semi-colon [;] will end the line)'))
return result
|
from prompt_toolkit.layout.toolbars import Toolbar
from prompt_toolkit.layout.utils import TokenList
from pygments.token import Token
class PGToolbar(Toolbar):
def __init__(self, token=None):
token = token or Token.Toolbar.Status
super(self.__class__, self).__init__(token=token)
def get_tokens(self, cli, width):
result = TokenList()
result.append((self.token, ' '))
if cli.buffers['default'].completer.smart_completion:
result.append((self.token.On, '[F2] Smart Completion: ON '))
else:
result.append((self.token.Off, '[F2] Smart Completion: OFF '))
if cli.buffers['default'].always_multiline:
result.append((self.token.On, '[F3] Multiline: ON'))
else:
result.append((self.token.Off, '[F3] Multiline: OFF'))
if cli.buffers['default'].always_multiline:
result.append((self.token,
' (Semi-colon [;] will end the line)'))
return result
|
Use the default buffer instead of current_buffer.
|
Use the default buffer instead of current_buffer.
|
Python
|
bsd-3-clause
|
nosun/pgcli,TamasNo1/pgcli,d33tah/pgcli,MattOates/pgcli,suzukaze/pgcli,zhiyuanshi/pgcli,dbcli/vcli,bitemyapp/pgcli,janusnic/pgcli,thedrow/pgcli,dbcli/pgcli,bitemyapp/pgcli,n-someya/pgcli,koljonen/pgcli,johshoff/pgcli,d33tah/pgcli,koljonen/pgcli,lk1ngaa7/pgcli,darikg/pgcli,w4ngyi/pgcli,j-bennet/pgcli,MattOates/pgcli,darikg/pgcli,w4ngyi/pgcli,dbcli/vcli,TamasNo1/pgcli,joewalnes/pgcli,j-bennet/pgcli,nosun/pgcli,yx91490/pgcli,johshoff/pgcli,n-someya/pgcli,janusnic/pgcli,zhiyuanshi/pgcli,dbcli/pgcli,thedrow/pgcli,bitmonk/pgcli,joewalnes/pgcli,lk1ngaa7/pgcli,yx91490/pgcli,bitmonk/pgcli,suzukaze/pgcli
|
from prompt_toolkit.layout.toolbars import Toolbar
from prompt_toolkit.layout.utils import TokenList
from pygments.token import Token
class PGToolbar(Toolbar):
def __init__(self, token=None):
token = token or Token.Toolbar.Status
super(self.__class__, self).__init__(token=token)
def get_tokens(self, cli, width):
result = TokenList()
result.append((self.token, ' '))
- if cli.current_buffer.completer.smart_completion:
+ if cli.buffers['default'].completer.smart_completion:
result.append((self.token.On, '[F2] Smart Completion: ON '))
else:
result.append((self.token.Off, '[F2] Smart Completion: OFF '))
- if cli.current_buffer.always_multiline:
+ if cli.buffers['default'].always_multiline:
result.append((self.token.On, '[F3] Multiline: ON'))
else:
result.append((self.token.Off, '[F3] Multiline: OFF'))
- if cli.current_buffer.always_multiline:
+ if cli.buffers['default'].always_multiline:
result.append((self.token,
' (Semi-colon [;] will end the line)'))
return result
|
Use the default buffer instead of current_buffer.
|
## Code Before:
from prompt_toolkit.layout.toolbars import Toolbar
from prompt_toolkit.layout.utils import TokenList
from pygments.token import Token
class PGToolbar(Toolbar):
def __init__(self, token=None):
token = token or Token.Toolbar.Status
super(self.__class__, self).__init__(token=token)
def get_tokens(self, cli, width):
result = TokenList()
result.append((self.token, ' '))
if cli.current_buffer.completer.smart_completion:
result.append((self.token.On, '[F2] Smart Completion: ON '))
else:
result.append((self.token.Off, '[F2] Smart Completion: OFF '))
if cli.current_buffer.always_multiline:
result.append((self.token.On, '[F3] Multiline: ON'))
else:
result.append((self.token.Off, '[F3] Multiline: OFF'))
if cli.current_buffer.always_multiline:
result.append((self.token,
' (Semi-colon [;] will end the line)'))
return result
## Instruction:
Use the default buffer instead of current_buffer.
## Code After:
from prompt_toolkit.layout.toolbars import Toolbar
from prompt_toolkit.layout.utils import TokenList
from pygments.token import Token
class PGToolbar(Toolbar):
def __init__(self, token=None):
token = token or Token.Toolbar.Status
super(self.__class__, self).__init__(token=token)
def get_tokens(self, cli, width):
result = TokenList()
result.append((self.token, ' '))
if cli.buffers['default'].completer.smart_completion:
result.append((self.token.On, '[F2] Smart Completion: ON '))
else:
result.append((self.token.Off, '[F2] Smart Completion: OFF '))
if cli.buffers['default'].always_multiline:
result.append((self.token.On, '[F3] Multiline: ON'))
else:
result.append((self.token.Off, '[F3] Multiline: OFF'))
if cli.buffers['default'].always_multiline:
result.append((self.token,
' (Semi-colon [;] will end the line)'))
return result
|
from prompt_toolkit.layout.toolbars import Toolbar
from prompt_toolkit.layout.utils import TokenList
from pygments.token import Token
class PGToolbar(Toolbar):
def __init__(self, token=None):
token = token or Token.Toolbar.Status
super(self.__class__, self).__init__(token=token)
def get_tokens(self, cli, width):
result = TokenList()
result.append((self.token, ' '))
- if cli.current_buffer.completer.smart_completion:
? --------
+ if cli.buffers['default'].completer.smart_completion:
? ++++++++++++
result.append((self.token.On, '[F2] Smart Completion: ON '))
else:
result.append((self.token.Off, '[F2] Smart Completion: OFF '))
- if cli.current_buffer.always_multiline:
? --------
+ if cli.buffers['default'].always_multiline:
? ++++++++++++
result.append((self.token.On, '[F3] Multiline: ON'))
else:
result.append((self.token.Off, '[F3] Multiline: OFF'))
- if cli.current_buffer.always_multiline:
? --------
+ if cli.buffers['default'].always_multiline:
? ++++++++++++
result.append((self.token,
' (Semi-colon [;] will end the line)'))
return result
|
4d2d940d672c6af14916cf4c4cecf2a5bb6de4ef
|
libqtile/layout/hybridlayoutdemo.py
|
libqtile/layout/hybridlayoutdemo.py
|
from base import SubLayout, Rect
from sublayouts import VerticalStack, Floating
from subtile import SubTile
class HybridLayoutDemo(SubLayout):
def _init_sublayouts(self):
class TopWindow(VerticalStack):
def filter_windows(self, windows):
windows = [w for w in windows if w.name == "htop"]
return ([windows[0],] if len(windows) else [])
def request_rectangle(self, r, windows):
if windows:
return r.split_horizontal(height=300)
else:
return (Rect(), r)
self.sublayouts.append(Floating(self.clientStack,
self.theme,
parent=self,
)
)
self.sublayouts.append(TopWindow(self.clientStack,
self.theme,
parent=self,
autohide=True,
)
)
self.sublayouts.append(SubTile(self.clientStack,
self.theme,
parent=self,
master_windows = 2,
)
)
def filter(self, client):
return True
|
from base import SubLayout, Rect
from sublayouts import VerticalStack, Floating
from subtile import SubTile
class HybridLayoutDemo(SubLayout):
def _init_sublayouts(self):
class TopWindow(VerticalStack):
def filter_windows(self, windows):
windows = [w for w in windows if w.name == "htop"]
return ([windows[0],] if len(windows) else [])
def request_rectangle(self, r, windows):
if windows:
return r.split_horizontal(height=300)
else:
return (Rect(), r)
self.sublayouts.append(Floating(self.clientStack,
self.theme,
parent=self,
)
)
self.sublayouts.append(TopWindow(self.clientStack,
self.theme,
parent=self,
autohide=True,
)
)
self.sublayouts.append(SubTile(self.clientStack,
self.theme,
parent=self,
master_windows = 2,
)
)
def filter(self, client):
return True
def request_rectangle(self, r, windows):
return (r, Rect())
|
Add request_rectange to HybridLayoutDemo - no clue why this never was here, but it stops it actually working
|
Add request_rectange to HybridLayoutDemo - no clue why this never was here, but it stops it actually working
|
Python
|
mit
|
dequis/qtile,farebord/qtile,de-vri-es/qtile,StephenBarnes/qtile,ramnes/qtile,ramnes/qtile,flacjacket/qtile,tych0/qtile,farebord/qtile,kseistrup/qtile,kopchik/qtile,aniruddhkanojia/qtile,kiniou/qtile,jdowner/qtile,kynikos/qtile,soulchainer/qtile,soulchainer/qtile,kynikos/qtile,andrewyoung1991/qtile,jdowner/qtile,apinsard/qtile,kiniou/qtile,rxcomm/qtile,de-vri-es/qtile,nxnfufunezn/qtile,rxcomm/qtile,aniruddhkanojia/qtile,encukou/qtile,zordsdavini/qtile,StephenBarnes/qtile,tych0/qtile,qtile/qtile,qtile/qtile,frostidaho/qtile,kopchik/qtile,cortesi/qtile,w1ndy/qtile,EndPointCorp/qtile,xplv/qtile,frostidaho/qtile,bavardage/qtile,EndPointCorp/qtile,kseistrup/qtile,flacjacket/qtile,cortesi/qtile,dequis/qtile,encukou/qtile,zordsdavini/qtile,w1ndy/qtile,xplv/qtile,himaaaatti/qtile,apinsard/qtile,andrewyoung1991/qtile,nxnfufunezn/qtile,himaaaatti/qtile
|
from base import SubLayout, Rect
from sublayouts import VerticalStack, Floating
from subtile import SubTile
class HybridLayoutDemo(SubLayout):
def _init_sublayouts(self):
class TopWindow(VerticalStack):
def filter_windows(self, windows):
windows = [w for w in windows if w.name == "htop"]
return ([windows[0],] if len(windows) else [])
def request_rectangle(self, r, windows):
if windows:
return r.split_horizontal(height=300)
else:
return (Rect(), r)
self.sublayouts.append(Floating(self.clientStack,
self.theme,
parent=self,
)
)
self.sublayouts.append(TopWindow(self.clientStack,
self.theme,
parent=self,
autohide=True,
)
)
self.sublayouts.append(SubTile(self.clientStack,
self.theme,
parent=self,
master_windows = 2,
)
)
def filter(self, client):
return True
+ def request_rectangle(self, r, windows):
+ return (r, Rect())
+
|
Add request_rectange to HybridLayoutDemo - no clue why this never was here, but it stops it actually working
|
## Code Before:
from base import SubLayout, Rect
from sublayouts import VerticalStack, Floating
from subtile import SubTile
class HybridLayoutDemo(SubLayout):
def _init_sublayouts(self):
class TopWindow(VerticalStack):
def filter_windows(self, windows):
windows = [w for w in windows if w.name == "htop"]
return ([windows[0],] if len(windows) else [])
def request_rectangle(self, r, windows):
if windows:
return r.split_horizontal(height=300)
else:
return (Rect(), r)
self.sublayouts.append(Floating(self.clientStack,
self.theme,
parent=self,
)
)
self.sublayouts.append(TopWindow(self.clientStack,
self.theme,
parent=self,
autohide=True,
)
)
self.sublayouts.append(SubTile(self.clientStack,
self.theme,
parent=self,
master_windows = 2,
)
)
def filter(self, client):
return True
## Instruction:
Add request_rectange to HybridLayoutDemo - no clue why this never was here, but it stops it actually working
## Code After:
from base import SubLayout, Rect
from sublayouts import VerticalStack, Floating
from subtile import SubTile
class HybridLayoutDemo(SubLayout):
def _init_sublayouts(self):
class TopWindow(VerticalStack):
def filter_windows(self, windows):
windows = [w for w in windows if w.name == "htop"]
return ([windows[0],] if len(windows) else [])
def request_rectangle(self, r, windows):
if windows:
return r.split_horizontal(height=300)
else:
return (Rect(), r)
self.sublayouts.append(Floating(self.clientStack,
self.theme,
parent=self,
)
)
self.sublayouts.append(TopWindow(self.clientStack,
self.theme,
parent=self,
autohide=True,
)
)
self.sublayouts.append(SubTile(self.clientStack,
self.theme,
parent=self,
master_windows = 2,
)
)
def filter(self, client):
return True
def request_rectangle(self, r, windows):
return (r, Rect())
|
from base import SubLayout, Rect
from sublayouts import VerticalStack, Floating
from subtile import SubTile
class HybridLayoutDemo(SubLayout):
def _init_sublayouts(self):
class TopWindow(VerticalStack):
def filter_windows(self, windows):
windows = [w for w in windows if w.name == "htop"]
return ([windows[0],] if len(windows) else [])
def request_rectangle(self, r, windows):
if windows:
return r.split_horizontal(height=300)
else:
return (Rect(), r)
self.sublayouts.append(Floating(self.clientStack,
self.theme,
parent=self,
)
)
self.sublayouts.append(TopWindow(self.clientStack,
self.theme,
parent=self,
autohide=True,
)
)
self.sublayouts.append(SubTile(self.clientStack,
self.theme,
parent=self,
master_windows = 2,
)
)
def filter(self, client):
return True
+
+ def request_rectangle(self, r, windows):
+ return (r, Rect())
|
e8e2ebb156ce75afa87d26a632ed7aa5c74ba2c6
|
eggsclaim.py
|
eggsclaim.py
|
import signal
import sys
import serial
import sms
from xbee import XBee
MOBILE_NUM = '0400000000'
NOTIFICATION_MSG = 'Cock-a-doodle-doo! An egg is waiting for you!'
egg_was_present = False
def signal_handler(signal, frame):
xbee.halt()
serial_port.close()
sys.exit(0)
def packet_received(packet):
samples = packet['samples'][0]
egg_is_present = samples['dio-4'] if 'dio-4' in samples else False
if egg_is_present and egg_is_present != egg_was_present:
sms.send(MOBILE_NUM, NOTIFICATION_MSG)
egg_was_present = egg_is_present
signal.signal(signal.SIGINT, signal_handler)
serial_port = serial.Serial('/dev/ttyp0', 9600)
xbee = XBee(serial_port, callback=packet_received)
|
import signal
import sys
import serial
import sms
from xbee import XBee
SERIAL_PORT = '/dev/usbserial-143'
MOBILE_NUM = '0400000000'
NOTIFICATION_MSG = 'Cock-a-doodle-doo! An egg is waiting for you!'
egg_was_present = False
def signal_handler(signal, frame):
xbee.halt()
serial_port.close()
sys.exit(0)
def packet_received(packet):
samples = packet['samples'][0]
egg_is_present = samples['dio-1'] if 'dio-1' in samples else False
if egg_is_present and egg_is_present != egg_was_present:
sms.send(MOBILE_NUM, NOTIFICATION_MSG)
egg_was_present = egg_is_present
signal.signal(signal.SIGINT, signal_handler)
serial_port = serial.Serial(SERIAL_PORT, 9600)
xbee = XBee(serial_port, callback=packet_received)
|
Use correct IO pins and serial port
|
Use correct IO pins and serial port
|
Python
|
mit
|
jamespettigrew/eggsclaim
|
import signal
import sys
import serial
import sms
from xbee import XBee
+ SERIAL_PORT = '/dev/usbserial-143'
MOBILE_NUM = '0400000000'
NOTIFICATION_MSG = 'Cock-a-doodle-doo! An egg is waiting for you!'
egg_was_present = False
def signal_handler(signal, frame):
xbee.halt()
serial_port.close()
sys.exit(0)
def packet_received(packet):
samples = packet['samples'][0]
- egg_is_present = samples['dio-4'] if 'dio-4' in samples else False
+ egg_is_present = samples['dio-1'] if 'dio-1' in samples else False
if egg_is_present and egg_is_present != egg_was_present:
sms.send(MOBILE_NUM, NOTIFICATION_MSG)
egg_was_present = egg_is_present
signal.signal(signal.SIGINT, signal_handler)
- serial_port = serial.Serial('/dev/ttyp0', 9600)
+ serial_port = serial.Serial(SERIAL_PORT, 9600)
xbee = XBee(serial_port, callback=packet_received)
|
Use correct IO pins and serial port
|
## Code Before:
import signal
import sys
import serial
import sms
from xbee import XBee
MOBILE_NUM = '0400000000'
NOTIFICATION_MSG = 'Cock-a-doodle-doo! An egg is waiting for you!'
egg_was_present = False
def signal_handler(signal, frame):
xbee.halt()
serial_port.close()
sys.exit(0)
def packet_received(packet):
samples = packet['samples'][0]
egg_is_present = samples['dio-4'] if 'dio-4' in samples else False
if egg_is_present and egg_is_present != egg_was_present:
sms.send(MOBILE_NUM, NOTIFICATION_MSG)
egg_was_present = egg_is_present
signal.signal(signal.SIGINT, signal_handler)
serial_port = serial.Serial('/dev/ttyp0', 9600)
xbee = XBee(serial_port, callback=packet_received)
## Instruction:
Use correct IO pins and serial port
## Code After:
import signal
import sys
import serial
import sms
from xbee import XBee
SERIAL_PORT = '/dev/usbserial-143'
MOBILE_NUM = '0400000000'
NOTIFICATION_MSG = 'Cock-a-doodle-doo! An egg is waiting for you!'
egg_was_present = False
def signal_handler(signal, frame):
xbee.halt()
serial_port.close()
sys.exit(0)
def packet_received(packet):
samples = packet['samples'][0]
egg_is_present = samples['dio-1'] if 'dio-1' in samples else False
if egg_is_present and egg_is_present != egg_was_present:
sms.send(MOBILE_NUM, NOTIFICATION_MSG)
egg_was_present = egg_is_present
signal.signal(signal.SIGINT, signal_handler)
serial_port = serial.Serial(SERIAL_PORT, 9600)
xbee = XBee(serial_port, callback=packet_received)
|
import signal
import sys
import serial
import sms
from xbee import XBee
+ SERIAL_PORT = '/dev/usbserial-143'
MOBILE_NUM = '0400000000'
NOTIFICATION_MSG = 'Cock-a-doodle-doo! An egg is waiting for you!'
egg_was_present = False
def signal_handler(signal, frame):
xbee.halt()
serial_port.close()
sys.exit(0)
def packet_received(packet):
samples = packet['samples'][0]
- egg_is_present = samples['dio-4'] if 'dio-4' in samples else False
? ^ ^
+ egg_is_present = samples['dio-1'] if 'dio-1' in samples else False
? ^ ^
if egg_is_present and egg_is_present != egg_was_present:
sms.send(MOBILE_NUM, NOTIFICATION_MSG)
egg_was_present = egg_is_present
signal.signal(signal.SIGINT, signal_handler)
- serial_port = serial.Serial('/dev/ttyp0', 9600)
? ^^^^^^^^^^^^
+ serial_port = serial.Serial(SERIAL_PORT, 9600)
? ^^^^^^^^^^^
xbee = XBee(serial_port, callback=packet_received)
|
9d05f18dcb4b52c1d4e68f53f24e5ccebab10a58
|
bot/models.py
|
bot/models.py
|
from sqlalchemy import create_engine, Column, String
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
def db_connect():
"""
Performs database connection
Returns sqlalchemy engine instance
"""
return create_engine('postgres://avvcurseaphtxf:X0466JySVtLq6nyq_5pb7BQNjR@'
'ec2-54-227-250-80.compute-1.amazonaws.com'
':5432/d7do67r1b7t1nn', echo=False)
def create_battletag_table(engine):
Base.metadata.create_all(engine)
class Battletags(Base):
"""
Table to store user battletags
"""
__tablename__ = 'Battletags'
disc_name = Column(String, primary_key=True)
battletag = Column(String, unique=True)
|
from sqlalchemy import create_engine, Column, String
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
def db_connect():
"""
Performs database connection
Returns sqlalchemy engine instance
"""
return create_engine('postgres://fbcmeskynsvati:aURfAdENt6-kumO0j224GuXRWH'
'@ec2-54-221-235-135.compute-1.amazonaws.com'
':5432/d2cc1tb2t1iges', echo=False)
def create_battletag_table(engine):
Base.metadata.create_all(engine)
class Battletags(Base):
"""
Table to store user battletags
"""
__tablename__ = 'Battletags'
disc_name = Column(String, primary_key=True)
battletag = Column(String, unique=True)
|
Change database url for create_engine()
|
Change database url for create_engine()
|
Python
|
mit
|
alexbotello/BastionBot
|
from sqlalchemy import create_engine, Column, String
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
def db_connect():
"""
Performs database connection
Returns sqlalchemy engine instance
"""
- return create_engine('postgres://avvcurseaphtxf:X0466JySVtLq6nyq_5pb7BQNjR@'
+ return create_engine('postgres://fbcmeskynsvati:aURfAdENt6-kumO0j224GuXRWH'
- 'ec2-54-227-250-80.compute-1.amazonaws.com'
+ '@ec2-54-221-235-135.compute-1.amazonaws.com'
- ':5432/d7do67r1b7t1nn', echo=False)
+ ':5432/d2cc1tb2t1iges', echo=False)
def create_battletag_table(engine):
Base.metadata.create_all(engine)
class Battletags(Base):
"""
Table to store user battletags
"""
__tablename__ = 'Battletags'
disc_name = Column(String, primary_key=True)
battletag = Column(String, unique=True)
|
Change database url for create_engine()
|
## Code Before:
from sqlalchemy import create_engine, Column, String
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
def db_connect():
"""
Performs database connection
Returns sqlalchemy engine instance
"""
return create_engine('postgres://avvcurseaphtxf:X0466JySVtLq6nyq_5pb7BQNjR@'
'ec2-54-227-250-80.compute-1.amazonaws.com'
':5432/d7do67r1b7t1nn', echo=False)
def create_battletag_table(engine):
Base.metadata.create_all(engine)
class Battletags(Base):
"""
Table to store user battletags
"""
__tablename__ = 'Battletags'
disc_name = Column(String, primary_key=True)
battletag = Column(String, unique=True)
## Instruction:
Change database url for create_engine()
## Code After:
from sqlalchemy import create_engine, Column, String
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
def db_connect():
"""
Performs database connection
Returns sqlalchemy engine instance
"""
return create_engine('postgres://fbcmeskynsvati:aURfAdENt6-kumO0j224GuXRWH'
'@ec2-54-221-235-135.compute-1.amazonaws.com'
':5432/d2cc1tb2t1iges', echo=False)
def create_battletag_table(engine):
Base.metadata.create_all(engine)
class Battletags(Base):
"""
Table to store user battletags
"""
__tablename__ = 'Battletags'
disc_name = Column(String, primary_key=True)
battletag = Column(String, unique=True)
|
from sqlalchemy import create_engine, Column, String
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
def db_connect():
"""
Performs database connection
Returns sqlalchemy engine instance
"""
- return create_engine('postgres://avvcurseaphtxf:X0466JySVtLq6nyq_5pb7BQNjR@'
+ return create_engine('postgres://fbcmeskynsvati:aURfAdENt6-kumO0j224GuXRWH'
- 'ec2-54-227-250-80.compute-1.amazonaws.com'
? ^ - ^^
+ '@ec2-54-221-235-135.compute-1.amazonaws.com'
? + ^ + ^^^
- ':5432/d7do67r1b7t1nn', echo=False)
? ^^^^^^ ^ ^^
+ ':5432/d2cc1tb2t1iges', echo=False)
? ^^^ + ^ ^^^^
def create_battletag_table(engine):
Base.metadata.create_all(engine)
class Battletags(Base):
"""
Table to store user battletags
"""
__tablename__ = 'Battletags'
disc_name = Column(String, primary_key=True)
battletag = Column(String, unique=True)
|
ed6146566d57105af88855c6b8668b4f76e98dbf
|
xmanager/xm_local/__init__.py
|
xmanager/xm_local/__init__.py
|
"""Implementation of the XManager Launch API within the local scheduler."""
from xmanager.xm_local import experiment
from xmanager.xm_local.executors import Caip
from xmanager.xm_local.executors import Kubernetes
from xmanager.xm_local.executors import Local
from xmanager.xm_local.executors import TensorboardCapability
create_experiment = experiment.create_experiment
|
"""Implementation of the XManager Launch API within the local scheduler."""
from xmanager.xm_local import experiment
from xmanager.xm_local.executors import *
create_experiment = experiment.create_experiment
|
Make `DockerOptions` part of the `xm_local` module
|
Make `DockerOptions` part of the `xm_local` module
PiperOrigin-RevId: 376139511
Change-Id: Ia0ec1337b9ef2c175dea6b0c45e0a99b285d2b31
GitOrigin-RevId: 799d3ef6a98a6e4922b0b60c190c0d82cd538548
|
Python
|
apache-2.0
|
deepmind/xmanager,deepmind/xmanager
|
"""Implementation of the XManager Launch API within the local scheduler."""
from xmanager.xm_local import experiment
- from xmanager.xm_local.executors import Caip
+ from xmanager.xm_local.executors import *
- from xmanager.xm_local.executors import Kubernetes
- from xmanager.xm_local.executors import Local
- from xmanager.xm_local.executors import TensorboardCapability
create_experiment = experiment.create_experiment
|
Make `DockerOptions` part of the `xm_local` module
|
## Code Before:
"""Implementation of the XManager Launch API within the local scheduler."""
from xmanager.xm_local import experiment
from xmanager.xm_local.executors import Caip
from xmanager.xm_local.executors import Kubernetes
from xmanager.xm_local.executors import Local
from xmanager.xm_local.executors import TensorboardCapability
create_experiment = experiment.create_experiment
## Instruction:
Make `DockerOptions` part of the `xm_local` module
## Code After:
"""Implementation of the XManager Launch API within the local scheduler."""
from xmanager.xm_local import experiment
from xmanager.xm_local.executors import *
create_experiment = experiment.create_experiment
|
"""Implementation of the XManager Launch API within the local scheduler."""
from xmanager.xm_local import experiment
- from xmanager.xm_local.executors import Caip
? ^^^^
+ from xmanager.xm_local.executors import *
? ^
- from xmanager.xm_local.executors import Kubernetes
- from xmanager.xm_local.executors import Local
- from xmanager.xm_local.executors import TensorboardCapability
create_experiment = experiment.create_experiment
|
73660f4f539a1aeb520c33112cfc41183e4dd43a
|
luigi/tasks/rfam/clans_csv.py
|
luigi/tasks/rfam/clans_csv.py
|
import operator as op
import luigi
from databases.rfam.clans import parse
from tasks.config import rfam
from tasks.utils.fetch import FetchTask
from tasks.utils.writers import CsvOutput
class RfamClansCSV(luigi.Task):
def requires(self):
conf = rfam()
return FetchTask(
remote_path=conf.query('clans.sql'),
local_path=conf.raw('clans.tsv'),
)
def output(self):
conf = rfam()
return CsvOutput(
conf.clans,
['id', 'name', 'description', 'family_count'],
op.methodcaller('writeable'),
)
def run(self):
with self.requires().output.open('r') as raw:
self.output().populate(parse(raw))
|
import operator as op
import luigi
from databases.rfam import clans
from tasks.config import rfam
from tasks.utils.writers import CsvOutput
from tasks.utils.mysql import MysqlQueryTask
class RfamClansCSV(luigi.Task):
def requires(self):
conf = rfam()
return MysqlQueryTask(
db=conf,
query=clans.QUERY,
local_path=conf.raw('clans.tsv'),
)
def output(self):
conf = rfam()
return CsvOutput(
conf.clans,
['id', 'name', 'description', 'family_count'],
op.methodcaller('writeable'),
)
def run(self):
with self.requires().output.open('r') as raw:
self.output().populate(clans.parse(raw))
|
Use MysqlQueryTask for getting clan data
|
Use MysqlQueryTask for getting clan data
|
Python
|
apache-2.0
|
RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline
|
import operator as op
import luigi
- from databases.rfam.clans import parse
+ from databases.rfam import clans
from tasks.config import rfam
- from tasks.utils.fetch import FetchTask
from tasks.utils.writers import CsvOutput
+ from tasks.utils.mysql import MysqlQueryTask
class RfamClansCSV(luigi.Task):
def requires(self):
conf = rfam()
- return FetchTask(
+ return MysqlQueryTask(
- remote_path=conf.query('clans.sql'),
+ db=conf,
+ query=clans.QUERY,
local_path=conf.raw('clans.tsv'),
)
def output(self):
conf = rfam()
return CsvOutput(
conf.clans,
['id', 'name', 'description', 'family_count'],
op.methodcaller('writeable'),
)
def run(self):
with self.requires().output.open('r') as raw:
- self.output().populate(parse(raw))
+ self.output().populate(clans.parse(raw))
|
Use MysqlQueryTask for getting clan data
|
## Code Before:
import operator as op
import luigi
from databases.rfam.clans import parse
from tasks.config import rfam
from tasks.utils.fetch import FetchTask
from tasks.utils.writers import CsvOutput
class RfamClansCSV(luigi.Task):
def requires(self):
conf = rfam()
return FetchTask(
remote_path=conf.query('clans.sql'),
local_path=conf.raw('clans.tsv'),
)
def output(self):
conf = rfam()
return CsvOutput(
conf.clans,
['id', 'name', 'description', 'family_count'],
op.methodcaller('writeable'),
)
def run(self):
with self.requires().output.open('r') as raw:
self.output().populate(parse(raw))
## Instruction:
Use MysqlQueryTask for getting clan data
## Code After:
import operator as op
import luigi
from databases.rfam import clans
from tasks.config import rfam
from tasks.utils.writers import CsvOutput
from tasks.utils.mysql import MysqlQueryTask
class RfamClansCSV(luigi.Task):
def requires(self):
conf = rfam()
return MysqlQueryTask(
db=conf,
query=clans.QUERY,
local_path=conf.raw('clans.tsv'),
)
def output(self):
conf = rfam()
return CsvOutput(
conf.clans,
['id', 'name', 'description', 'family_count'],
op.methodcaller('writeable'),
)
def run(self):
with self.requires().output.open('r') as raw:
self.output().populate(clans.parse(raw))
|
import operator as op
import luigi
- from databases.rfam.clans import parse
? ------ ^ ^ -
+ from databases.rfam import clans
? ^^ ^
from tasks.config import rfam
- from tasks.utils.fetch import FetchTask
from tasks.utils.writers import CsvOutput
+ from tasks.utils.mysql import MysqlQueryTask
class RfamClansCSV(luigi.Task):
def requires(self):
conf = rfam()
- return FetchTask(
? ^ ^^^
+ return MysqlQueryTask(
? ^^^^^^^ ^^
- remote_path=conf.query('clans.sql'),
+ db=conf,
+ query=clans.QUERY,
local_path=conf.raw('clans.tsv'),
)
def output(self):
conf = rfam()
return CsvOutput(
conf.clans,
['id', 'name', 'description', 'family_count'],
op.methodcaller('writeable'),
)
def run(self):
with self.requires().output.open('r') as raw:
- self.output().populate(parse(raw))
+ self.output().populate(clans.parse(raw))
? ++++++
|
aee49d59b76400389ffa768950b479094059e385
|
linguist/tests/translations.py
|
linguist/tests/translations.py
|
from django.db import models
from ..base import ModelTranslationBase
from ..mixins import ModelMixin, ManagerMixin
class FooManager(ManagerMixin, models.Manager):
pass
class BarManager(ManagerMixin, models.Manager):
pass
class FooModel(ModelMixin, models.Model):
title = models.CharField(max_length=255, null=True, blank=True)
excerpt = models.TextField(null=True, blank=True)
body = models.TextField(null=True, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
objects = FooManager()
class FooTranslation(ModelTranslationBase):
model = FooModel
identifier = 'foo'
fields = ('title', 'excerpt', 'body')
class BarModel(ModelMixin, models.Model):
title = models.CharField(max_length=255, null=True, blank=True)
objects = BarManager()
class BarTranslation(ModelTranslationBase):
model = BarModel
identifier = 'bar'
fields = ('title', )
class BadTranslation(object):
pass
class BadModel(object):
pass
|
from django.db import models
from ..base import ModelTranslationBase
from ..mixins import ModelMixin, ManagerMixin
class FooManager(ManagerMixin, models.Manager):
pass
class BarManager(ManagerMixin, models.Manager):
pass
class FooModel(ModelMixin, models.Model):
title = models.CharField(max_length=255)
excerpt = models.TextField(null=True, blank=True)
body = models.TextField(null=True, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
objects = FooManager()
class Meta:
linguist = {
'identifier': 'foo',
'fields': ('title', 'excerpt', 'body'),
}
class BarModel(ModelMixin, models.Model):
title = models.CharField(max_length=255, null=True, blank=True)
objects = BarManager()
class Meta:
linguist = {
'identifier': 'bar',
'fields': ('title', ),
}
class BadTranslation(object):
pass
class BadModel(object):
pass
|
Update test models for new metaclass support.
|
Update test models for new metaclass support.
|
Python
|
mit
|
ulule/django-linguist
|
from django.db import models
from ..base import ModelTranslationBase
from ..mixins import ModelMixin, ManagerMixin
class FooManager(ManagerMixin, models.Manager):
pass
class BarManager(ManagerMixin, models.Manager):
pass
class FooModel(ModelMixin, models.Model):
- title = models.CharField(max_length=255, null=True, blank=True)
+ title = models.CharField(max_length=255)
excerpt = models.TextField(null=True, blank=True)
body = models.TextField(null=True, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
objects = FooManager()
+ class Meta:
+ linguist = {
+ 'identifier': 'foo',
-
- class FooTranslation(ModelTranslationBase):
- model = FooModel
- identifier = 'foo'
- fields = ('title', 'excerpt', 'body')
+ 'fields': ('title', 'excerpt', 'body'),
+ }
class BarModel(ModelMixin, models.Model):
title = models.CharField(max_length=255, null=True, blank=True)
objects = BarManager()
+ class Meta:
+ linguist = {
+ 'identifier': 'bar',
-
- class BarTranslation(ModelTranslationBase):
- model = BarModel
- identifier = 'bar'
- fields = ('title', )
+ 'fields': ('title', ),
+ }
class BadTranslation(object):
pass
class BadModel(object):
pass
|
Update test models for new metaclass support.
|
## Code Before:
from django.db import models
from ..base import ModelTranslationBase
from ..mixins import ModelMixin, ManagerMixin
class FooManager(ManagerMixin, models.Manager):
pass
class BarManager(ManagerMixin, models.Manager):
pass
class FooModel(ModelMixin, models.Model):
title = models.CharField(max_length=255, null=True, blank=True)
excerpt = models.TextField(null=True, blank=True)
body = models.TextField(null=True, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
objects = FooManager()
class FooTranslation(ModelTranslationBase):
model = FooModel
identifier = 'foo'
fields = ('title', 'excerpt', 'body')
class BarModel(ModelMixin, models.Model):
title = models.CharField(max_length=255, null=True, blank=True)
objects = BarManager()
class BarTranslation(ModelTranslationBase):
model = BarModel
identifier = 'bar'
fields = ('title', )
class BadTranslation(object):
pass
class BadModel(object):
pass
## Instruction:
Update test models for new metaclass support.
## Code After:
from django.db import models
from ..base import ModelTranslationBase
from ..mixins import ModelMixin, ManagerMixin
class FooManager(ManagerMixin, models.Manager):
pass
class BarManager(ManagerMixin, models.Manager):
pass
class FooModel(ModelMixin, models.Model):
title = models.CharField(max_length=255)
excerpt = models.TextField(null=True, blank=True)
body = models.TextField(null=True, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
objects = FooManager()
class Meta:
linguist = {
'identifier': 'foo',
'fields': ('title', 'excerpt', 'body'),
}
class BarModel(ModelMixin, models.Model):
title = models.CharField(max_length=255, null=True, blank=True)
objects = BarManager()
class Meta:
linguist = {
'identifier': 'bar',
'fields': ('title', ),
}
class BadTranslation(object):
pass
class BadModel(object):
pass
|
from django.db import models
from ..base import ModelTranslationBase
from ..mixins import ModelMixin, ManagerMixin
class FooManager(ManagerMixin, models.Manager):
pass
class BarManager(ManagerMixin, models.Manager):
pass
class FooModel(ModelMixin, models.Model):
- title = models.CharField(max_length=255, null=True, blank=True)
? -----------------------
+ title = models.CharField(max_length=255)
excerpt = models.TextField(null=True, blank=True)
body = models.TextField(null=True, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
objects = FooManager()
+ class Meta:
+ linguist = {
+ 'identifier': 'foo',
-
- class FooTranslation(ModelTranslationBase):
- model = FooModel
- identifier = 'foo'
- fields = ('title', 'excerpt', 'body')
? ^^
+ 'fields': ('title', 'excerpt', 'body'),
? +++++++++ ^^ +
+ }
class BarModel(ModelMixin, models.Model):
title = models.CharField(max_length=255, null=True, blank=True)
objects = BarManager()
+ class Meta:
+ linguist = {
+ 'identifier': 'bar',
-
- class BarTranslation(ModelTranslationBase):
- model = BarModel
- identifier = 'bar'
- fields = ('title', )
? ^^
+ 'fields': ('title', ),
? +++++++++ ^^ +
+ }
class BadTranslation(object):
pass
class BadModel(object):
pass
|
441a1b85f6ab954ab89f32977e4f00293270aac6
|
sphinxcontrib/multilatex/__init__.py
|
sphinxcontrib/multilatex/__init__.py
|
import directive
import builder
#===========================================================================
# Node visitor functions
def visit_passthrough(self, node):
pass
def depart_passthrough(self, node):
pass
passthrough = (visit_passthrough, depart_passthrough)
#===========================================================================
# Setup and register extension
def setup(app):
app.add_node(directive.latex_document,
html=passthrough)
app.add_directive("latex-document", directive.LatexDocumentDirective)
app.add_builder(builder.MultiLatexBuilder)
return {"version": "0.0"}
|
import directive
import builder
#===========================================================================
# Node visitor functions
def visit_passthrough(self, node):
pass
def depart_passthrough(self, node):
pass
passthrough = (visit_passthrough, depart_passthrough)
#===========================================================================
# Setup and register extension
def setup(app):
app.add_node(directive.latex_document,
latex=passthrough,
html=passthrough)
app.add_directive("latex-document", directive.LatexDocumentDirective)
app.add_builder(builder.MultiLatexBuilder)
return {"version": "0.0"}
|
Set LaTeX builder to skip latex_document nodes
|
Set LaTeX builder to skip latex_document nodes
This stops Sphinx' built-in LaTeX builder from complaining about unknown
latex_document node type.
|
Python
|
apache-2.0
|
t4ngo/sphinxcontrib-multilatex,t4ngo/sphinxcontrib-multilatex
|
import directive
import builder
#===========================================================================
# Node visitor functions
def visit_passthrough(self, node):
pass
def depart_passthrough(self, node):
pass
passthrough = (visit_passthrough, depart_passthrough)
#===========================================================================
# Setup and register extension
def setup(app):
app.add_node(directive.latex_document,
+ latex=passthrough,
html=passthrough)
app.add_directive("latex-document", directive.LatexDocumentDirective)
app.add_builder(builder.MultiLatexBuilder)
return {"version": "0.0"}
|
Set LaTeX builder to skip latex_document nodes
|
## Code Before:
import directive
import builder
#===========================================================================
# Node visitor functions
def visit_passthrough(self, node):
pass
def depart_passthrough(self, node):
pass
passthrough = (visit_passthrough, depart_passthrough)
#===========================================================================
# Setup and register extension
def setup(app):
app.add_node(directive.latex_document,
html=passthrough)
app.add_directive("latex-document", directive.LatexDocumentDirective)
app.add_builder(builder.MultiLatexBuilder)
return {"version": "0.0"}
## Instruction:
Set LaTeX builder to skip latex_document nodes
## Code After:
import directive
import builder
#===========================================================================
# Node visitor functions
def visit_passthrough(self, node):
pass
def depart_passthrough(self, node):
pass
passthrough = (visit_passthrough, depart_passthrough)
#===========================================================================
# Setup and register extension
def setup(app):
app.add_node(directive.latex_document,
latex=passthrough,
html=passthrough)
app.add_directive("latex-document", directive.LatexDocumentDirective)
app.add_builder(builder.MultiLatexBuilder)
return {"version": "0.0"}
|
import directive
import builder
#===========================================================================
# Node visitor functions
def visit_passthrough(self, node):
pass
def depart_passthrough(self, node):
pass
passthrough = (visit_passthrough, depart_passthrough)
#===========================================================================
# Setup and register extension
def setup(app):
app.add_node(directive.latex_document,
+ latex=passthrough,
html=passthrough)
app.add_directive("latex-document", directive.LatexDocumentDirective)
app.add_builder(builder.MultiLatexBuilder)
return {"version": "0.0"}
|
066833caebddb9a6e0735e635ff214448e078405
|
check_env.py
|
check_env.py
|
def test_import_pandas():
import pandas
def test_pandas_version():
import pandas
version_found = pandas.__version__.split(".")
version_found = tuple(int(num) for num in version_found)
assert version_found > (0, 15)
def test_import_numpy():
import numpy
def test_import_matplotlib():
import matplotlib
def test_scrape_web():
import pandas as pd
pd.read_html("http://en.wikipedia.org/wiki/World_population")
if __name__ == "__main__":
import nose
nose.run(defaultTest=__name__)
|
from os.path import dirname, join
HERE = dirname(__file__)
def test_import_pandas():
import pandas
def test_pandas_version():
import pandas
version_found = pandas.__version__.split(".")
version_found = tuple(int(num) for num in version_found)
assert version_found > (0, 15)
def test_import_numpy():
import numpy
def test_import_matplotlib():
import matplotlib.pyplot as plt
plt.figure
plt.plot
plt.legend
plt.imshow
def test_import_statsmodels():
import statsmodels as sm
from statsmodels.formula.api import ols
from statsmodels.tsa.ar_model import AR
def test_read_html():
import pandas
pandas.read_html(join(HERE, "demos", "climate_timeseries", "data",
"sea_levels", "Obtaining Tide Gauge Data.html"))
def test_scrape_web():
import pandas as pd
pd.read_html("http://en.wikipedia.org/wiki/World_population")
if __name__ == "__main__":
import nose
nose.run(defaultTest=__name__)
|
Add some more content in tests including with statsmodels.
|
Add some more content in tests including with statsmodels.
|
Python
|
mit
|
wateryhcho/pandas_tutorial,linan7788626/pandas_tutorial,jonathanrocher/pandas_tutorial,wateryhcho/pandas_tutorial,Sandor-PRA/pandas_tutorial,Sandor-PRA/pandas_tutorial,ajaykliyara/pandas_tutorial,ajaykliyara/pandas_tutorial,jonathanrocher/pandas_tutorial,jonathanrocher/pandas_tutorial,ajaykliyara/pandas_tutorial,linan7788626/pandas_tutorial,Sandor-PRA/pandas_tutorial,linan7788626/pandas_tutorial,wateryhcho/pandas_tutorial
|
+ from os.path import dirname, join
+
+ HERE = dirname(__file__)
def test_import_pandas():
import pandas
def test_pandas_version():
import pandas
version_found = pandas.__version__.split(".")
version_found = tuple(int(num) for num in version_found)
assert version_found > (0, 15)
def test_import_numpy():
import numpy
def test_import_matplotlib():
- import matplotlib
+ import matplotlib.pyplot as plt
+ plt.figure
+ plt.plot
+ plt.legend
+ plt.imshow
+
+
+ def test_import_statsmodels():
+ import statsmodels as sm
+ from statsmodels.formula.api import ols
+ from statsmodels.tsa.ar_model import AR
+
+
+ def test_read_html():
+ import pandas
+ pandas.read_html(join(HERE, "demos", "climate_timeseries", "data",
+ "sea_levels", "Obtaining Tide Gauge Data.html"))
def test_scrape_web():
import pandas as pd
pd.read_html("http://en.wikipedia.org/wiki/World_population")
if __name__ == "__main__":
import nose
nose.run(defaultTest=__name__)
|
Add some more content in tests including with statsmodels.
|
## Code Before:
def test_import_pandas():
import pandas
def test_pandas_version():
import pandas
version_found = pandas.__version__.split(".")
version_found = tuple(int(num) for num in version_found)
assert version_found > (0, 15)
def test_import_numpy():
import numpy
def test_import_matplotlib():
import matplotlib
def test_scrape_web():
import pandas as pd
pd.read_html("http://en.wikipedia.org/wiki/World_population")
if __name__ == "__main__":
import nose
nose.run(defaultTest=__name__)
## Instruction:
Add some more content in tests including with statsmodels.
## Code After:
from os.path import dirname, join
HERE = dirname(__file__)
def test_import_pandas():
import pandas
def test_pandas_version():
import pandas
version_found = pandas.__version__.split(".")
version_found = tuple(int(num) for num in version_found)
assert version_found > (0, 15)
def test_import_numpy():
import numpy
def test_import_matplotlib():
import matplotlib.pyplot as plt
plt.figure
plt.plot
plt.legend
plt.imshow
def test_import_statsmodels():
import statsmodels as sm
from statsmodels.formula.api import ols
from statsmodels.tsa.ar_model import AR
def test_read_html():
import pandas
pandas.read_html(join(HERE, "demos", "climate_timeseries", "data",
"sea_levels", "Obtaining Tide Gauge Data.html"))
def test_scrape_web():
import pandas as pd
pd.read_html("http://en.wikipedia.org/wiki/World_population")
if __name__ == "__main__":
import nose
nose.run(defaultTest=__name__)
|
+ from os.path import dirname, join
+
+ HERE = dirname(__file__)
def test_import_pandas():
import pandas
def test_pandas_version():
import pandas
version_found = pandas.__version__.split(".")
version_found = tuple(int(num) for num in version_found)
assert version_found > (0, 15)
def test_import_numpy():
import numpy
def test_import_matplotlib():
- import matplotlib
+ import matplotlib.pyplot as plt
? ++++++++++++++
+ plt.figure
+ plt.plot
+ plt.legend
+ plt.imshow
+
+
+ def test_import_statsmodels():
+ import statsmodels as sm
+ from statsmodels.formula.api import ols
+ from statsmodels.tsa.ar_model import AR
+
+
+ def test_read_html():
+ import pandas
+ pandas.read_html(join(HERE, "demos", "climate_timeseries", "data",
+ "sea_levels", "Obtaining Tide Gauge Data.html"))
def test_scrape_web():
import pandas as pd
pd.read_html("http://en.wikipedia.org/wiki/World_population")
if __name__ == "__main__":
import nose
nose.run(defaultTest=__name__)
|
0f3c33de86d38cf47f84df97a79e838d37264b7c
|
sugar/session/LogWriter.py
|
sugar/session/LogWriter.py
|
import os
import sys
import dbus
class LogWriter:
def __init__(self, application):
self._application = application
bus = dbus.SessionBus()
proxy_obj = bus.get_object('com.redhat.Sugar.Logger', '/com/redhat/Sugar/Logger')
self._logger = dbus.Interface(proxy_obj, 'com.redhat.Sugar.Logger')
def start(self):
if os.environ.has_key('SUGAR_USE_CONSOLE'):
sys.stdout = self
sys.stderr = self
def write(self, s):
self._logger.log(self._application, s)
|
import os
import sys
import dbus
import gobject
class LogWriter:
def __init__(self, application):
self._application = application
bus = dbus.SessionBus()
proxy_obj = bus.get_object('com.redhat.Sugar.Logger', '/com/redhat/Sugar/Logger')
self._logger = dbus.Interface(proxy_obj, 'com.redhat.Sugar.Logger')
def start(self):
if os.environ.has_key('SUGAR_USE_CONSOLE'):
sys.stdout = self
sys.stderr = self
def write(self, s):
gobject.idle_add(self._write, s)
def _write(self, s):
self._logger.log(self._application, s)
return False
|
Add messages on idle so that we don't break
|
Add messages on idle so that we don't break
|
Python
|
lgpl-2.1
|
sugarlabs/sugar-toolkit,ceibal-tatu/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit,manuq/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,godiard/sugar-toolkit-gtk3,Daksh/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit,gusDuarte/sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit-gtk3,tchx84/sugar-toolkit-gtk3,tchx84/sugar-toolkit-gtk3,godiard/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit,puneetgkaur/sugar-toolkit-gtk3,Daksh/sugar-toolkit-gtk3,samdroid-apps/sugar-toolkit-gtk3,samdroid-apps/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,puneetgkaur/backup_sugar_sugartoolkit,ceibal-tatu/sugar-toolkit,samdroid-apps/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,puneetgkaur/backup_sugar_sugartoolkit,puneetgkaur/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit-gtk3,puneetgkaur/backup_sugar_sugartoolkit,samdroid-apps/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,puneetgkaur/sugar-toolkit-gtk3,manuq/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,i5o/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit,manuq/sugar-toolkit-gtk3,tchx84/sugar-toolkit-gtk3,Daksh/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,godiard/sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,tchx84/debian-pkg-sugar-toolkit
|
import os
import sys
import dbus
+ import gobject
class LogWriter:
def __init__(self, application):
self._application = application
bus = dbus.SessionBus()
proxy_obj = bus.get_object('com.redhat.Sugar.Logger', '/com/redhat/Sugar/Logger')
self._logger = dbus.Interface(proxy_obj, 'com.redhat.Sugar.Logger')
def start(self):
if os.environ.has_key('SUGAR_USE_CONSOLE'):
sys.stdout = self
sys.stderr = self
def write(self, s):
- self._logger.log(self._application, s)
+ gobject.idle_add(self._write, s)
+ def _write(self, s):
+ self._logger.log(self._application, s)
+ return False
+
|
Add messages on idle so that we don't break
|
## Code Before:
import os
import sys
import dbus
class LogWriter:
def __init__(self, application):
self._application = application
bus = dbus.SessionBus()
proxy_obj = bus.get_object('com.redhat.Sugar.Logger', '/com/redhat/Sugar/Logger')
self._logger = dbus.Interface(proxy_obj, 'com.redhat.Sugar.Logger')
def start(self):
if os.environ.has_key('SUGAR_USE_CONSOLE'):
sys.stdout = self
sys.stderr = self
def write(self, s):
self._logger.log(self._application, s)
## Instruction:
Add messages on idle so that we don't break
## Code After:
import os
import sys
import dbus
import gobject
class LogWriter:
def __init__(self, application):
self._application = application
bus = dbus.SessionBus()
proxy_obj = bus.get_object('com.redhat.Sugar.Logger', '/com/redhat/Sugar/Logger')
self._logger = dbus.Interface(proxy_obj, 'com.redhat.Sugar.Logger')
def start(self):
if os.environ.has_key('SUGAR_USE_CONSOLE'):
sys.stdout = self
sys.stderr = self
def write(self, s):
gobject.idle_add(self._write, s)
def _write(self, s):
self._logger.log(self._application, s)
return False
|
import os
import sys
import dbus
+ import gobject
class LogWriter:
def __init__(self, application):
self._application = application
bus = dbus.SessionBus()
proxy_obj = bus.get_object('com.redhat.Sugar.Logger', '/com/redhat/Sugar/Logger')
self._logger = dbus.Interface(proxy_obj, 'com.redhat.Sugar.Logger')
def start(self):
if os.environ.has_key('SUGAR_USE_CONSOLE'):
sys.stdout = self
sys.stderr = self
def write(self, s):
+ gobject.idle_add(self._write, s)
+
+ def _write(self, s):
- self._logger.log(self._application, s)
? ---
+ self._logger.log(self._application, s)
+ return False
|
5dfd723b37e208c1b81e65cd2df1b7d9226493b3
|
numpy/_array_api/_sorting_functions.py
|
numpy/_array_api/_sorting_functions.py
|
def argsort(x, /, *, axis=-1, descending=False, stable=True):
from .. import argsort
from .. import flip
# Note: this keyword argument is different, and the default is different.
kind = 'stable' if stable else 'quicksort'
res = argsort(x, axis=axis, kind=kind)
if descending:
res = flip(res, axis=axis)
def sort(x, /, *, axis=-1, descending=False, stable=True):
from .. import sort
from .. import flip
# Note: this keyword argument is different, and the default is different.
kind = 'stable' if stable else 'quicksort'
res = sort(x, axis=axis, kind=kind)
if descending:
res = flip(res, axis=axis)
|
def argsort(x, /, *, axis=-1, descending=False, stable=True):
from .. import argsort
from .. import flip
# Note: this keyword argument is different, and the default is different.
kind = 'stable' if stable else 'quicksort'
res = argsort(x, axis=axis, kind=kind)
if descending:
res = flip(res, axis=axis)
return res
def sort(x, /, *, axis=-1, descending=False, stable=True):
from .. import sort
from .. import flip
# Note: this keyword argument is different, and the default is different.
kind = 'stable' if stable else 'quicksort'
res = sort(x, axis=axis, kind=kind)
if descending:
res = flip(res, axis=axis)
return res
|
Add missing returns to the array API sorting functions
|
Add missing returns to the array API sorting functions
|
Python
|
mit
|
cupy/cupy,cupy/cupy,cupy/cupy,cupy/cupy
|
def argsort(x, /, *, axis=-1, descending=False, stable=True):
from .. import argsort
from .. import flip
# Note: this keyword argument is different, and the default is different.
kind = 'stable' if stable else 'quicksort'
res = argsort(x, axis=axis, kind=kind)
if descending:
res = flip(res, axis=axis)
+ return res
def sort(x, /, *, axis=-1, descending=False, stable=True):
from .. import sort
from .. import flip
# Note: this keyword argument is different, and the default is different.
kind = 'stable' if stable else 'quicksort'
res = sort(x, axis=axis, kind=kind)
if descending:
res = flip(res, axis=axis)
+ return res
|
Add missing returns to the array API sorting functions
|
## Code Before:
def argsort(x, /, *, axis=-1, descending=False, stable=True):
from .. import argsort
from .. import flip
# Note: this keyword argument is different, and the default is different.
kind = 'stable' if stable else 'quicksort'
res = argsort(x, axis=axis, kind=kind)
if descending:
res = flip(res, axis=axis)
def sort(x, /, *, axis=-1, descending=False, stable=True):
from .. import sort
from .. import flip
# Note: this keyword argument is different, and the default is different.
kind = 'stable' if stable else 'quicksort'
res = sort(x, axis=axis, kind=kind)
if descending:
res = flip(res, axis=axis)
## Instruction:
Add missing returns to the array API sorting functions
## Code After:
def argsort(x, /, *, axis=-1, descending=False, stable=True):
from .. import argsort
from .. import flip
# Note: this keyword argument is different, and the default is different.
kind = 'stable' if stable else 'quicksort'
res = argsort(x, axis=axis, kind=kind)
if descending:
res = flip(res, axis=axis)
return res
def sort(x, /, *, axis=-1, descending=False, stable=True):
from .. import sort
from .. import flip
# Note: this keyword argument is different, and the default is different.
kind = 'stable' if stable else 'quicksort'
res = sort(x, axis=axis, kind=kind)
if descending:
res = flip(res, axis=axis)
return res
|
def argsort(x, /, *, axis=-1, descending=False, stable=True):
from .. import argsort
from .. import flip
# Note: this keyword argument is different, and the default is different.
kind = 'stable' if stable else 'quicksort'
res = argsort(x, axis=axis, kind=kind)
if descending:
res = flip(res, axis=axis)
+ return res
def sort(x, /, *, axis=-1, descending=False, stable=True):
from .. import sort
from .. import flip
# Note: this keyword argument is different, and the default is different.
kind = 'stable' if stable else 'quicksort'
res = sort(x, axis=axis, kind=kind)
if descending:
res = flip(res, axis=axis)
+ return res
|
c5d0595acb080bdc33efdc95a5781ed6b87b0a2e
|
warehouse/packages/models.py
|
warehouse/packages/models.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from sqlalchemy import event
from sqlalchemy.schema import FetchedValue
from sqlalchemy.dialects import postgresql as pg
from warehouse import db
from warehouse.databases.mixins import UUIDPrimaryKeyMixin
from warehouse.database.types import CIText
class Project(UUIDPrimaryKeyMixin, db.Model):
__tablename__ = "projects"
name = db.Column(CIText, unique=True, nullable=False)
normalized = db.Column(CIText, unique=True, nullable=False,
server_default=FetchedValue(),
server_onupdate=FetchedValue())
def __init__(self, name):
self.name = name
def __repr__(self):
return "<Project: {name}>".format(name=self.name)
# Create the Trigger to fill in normalized (TODO: Move this to Alembic?)
event.listen(Project.__table__, "after_create", db.DDL("""
CREATE OR REPLACE FUNCTION normalize_name() RETURNS trigger AS
$body$
BEGIN
new.normalized = lower(regexp_replace(new.name, '[^A-Za-z0-9.]+', '-'));
RETURN new;
end;
$body$ LANGUAGE plpgsql;
CREATE TRIGGER %(table)s_normalize_name
BEFORE INSERT OR UPDATE
ON %(table)s
FOR EACH ROW
EXECUTE PROCEDURE normalize_name();
"""))
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from sqlalchemy.schema import FetchedValue
from sqlalchemy.dialects import postgresql as pg
from sqlalchemy.ext.declarative import declared_attr
from warehouse import db
from warehouse.database.mixins import UUIDPrimaryKeyMixin, TimeStampedMixin
from warehouse.database.schema import TableDDL
from warehouse.database.types import CIText
from warehouse.database.utils import table_args
class Project(UUIDPrimaryKeyMixin, TimeStampedMixin, db.Model):
__tablename__ = "projects"
__table_args__ = declared_attr(table_args((
TableDDL("""
CREATE OR REPLACE FUNCTION normalize_name()
RETURNS trigger AS $$
BEGIN
NEW.normalized = lower(regexp_replace(new.name, '[^A-Za-z0-9.]+', '-'));
return NEW;
END;
$$ LANGUAGE plpgsql;
CREATE TRIGGER %(table)s_normalize_name
BEFORE INSERT OR UPDATE
ON %(table)s
FOR EACH ROW
EXECUTE PROCEDURE normalize_name();
"""),
)))
name = db.Column(CIText, unique=True, nullable=False)
normalized = db.Column(CIText, unique=True, nullable=False,
server_default=FetchedValue(),
server_onupdate=FetchedValue())
def __init__(self, name):
self.name = name
def __repr__(self):
return "<Project: {name}>".format(name=self.name)
|
Refactor Project to use new mixins and methods
|
Refactor Project to use new mixins and methods
|
Python
|
bsd-2-clause
|
davidfischer/warehouse
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
- from sqlalchemy import event
from sqlalchemy.schema import FetchedValue
from sqlalchemy.dialects import postgresql as pg
+ from sqlalchemy.ext.declarative import declared_attr
from warehouse import db
- from warehouse.databases.mixins import UUIDPrimaryKeyMixin
+ from warehouse.database.mixins import UUIDPrimaryKeyMixin, TimeStampedMixin
+ from warehouse.database.schema import TableDDL
from warehouse.database.types import CIText
+ from warehouse.database.utils import table_args
- class Project(UUIDPrimaryKeyMixin, db.Model):
+ class Project(UUIDPrimaryKeyMixin, TimeStampedMixin, db.Model):
__tablename__ = "projects"
+ __table_args__ = declared_attr(table_args((
+ TableDDL("""
+ CREATE OR REPLACE FUNCTION normalize_name()
+ RETURNS trigger AS $$
+ BEGIN
+ NEW.normalized = lower(regexp_replace(new.name, '[^A-Za-z0-9.]+', '-'));
+ return NEW;
+ END;
+ $$ LANGUAGE plpgsql;
+
+ CREATE TRIGGER %(table)s_normalize_name
+ BEFORE INSERT OR UPDATE
+ ON %(table)s
+ FOR EACH ROW
+ EXECUTE PROCEDURE normalize_name();
+ """),
+ )))
name = db.Column(CIText, unique=True, nullable=False)
normalized = db.Column(CIText, unique=True, nullable=False,
server_default=FetchedValue(),
server_onupdate=FetchedValue())
def __init__(self, name):
self.name = name
def __repr__(self):
return "<Project: {name}>".format(name=self.name)
-
- # Create the Trigger to fill in normalized (TODO: Move this to Alembic?)
- event.listen(Project.__table__, "after_create", db.DDL("""
- CREATE OR REPLACE FUNCTION normalize_name() RETURNS trigger AS
- $body$
- BEGIN
- new.normalized = lower(regexp_replace(new.name, '[^A-Za-z0-9.]+', '-'));
- RETURN new;
- end;
- $body$ LANGUAGE plpgsql;
-
- CREATE TRIGGER %(table)s_normalize_name
- BEFORE INSERT OR UPDATE
- ON %(table)s
- FOR EACH ROW
- EXECUTE PROCEDURE normalize_name();
- """))
-
|
Refactor Project to use new mixins and methods
|
## Code Before:
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from sqlalchemy import event
from sqlalchemy.schema import FetchedValue
from sqlalchemy.dialects import postgresql as pg
from warehouse import db
from warehouse.databases.mixins import UUIDPrimaryKeyMixin
from warehouse.database.types import CIText
class Project(UUIDPrimaryKeyMixin, db.Model):
__tablename__ = "projects"
name = db.Column(CIText, unique=True, nullable=False)
normalized = db.Column(CIText, unique=True, nullable=False,
server_default=FetchedValue(),
server_onupdate=FetchedValue())
def __init__(self, name):
self.name = name
def __repr__(self):
return "<Project: {name}>".format(name=self.name)
# Create the Trigger to fill in normalized (TODO: Move this to Alembic?)
event.listen(Project.__table__, "after_create", db.DDL("""
CREATE OR REPLACE FUNCTION normalize_name() RETURNS trigger AS
$body$
BEGIN
new.normalized = lower(regexp_replace(new.name, '[^A-Za-z0-9.]+', '-'));
RETURN new;
end;
$body$ LANGUAGE plpgsql;
CREATE TRIGGER %(table)s_normalize_name
BEFORE INSERT OR UPDATE
ON %(table)s
FOR EACH ROW
EXECUTE PROCEDURE normalize_name();
"""))
## Instruction:
Refactor Project to use new mixins and methods
## Code After:
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from sqlalchemy.schema import FetchedValue
from sqlalchemy.dialects import postgresql as pg
from sqlalchemy.ext.declarative import declared_attr
from warehouse import db
from warehouse.database.mixins import UUIDPrimaryKeyMixin, TimeStampedMixin
from warehouse.database.schema import TableDDL
from warehouse.database.types import CIText
from warehouse.database.utils import table_args
class Project(UUIDPrimaryKeyMixin, TimeStampedMixin, db.Model):
__tablename__ = "projects"
__table_args__ = declared_attr(table_args((
TableDDL("""
CREATE OR REPLACE FUNCTION normalize_name()
RETURNS trigger AS $$
BEGIN
NEW.normalized = lower(regexp_replace(new.name, '[^A-Za-z0-9.]+', '-'));
return NEW;
END;
$$ LANGUAGE plpgsql;
CREATE TRIGGER %(table)s_normalize_name
BEFORE INSERT OR UPDATE
ON %(table)s
FOR EACH ROW
EXECUTE PROCEDURE normalize_name();
"""),
)))
name = db.Column(CIText, unique=True, nullable=False)
normalized = db.Column(CIText, unique=True, nullable=False,
server_default=FetchedValue(),
server_onupdate=FetchedValue())
def __init__(self, name):
self.name = name
def __repr__(self):
return "<Project: {name}>".format(name=self.name)
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
- from sqlalchemy import event
from sqlalchemy.schema import FetchedValue
from sqlalchemy.dialects import postgresql as pg
+ from sqlalchemy.ext.declarative import declared_attr
from warehouse import db
- from warehouse.databases.mixins import UUIDPrimaryKeyMixin
? -
+ from warehouse.database.mixins import UUIDPrimaryKeyMixin, TimeStampedMixin
? ++++++++++++++++++
+ from warehouse.database.schema import TableDDL
from warehouse.database.types import CIText
+ from warehouse.database.utils import table_args
- class Project(UUIDPrimaryKeyMixin, db.Model):
+ class Project(UUIDPrimaryKeyMixin, TimeStampedMixin, db.Model):
? ++++++++++++++++++
__tablename__ = "projects"
+ __table_args__ = declared_attr(table_args((
+ TableDDL("""
+ CREATE OR REPLACE FUNCTION normalize_name()
+ RETURNS trigger AS $$
+ BEGIN
+ NEW.normalized = lower(regexp_replace(new.name, '[^A-Za-z0-9.]+', '-'));
+ return NEW;
+ END;
+ $$ LANGUAGE plpgsql;
+
+ CREATE TRIGGER %(table)s_normalize_name
+ BEFORE INSERT OR UPDATE
+ ON %(table)s
+ FOR EACH ROW
+ EXECUTE PROCEDURE normalize_name();
+ """),
+ )))
name = db.Column(CIText, unique=True, nullable=False)
normalized = db.Column(CIText, unique=True, nullable=False,
server_default=FetchedValue(),
server_onupdate=FetchedValue())
def __init__(self, name):
self.name = name
def __repr__(self):
return "<Project: {name}>".format(name=self.name)
-
-
- # Create the Trigger to fill in normalized (TODO: Move this to Alembic?)
- event.listen(Project.__table__, "after_create", db.DDL("""
- CREATE OR REPLACE FUNCTION normalize_name() RETURNS trigger AS
- $body$
- BEGIN
- new.normalized = lower(regexp_replace(new.name, '[^A-Za-z0-9.]+', '-'));
- RETURN new;
- end;
- $body$ LANGUAGE plpgsql;
-
- CREATE TRIGGER %(table)s_normalize_name
- BEFORE INSERT OR UPDATE
- ON %(table)s
- FOR EACH ROW
- EXECUTE PROCEDURE normalize_name();
- """))
|
b330afbcdc907343ab609a5b000f08e69671116e
|
sobotka/lib/ssh_config_util.py
|
sobotka/lib/ssh_config_util.py
|
from storm.parsers.ssh_config_parser import ConfigParser as StormParser
from os.path import expanduser
def add_host(name, user, hostname, key_file):
sconfig = StormParser(expanduser("~/.ssh/config"))
sconfig.load()
sconfig.add_host(name, {
'user': user,
'hostname': hostname,
'IdentityFile': key_file
})
sconfig.write_to_ssh_config()
def remove_host(name):
sconfig = StormParser(expanduser("~/.ssh/config"))
sconfig.load()
sconfig.delete_host(name)
sconfig.write_to_ssh_config()
|
from storm.parsers.ssh_config_parser import ConfigParser as StormParser
from os.path import expanduser
def add_host(name, user, hostname, key_file):
sconfig = StormParser(expanduser("~/.ssh/config"))
sconfig.load()
sconfig.add_host(name, {
'user': user,
'hostname': hostname,
'IdentityFile': key_file,
"StrictHostKeyChecking": "no"
})
sconfig.write_to_ssh_config()
def remove_host(name):
sconfig = StormParser(expanduser("~/.ssh/config"))
sconfig.load()
sconfig.delete_host(name)
sconfig.write_to_ssh_config()
|
Disable strict host checking for ssh
|
Disable strict host checking for ssh
|
Python
|
mit
|
looneym/sobotka,looneym/sobotka
|
from storm.parsers.ssh_config_parser import ConfigParser as StormParser
from os.path import expanduser
def add_host(name, user, hostname, key_file):
sconfig = StormParser(expanduser("~/.ssh/config"))
sconfig.load()
sconfig.add_host(name, {
'user': user,
'hostname': hostname,
- 'IdentityFile': key_file
+ 'IdentityFile': key_file,
+ "StrictHostKeyChecking": "no"
})
sconfig.write_to_ssh_config()
def remove_host(name):
sconfig = StormParser(expanduser("~/.ssh/config"))
sconfig.load()
sconfig.delete_host(name)
sconfig.write_to_ssh_config()
|
Disable strict host checking for ssh
|
## Code Before:
from storm.parsers.ssh_config_parser import ConfigParser as StormParser
from os.path import expanduser
def add_host(name, user, hostname, key_file):
sconfig = StormParser(expanduser("~/.ssh/config"))
sconfig.load()
sconfig.add_host(name, {
'user': user,
'hostname': hostname,
'IdentityFile': key_file
})
sconfig.write_to_ssh_config()
def remove_host(name):
sconfig = StormParser(expanduser("~/.ssh/config"))
sconfig.load()
sconfig.delete_host(name)
sconfig.write_to_ssh_config()
## Instruction:
Disable strict host checking for ssh
## Code After:
from storm.parsers.ssh_config_parser import ConfigParser as StormParser
from os.path import expanduser
def add_host(name, user, hostname, key_file):
sconfig = StormParser(expanduser("~/.ssh/config"))
sconfig.load()
sconfig.add_host(name, {
'user': user,
'hostname': hostname,
'IdentityFile': key_file,
"StrictHostKeyChecking": "no"
})
sconfig.write_to_ssh_config()
def remove_host(name):
sconfig = StormParser(expanduser("~/.ssh/config"))
sconfig.load()
sconfig.delete_host(name)
sconfig.write_to_ssh_config()
|
from storm.parsers.ssh_config_parser import ConfigParser as StormParser
from os.path import expanduser
def add_host(name, user, hostname, key_file):
sconfig = StormParser(expanduser("~/.ssh/config"))
sconfig.load()
sconfig.add_host(name, {
'user': user,
'hostname': hostname,
- 'IdentityFile': key_file
+ 'IdentityFile': key_file,
? +
+ "StrictHostKeyChecking": "no"
})
sconfig.write_to_ssh_config()
def remove_host(name):
sconfig = StormParser(expanduser("~/.ssh/config"))
sconfig.load()
sconfig.delete_host(name)
sconfig.write_to_ssh_config()
|
7809a22a67f9c9d67d4765c2029d9b30656606bc
|
hello.py
|
hello.py
|
print("hello")
|
def greeting(msg):
print(msg)
if __name__=='__main__':
greeting("hello")
|
Add function to print a message
|
Add function to print a message
|
Python
|
mit
|
ag4ml/cs3240-labdemo
|
- print("hello")
+ def greeting(msg):
+ print(msg)
+ if __name__=='__main__':
+ greeting("hello")
+
|
Add function to print a message
|
## Code Before:
print("hello")
## Instruction:
Add function to print a message
## Code After:
def greeting(msg):
print(msg)
if __name__=='__main__':
greeting("hello")
|
- print("hello")
+ def greeting(msg):
+ print(msg)
+
+ if __name__=='__main__':
+ greeting("hello")
|
f3e1c74d9b85814cd56397560c5023e7ef536caa
|
tests/test_statuspage.py
|
tests/test_statuspage.py
|
import unittest
import sys
from helpers.statuspage import StatusPage
from test_postgresql import MockConnect
if sys.hexversion >= 0x03000000:
from io import BytesIO as IO
else:
from StringIO import StringIO as IO
class TestStatusPage(unittest.TestCase):
def test_do_GET(self):
for mock_recovery in [True, False]:
for page in [b'GET /pg_master', b'GET /pg_slave', b'GET /pg_status', b'GET /not_found']:
self.http_server = MockServer(('0.0.0.0', 8888), StatusPage, page, mock_recovery)
class MockRequest(object):
def __init__(self, path):
self.path = path
def makefile(self, *args, **kwargs):
return IO(self.path)
class MockServer(object):
def __init__(self, ip_port, Handler, path, mock_recovery=False):
self.postgresql = MockConnect()
self.postgresql.mock_values['mock_recovery'] = mock_recovery
Handler(MockRequest(path), ip_port, self)
if __name__ == '__main__':
unittest.main()
|
import unittest
import sys
from helpers.statuspage import StatusPage
from test_postgresql import MockConnect
if sys.hexversion >= 0x03000000:
from io import BytesIO as IO
else:
from StringIO import StringIO as IO
class TestStatusPage(unittest.TestCase):
def test_do_GET(self):
for mock_recovery in [True, False]:
for page in [b'GET /pg_master', b'GET /pg_slave', b'GET /pg_status', b'GET /not_found']:
self.http_server = MockServer(('0.0.0.0', 8888), StatusPage, page, mock_recovery)
class MockRequest(object):
def __init__(self, path):
self.path = path
def makefile(self, *args, **kwargs):
return IO(self.path)
class MockServer(object):
def __init__(self, ip_port, Handler, path, mock_recovery=False):
self.postgresql = MockConnect()
self.postgresql.mock_values['mock_recovery'] = mock_recovery
Handler(MockRequest(path), ip_port, self)
|
Remove some more unneccesary code.
|
Unittests: Remove some more unneccesary code.
|
Python
|
mit
|
zalando/patroni,zalando/patroni,sean-/patroni,pgexperts/patroni,sean-/patroni,pgexperts/patroni,jinty/patroni,jinty/patroni
|
import unittest
import sys
from helpers.statuspage import StatusPage
from test_postgresql import MockConnect
if sys.hexversion >= 0x03000000:
from io import BytesIO as IO
else:
from StringIO import StringIO as IO
class TestStatusPage(unittest.TestCase):
def test_do_GET(self):
for mock_recovery in [True, False]:
for page in [b'GET /pg_master', b'GET /pg_slave', b'GET /pg_status', b'GET /not_found']:
self.http_server = MockServer(('0.0.0.0', 8888), StatusPage, page, mock_recovery)
class MockRequest(object):
def __init__(self, path):
self.path = path
def makefile(self, *args, **kwargs):
return IO(self.path)
class MockServer(object):
def __init__(self, ip_port, Handler, path, mock_recovery=False):
self.postgresql = MockConnect()
self.postgresql.mock_values['mock_recovery'] = mock_recovery
Handler(MockRequest(path), ip_port, self)
-
- if __name__ == '__main__':
- unittest.main()
-
|
Remove some more unneccesary code.
|
## Code Before:
import unittest
import sys
from helpers.statuspage import StatusPage
from test_postgresql import MockConnect
if sys.hexversion >= 0x03000000:
from io import BytesIO as IO
else:
from StringIO import StringIO as IO
class TestStatusPage(unittest.TestCase):
def test_do_GET(self):
for mock_recovery in [True, False]:
for page in [b'GET /pg_master', b'GET /pg_slave', b'GET /pg_status', b'GET /not_found']:
self.http_server = MockServer(('0.0.0.0', 8888), StatusPage, page, mock_recovery)
class MockRequest(object):
def __init__(self, path):
self.path = path
def makefile(self, *args, **kwargs):
return IO(self.path)
class MockServer(object):
def __init__(self, ip_port, Handler, path, mock_recovery=False):
self.postgresql = MockConnect()
self.postgresql.mock_values['mock_recovery'] = mock_recovery
Handler(MockRequest(path), ip_port, self)
if __name__ == '__main__':
unittest.main()
## Instruction:
Remove some more unneccesary code.
## Code After:
import unittest
import sys
from helpers.statuspage import StatusPage
from test_postgresql import MockConnect
if sys.hexversion >= 0x03000000:
from io import BytesIO as IO
else:
from StringIO import StringIO as IO
class TestStatusPage(unittest.TestCase):
def test_do_GET(self):
for mock_recovery in [True, False]:
for page in [b'GET /pg_master', b'GET /pg_slave', b'GET /pg_status', b'GET /not_found']:
self.http_server = MockServer(('0.0.0.0', 8888), StatusPage, page, mock_recovery)
class MockRequest(object):
def __init__(self, path):
self.path = path
def makefile(self, *args, **kwargs):
return IO(self.path)
class MockServer(object):
def __init__(self, ip_port, Handler, path, mock_recovery=False):
self.postgresql = MockConnect()
self.postgresql.mock_values['mock_recovery'] = mock_recovery
Handler(MockRequest(path), ip_port, self)
|
import unittest
import sys
from helpers.statuspage import StatusPage
from test_postgresql import MockConnect
if sys.hexversion >= 0x03000000:
from io import BytesIO as IO
else:
from StringIO import StringIO as IO
class TestStatusPage(unittest.TestCase):
def test_do_GET(self):
for mock_recovery in [True, False]:
for page in [b'GET /pg_master', b'GET /pg_slave', b'GET /pg_status', b'GET /not_found']:
self.http_server = MockServer(('0.0.0.0', 8888), StatusPage, page, mock_recovery)
class MockRequest(object):
def __init__(self, path):
self.path = path
def makefile(self, *args, **kwargs):
return IO(self.path)
class MockServer(object):
def __init__(self, ip_port, Handler, path, mock_recovery=False):
self.postgresql = MockConnect()
self.postgresql.mock_values['mock_recovery'] = mock_recovery
Handler(MockRequest(path), ip_port, self)
-
-
- if __name__ == '__main__':
- unittest.main()
|
daa3504942e088fb6cd23eaccff78e613460f517
|
mezzanine/accounts/admin.py
|
mezzanine/accounts/admin.py
|
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from mezzanine.accounts.models import get_profile_model
Profile = get_profile_model()
class ProfileInline(admin.StackedInline):
model = Profile
can_delete = False
template = "admin/profile_inline.html"
extra = 0
class UserProfileAdmin(UserAdmin):
inlines = (ProfileInline,) if Profile else ()
admin.site.unregister(User)
admin.site.register(User, UserProfileAdmin)
|
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from mezzanine.accounts.models import get_profile_model
from mezzanine.core.admin import AdminProfileInline
Profile = get_profile_model()
class ProfileInline(admin.StackedInline):
model = Profile
can_delete = False
template = "admin/profile_inline.html"
extra = 0
class UserProfileAdmin(UserAdmin):
inlines = (ProfileInline, AdminProfileInline) if Profile else (AdminProfileInline,)
admin.site.unregister(User)
admin.site.register(User, UserProfileAdmin)
|
Include AdminProfileInline so that it is not lost if the user enables Mezzanine accounts.
|
Include AdminProfileInline so that it is not lost if the user enables Mezzanine accounts.
--HG--
branch : admin_site_permissions
|
Python
|
bsd-2-clause
|
spookylukey/mezzanine,christianwgd/mezzanine,molokov/mezzanine,frankier/mezzanine,SoLoHiC/mezzanine,ZeroXn/mezzanine,readevalprint/mezzanine,eino-makitalo/mezzanine,stbarnabas/mezzanine,theclanks/mezzanine,scarcry/snm-mezzanine,Cajoline/mezzanine,molokov/mezzanine,sjdines/mezzanine,joshcartme/mezzanine,frankchin/mezzanine,saintbird/mezzanine,Cicero-Zhao/mezzanine,wbtuomela/mezzanine,stephenmcd/mezzanine,tuxinhang1989/mezzanine,promil23/mezzanine,fusionbox/mezzanine,stephenmcd/mezzanine,dekomote/mezzanine-modeltranslation-backport,promil23/mezzanine,wbtuomela/mezzanine,wrwrwr/mezzanine,adrian-the-git/mezzanine,emile2016/mezzanine,christianwgd/mezzanine,christianwgd/mezzanine,geodesign/mezzanine,douglaskastle/mezzanine,wrwrwr/mezzanine,industrydive/mezzanine,webounty/mezzanine,jerivas/mezzanine,scarcry/snm-mezzanine,dsanders11/mezzanine,PegasusWang/mezzanine,ZeroXn/mezzanine,Kniyl/mezzanine,promil23/mezzanine,dsanders11/mezzanine,dekomote/mezzanine-modeltranslation-backport,gradel/mezzanine,viaregio/mezzanine,jjz/mezzanine,dustinrb/mezzanine,webounty/mezzanine,dustinrb/mezzanine,orlenko/sfpirg,nikolas/mezzanine,molokov/mezzanine,spookylukey/mezzanine,saintbird/mezzanine,viaregio/mezzanine,agepoly/mezzanine,biomassives/mezzanine,sjuxax/mezzanine,emile2016/mezzanine,Skytorn86/mezzanine,industrydive/mezzanine,douglaskastle/mezzanine,ZeroXn/mezzanine,frankchin/mezzanine,damnfine/mezzanine,frankchin/mezzanine,theclanks/mezzanine,webounty/mezzanine,douglaskastle/mezzanine,ryneeverett/mezzanine,vladir/mezzanine,wbtuomela/mezzanine,tuxinhang1989/mezzanine,joshcartme/mezzanine,joshcartme/mezzanine,ryneeverett/mezzanine,theclanks/mezzanine,PegasusWang/mezzanine,jjz/mezzanine,wyzex/mezzanine,eino-makitalo/mezzanine,wyzex/mezzanine,orlenko/plei,jerivas/mezzanine,Kniyl/mezzanine,Kniyl/mezzanine,adrian-the-git/mezzanine,gradel/mezzanine,orlenko/sfpirg,dovydas/mezzanine,Cajoline/mezzanine,stbarnabas/mezzanine,sjuxax/mezzanine,Cicero-Zhao/mezzanine,mush42/mezzanine,geodesign/mezzanin
e,emile2016/mezzanine,Skytorn86/mezzanine,viaregio/mezzanine,orlenko/plei,SoLoHiC/mezzanine,mush42/mezzanine,fusionbox/mezzanine,dovydas/mezzanine,sjdines/mezzanine,adrian-the-git/mezzanine,orlenko/plei,readevalprint/mezzanine,batpad/mezzanine,eino-makitalo/mezzanine,batpad/mezzanine,vladir/mezzanine,SoLoHiC/mezzanine,geodesign/mezzanine,orlenko/sfpirg,AlexHill/mezzanine,AlexHill/mezzanine,damnfine/mezzanine,damnfine/mezzanine,nikolas/mezzanine,nikolas/mezzanine,agepoly/mezzanine,cccs-web/mezzanine,saintbird/mezzanine,mush42/mezzanine,jerivas/mezzanine,industrydive/mezzanine,biomassives/mezzanine,spookylukey/mezzanine,gradel/mezzanine,dekomote/mezzanine-modeltranslation-backport,Cajoline/mezzanine,cccs-web/mezzanine,sjuxax/mezzanine,sjdines/mezzanine,Skytorn86/mezzanine,stephenmcd/mezzanine,dsanders11/mezzanine,dovydas/mezzanine,frankier/mezzanine,wyzex/mezzanine,biomassives/mezzanine,PegasusWang/mezzanine,scarcry/snm-mezzanine,dustinrb/mezzanine,tuxinhang1989/mezzanine,agepoly/mezzanine,readevalprint/mezzanine,frankier/mezzanine,jjz/mezzanine,vladir/mezzanine,ryneeverett/mezzanine
|
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from mezzanine.accounts.models import get_profile_model
+ from mezzanine.core.admin import AdminProfileInline
Profile = get_profile_model()
class ProfileInline(admin.StackedInline):
model = Profile
can_delete = False
template = "admin/profile_inline.html"
extra = 0
class UserProfileAdmin(UserAdmin):
- inlines = (ProfileInline,) if Profile else ()
+ inlines = (ProfileInline, AdminProfileInline) if Profile else (AdminProfileInline,)
admin.site.unregister(User)
admin.site.register(User, UserProfileAdmin)
|
Include AdminProfileInline so that it is not lost if the user enables Mezzanine accounts.
|
## Code Before:
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from mezzanine.accounts.models import get_profile_model
Profile = get_profile_model()
class ProfileInline(admin.StackedInline):
model = Profile
can_delete = False
template = "admin/profile_inline.html"
extra = 0
class UserProfileAdmin(UserAdmin):
inlines = (ProfileInline,) if Profile else ()
admin.site.unregister(User)
admin.site.register(User, UserProfileAdmin)
## Instruction:
Include AdminProfileInline so that it is not lost if the user enables Mezzanine accounts.
## Code After:
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from mezzanine.accounts.models import get_profile_model
from mezzanine.core.admin import AdminProfileInline
Profile = get_profile_model()
class ProfileInline(admin.StackedInline):
model = Profile
can_delete = False
template = "admin/profile_inline.html"
extra = 0
class UserProfileAdmin(UserAdmin):
inlines = (ProfileInline, AdminProfileInline) if Profile else (AdminProfileInline,)
admin.site.unregister(User)
admin.site.register(User, UserProfileAdmin)
|
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from mezzanine.accounts.models import get_profile_model
+ from mezzanine.core.admin import AdminProfileInline
Profile = get_profile_model()
class ProfileInline(admin.StackedInline):
model = Profile
can_delete = False
template = "admin/profile_inline.html"
extra = 0
class UserProfileAdmin(UserAdmin):
- inlines = (ProfileInline,) if Profile else ()
+ inlines = (ProfileInline, AdminProfileInline) if Profile else (AdminProfileInline,)
admin.site.unregister(User)
admin.site.register(User, UserProfileAdmin)
|
ac5ceee751b0c374ffcf1bd0e52ce085e8d7295c
|
nyucal/cli.py
|
nyucal/cli.py
|
import io
import click
from nyucal import nyucal
import requests
@click.group()
def main(args=None):
"""Console script for nyucal."""
click.echo("cli for nyucal")
@main.command()
def list(source=None):
"""List the available calendars in the calendar source"""
if source is None:
source = nyucal.SOURCE_URL # noqa
store = nyucal.CalendarStore(source)
for line in store.calendar_names:
click.echo(line)
if __name__ == "__main__":
main()
|
import io
import click
from nyucal import nyucal
import requests
@click.group()
def main(args=None):
"""Console script for nyucal."""
pass
@main.command()
@click.option('--source', '-s', default=nyucal.SOURCE_URL,
help="""Calendars source (URL, file path, or string).
(default: {} """.format(nyucal.SOURCE_URL))
def list(source):
"""List the available calendars in the calendar source
Since the calendar store is, by default, scraped from a web page,
this command will fail if no source is specified and the computer
is not online.
"""
store = nyucal.CalendarStore(source)
for line in store.calendar_names:
click.echo(line)
@main.command()
@click.argument('name', nargs=1)
@click.option('--source', '-s', default=nyucal.SOURCE_URL,
help="""Calendars source (URL, file path, or string).
(default: {} """.format(nyucal.SOURCE_URL))
@click.option('--format', '-f',
type=click.Choice(['gcalcsv']),
default='gcalcsv',
help='Write in this format')
@click.option('--output', '-o', type=click.File('w'), default='-',
help='Write to this file (default: stdout)')
def get(source, name, format, output):
"""Get the calendar named NAME and output in the specified format
If NAME contains a space, it will need to be quoted.
Since the calendar store is, by default, scraped from a web page,
this command will fail if no source is specified and the computer
is not online.
"""
store = nyucal.CalendarStore(source)
calendar = store.calendar(name)
writers = {'gcalcsv': nyucal.GcalCsvWriter}
writer = writers[format.lower()](output)
writer.write(calendar)
if __name__ == "__main__":
main()
|
Add the `get` command to the CLI.
|
Add the `get` command to the CLI.
|
Python
|
mit
|
nyumathclinic/nyucal,nyumathclinic/nyucal
|
import io
import click
from nyucal import nyucal
import requests
@click.group()
def main(args=None):
"""Console script for nyucal."""
- click.echo("cli for nyucal")
+ pass
@main.command()
+ @click.option('--source', '-s', default=nyucal.SOURCE_URL,
+ help="""Calendars source (URL, file path, or string).
+ (default: {} """.format(nyucal.SOURCE_URL))
- def list(source=None):
+ def list(source):
- """List the available calendars in the calendar source"""
+ """List the available calendars in the calendar source
- if source is None:
- source = nyucal.SOURCE_URL # noqa
+
+ Since the calendar store is, by default, scraped from a web page,
+ this command will fail if no source is specified and the computer
+ is not online.
+ """
store = nyucal.CalendarStore(source)
for line in store.calendar_names:
click.echo(line)
+ @main.command()
+ @click.argument('name', nargs=1)
+ @click.option('--source', '-s', default=nyucal.SOURCE_URL,
+ help="""Calendars source (URL, file path, or string).
+ (default: {} """.format(nyucal.SOURCE_URL))
+ @click.option('--format', '-f',
+ type=click.Choice(['gcalcsv']),
+ default='gcalcsv',
+ help='Write in this format')
+ @click.option('--output', '-o', type=click.File('w'), default='-',
+ help='Write to this file (default: stdout)')
+ def get(source, name, format, output):
+ """Get the calendar named NAME and output in the specified format
+
+ If NAME contains a space, it will need to be quoted.
+
+ Since the calendar store is, by default, scraped from a web page,
+ this command will fail if no source is specified and the computer
+ is not online.
+ """
+ store = nyucal.CalendarStore(source)
+ calendar = store.calendar(name)
+ writers = {'gcalcsv': nyucal.GcalCsvWriter}
+ writer = writers[format.lower()](output)
+ writer.write(calendar)
+
+
if __name__ == "__main__":
main()
|
Add the `get` command to the CLI.
|
## Code Before:
import io
import click
from nyucal import nyucal
import requests
@click.group()
def main(args=None):
"""Console script for nyucal."""
click.echo("cli for nyucal")
@main.command()
def list(source=None):
"""List the available calendars in the calendar source"""
if source is None:
source = nyucal.SOURCE_URL # noqa
store = nyucal.CalendarStore(source)
for line in store.calendar_names:
click.echo(line)
if __name__ == "__main__":
main()
## Instruction:
Add the `get` command to the CLI.
## Code After:
import io
import click
from nyucal import nyucal
import requests
@click.group()
def main(args=None):
"""Console script for nyucal."""
pass
@main.command()
@click.option('--source', '-s', default=nyucal.SOURCE_URL,
help="""Calendars source (URL, file path, or string).
(default: {} """.format(nyucal.SOURCE_URL))
def list(source):
"""List the available calendars in the calendar source
Since the calendar store is, by default, scraped from a web page,
this command will fail if no source is specified and the computer
is not online.
"""
store = nyucal.CalendarStore(source)
for line in store.calendar_names:
click.echo(line)
@main.command()
@click.argument('name', nargs=1)
@click.option('--source', '-s', default=nyucal.SOURCE_URL,
help="""Calendars source (URL, file path, or string).
(default: {} """.format(nyucal.SOURCE_URL))
@click.option('--format', '-f',
type=click.Choice(['gcalcsv']),
default='gcalcsv',
help='Write in this format')
@click.option('--output', '-o', type=click.File('w'), default='-',
help='Write to this file (default: stdout)')
def get(source, name, format, output):
"""Get the calendar named NAME and output in the specified format
If NAME contains a space, it will need to be quoted.
Since the calendar store is, by default, scraped from a web page,
this command will fail if no source is specified and the computer
is not online.
"""
store = nyucal.CalendarStore(source)
calendar = store.calendar(name)
writers = {'gcalcsv': nyucal.GcalCsvWriter}
writer = writers[format.lower()](output)
writer.write(calendar)
if __name__ == "__main__":
main()
|
import io
import click
from nyucal import nyucal
import requests
@click.group()
def main(args=None):
"""Console script for nyucal."""
- click.echo("cli for nyucal")
+ pass
@main.command()
+ @click.option('--source', '-s', default=nyucal.SOURCE_URL,
+ help="""Calendars source (URL, file path, or string).
+ (default: {} """.format(nyucal.SOURCE_URL))
- def list(source=None):
? -----
+ def list(source):
- """List the available calendars in the calendar source"""
? ---
+ """List the available calendars in the calendar source
- if source is None:
- source = nyucal.SOURCE_URL # noqa
+
+ Since the calendar store is, by default, scraped from a web page,
+ this command will fail if no source is specified and the computer
+ is not online.
+ """
store = nyucal.CalendarStore(source)
for line in store.calendar_names:
click.echo(line)
+ @main.command()
+ @click.argument('name', nargs=1)
+ @click.option('--source', '-s', default=nyucal.SOURCE_URL,
+ help="""Calendars source (URL, file path, or string).
+ (default: {} """.format(nyucal.SOURCE_URL))
+ @click.option('--format', '-f',
+ type=click.Choice(['gcalcsv']),
+ default='gcalcsv',
+ help='Write in this format')
+ @click.option('--output', '-o', type=click.File('w'), default='-',
+ help='Write to this file (default: stdout)')
+ def get(source, name, format, output):
+ """Get the calendar named NAME and output in the specified format
+
+ If NAME contains a space, it will need to be quoted.
+
+ Since the calendar store is, by default, scraped from a web page,
+ this command will fail if no source is specified and the computer
+ is not online.
+ """
+ store = nyucal.CalendarStore(source)
+ calendar = store.calendar(name)
+ writers = {'gcalcsv': nyucal.GcalCsvWriter}
+ writer = writers[format.lower()](output)
+ writer.write(calendar)
+
+
if __name__ == "__main__":
main()
|
081dcb1a6f3531249f8948b019d8fdc4175dbe61
|
makerscience_profile/api.py
|
makerscience_profile/api.py
|
from .models import MakerScienceProfile
from tastypie.resources import ModelResource
from tastypie.authorization import DjangoAuthorization
from tastypie import fields
from tastypie.constants import ALL_WITH_RELATIONS
from dataserver.authentication import AnonymousApiKeyAuthentication
from accounts.api import ProfileResource
from scout.api import PostalAddressResource
class MakerScienceProfileResource(ModelResource):
parent = fields.OneToOneField(ProfileResource, 'parent')
location = fields.ToOneField(PostalAddressResource, 'location', null=True, blank=True, full=True)
class Meta:
queryset = MakerScienceProfile.objects.all()
allowed_methods = ['get', 'post', 'put', 'patch']
resource_name = 'makerscience/profile'
authentication = AnonymousApiKeyAuthentication()
authorization = DjangoAuthorization()
always_return_data = True
filtering = {
'parent' : ALL_WITH_RELATIONS,
}
def dehydrate(self, bundle):
bundle.data["first_name"] = bundle.obj.parent.user.first_name
bundle.data["last_name"] = bundle.obj.parent.user.last_name
return bundle
|
from .models import MakerScienceProfile
from tastypie.resources import ModelResource
from tastypie.authorization import DjangoAuthorization
from tastypie import fields
from tastypie.constants import ALL_WITH_RELATIONS
from dataserver.authentication import AnonymousApiKeyAuthentication
from accounts.api import ProfileResource
from scout.api import PostalAddressResource
class MakerScienceProfileResource(ModelResource):
parent = fields.OneToOneField(ProfileResource, 'parent')
location = fields.ToOneField(PostalAddressResource, 'location', null=True, blank=True, full=True)
class Meta:
queryset = MakerScienceProfile.objects.all()
allowed_methods = ['get', 'post', 'put', 'patch']
resource_name = 'makerscience/profile'
authentication = AnonymousApiKeyAuthentication()
authorization = DjangoAuthorization()
always_return_data = True
filtering = {
'parent' : ALL_WITH_RELATIONS,
}
def dehydrate(self, bundle):
bundle.data["first_name"] = bundle.obj.parent.user.first_name
bundle.data["last_name"] = bundle.obj.parent.user.last_name
bundle.data["full_name"] = "%s %s" % (bundle.obj.parent.user.first_name, bundle.obj.parent.user.last_name)
return bundle
|
Add fullname in REST response
|
Add fullname in REST response
|
Python
|
agpl-3.0
|
atiberghien/makerscience-server,atiberghien/makerscience-server
|
from .models import MakerScienceProfile
from tastypie.resources import ModelResource
from tastypie.authorization import DjangoAuthorization
from tastypie import fields
from tastypie.constants import ALL_WITH_RELATIONS
from dataserver.authentication import AnonymousApiKeyAuthentication
from accounts.api import ProfileResource
from scout.api import PostalAddressResource
class MakerScienceProfileResource(ModelResource):
parent = fields.OneToOneField(ProfileResource, 'parent')
location = fields.ToOneField(PostalAddressResource, 'location', null=True, blank=True, full=True)
class Meta:
queryset = MakerScienceProfile.objects.all()
allowed_methods = ['get', 'post', 'put', 'patch']
resource_name = 'makerscience/profile'
authentication = AnonymousApiKeyAuthentication()
authorization = DjangoAuthorization()
always_return_data = True
filtering = {
'parent' : ALL_WITH_RELATIONS,
}
def dehydrate(self, bundle):
bundle.data["first_name"] = bundle.obj.parent.user.first_name
bundle.data["last_name"] = bundle.obj.parent.user.last_name
+ bundle.data["full_name"] = "%s %s" % (bundle.obj.parent.user.first_name, bundle.obj.parent.user.last_name)
return bundle
|
Add fullname in REST response
|
## Code Before:
from .models import MakerScienceProfile
from tastypie.resources import ModelResource
from tastypie.authorization import DjangoAuthorization
from tastypie import fields
from tastypie.constants import ALL_WITH_RELATIONS
from dataserver.authentication import AnonymousApiKeyAuthentication
from accounts.api import ProfileResource
from scout.api import PostalAddressResource
class MakerScienceProfileResource(ModelResource):
parent = fields.OneToOneField(ProfileResource, 'parent')
location = fields.ToOneField(PostalAddressResource, 'location', null=True, blank=True, full=True)
class Meta:
queryset = MakerScienceProfile.objects.all()
allowed_methods = ['get', 'post', 'put', 'patch']
resource_name = 'makerscience/profile'
authentication = AnonymousApiKeyAuthentication()
authorization = DjangoAuthorization()
always_return_data = True
filtering = {
'parent' : ALL_WITH_RELATIONS,
}
def dehydrate(self, bundle):
bundle.data["first_name"] = bundle.obj.parent.user.first_name
bundle.data["last_name"] = bundle.obj.parent.user.last_name
return bundle
## Instruction:
Add fullname in REST response
## Code After:
from .models import MakerScienceProfile
from tastypie.resources import ModelResource
from tastypie.authorization import DjangoAuthorization
from tastypie import fields
from tastypie.constants import ALL_WITH_RELATIONS
from dataserver.authentication import AnonymousApiKeyAuthentication
from accounts.api import ProfileResource
from scout.api import PostalAddressResource
class MakerScienceProfileResource(ModelResource):
parent = fields.OneToOneField(ProfileResource, 'parent')
location = fields.ToOneField(PostalAddressResource, 'location', null=True, blank=True, full=True)
class Meta:
queryset = MakerScienceProfile.objects.all()
allowed_methods = ['get', 'post', 'put', 'patch']
resource_name = 'makerscience/profile'
authentication = AnonymousApiKeyAuthentication()
authorization = DjangoAuthorization()
always_return_data = True
filtering = {
'parent' : ALL_WITH_RELATIONS,
}
def dehydrate(self, bundle):
bundle.data["first_name"] = bundle.obj.parent.user.first_name
bundle.data["last_name"] = bundle.obj.parent.user.last_name
bundle.data["full_name"] = "%s %s" % (bundle.obj.parent.user.first_name, bundle.obj.parent.user.last_name)
return bundle
|
from .models import MakerScienceProfile
from tastypie.resources import ModelResource
from tastypie.authorization import DjangoAuthorization
from tastypie import fields
from tastypie.constants import ALL_WITH_RELATIONS
from dataserver.authentication import AnonymousApiKeyAuthentication
from accounts.api import ProfileResource
from scout.api import PostalAddressResource
class MakerScienceProfileResource(ModelResource):
parent = fields.OneToOneField(ProfileResource, 'parent')
location = fields.ToOneField(PostalAddressResource, 'location', null=True, blank=True, full=True)
class Meta:
queryset = MakerScienceProfile.objects.all()
allowed_methods = ['get', 'post', 'put', 'patch']
resource_name = 'makerscience/profile'
authentication = AnonymousApiKeyAuthentication()
authorization = DjangoAuthorization()
always_return_data = True
filtering = {
'parent' : ALL_WITH_RELATIONS,
}
def dehydrate(self, bundle):
bundle.data["first_name"] = bundle.obj.parent.user.first_name
bundle.data["last_name"] = bundle.obj.parent.user.last_name
+ bundle.data["full_name"] = "%s %s" % (bundle.obj.parent.user.first_name, bundle.obj.parent.user.last_name)
return bundle
|
1bd2bddca6de75f3139f986cb5bb6a76320f192a
|
axel/cleaner.py
|
axel/cleaner.py
|
import datetime
import textwrap
import transmissionrpc
from axel import config
from axel import pb_notify
def clean():
transmission_client = transmissionrpc.Client(
config['transmission']['host'], port=config['transmission']['port']
)
torrents = transmission_client.get_torrents()
now = datetime.datetime.now()
time_threshold = config['transmission']['time_threshold']
for torrent in torrents:
if torrent.status == 'seeding':
done = torrent.date_done
diff = now - done
if diff.days >= time_threshold:
# TODO: Use pb_notify instead
pb_notify(
textwrap.dedent(
'''
Torrent {torrent} older than {days} days:
removing (with data)
'''.format(torrent=torrent.name, days=time_threshold)
).strip()
)
transmission_client.remove_torrent(
torrent.id, delete_data=True
)
elif torrent.ratio >= config['transmission']['ratio_threshold']:
pb_notify(
textwrap.dedent(
'''
Torrent {0} reached threshold ratio or higher:
removing (with data)
'''.format(torrent.name)
).strip()
)
transmission_client.remove_torrent(
torrent.id, delete_data=True
)
|
import datetime
import textwrap
import transmissionrpc
from axel import config
from axel import pb_notify
def clean():
transmission_client = transmissionrpc.Client(
config['transmission']['host'], port=config['transmission']['port']
)
torrents = transmission_client.get_torrents()
now = datetime.datetime.now()
time_threshold = config['transmission']['time_threshold']
for torrent in torrents:
if torrent.status in ('seeding', 'stopped'):
done = torrent.date_done
diff = now - done
if diff.days >= time_threshold:
pb_notify(
textwrap.dedent(
'''
Torrent {torrent} older than {days} days:
removing (with data)
'''.format(torrent=torrent.name, days=time_threshold)
).strip()
)
transmission_client.remove_torrent(
torrent.id, delete_data=True
)
elif torrent.ratio >= config['transmission']['ratio_threshold']:
pb_notify(
textwrap.dedent(
'''
Torrent {0} reached threshold ratio or higher:
removing (with data)
'''.format(torrent.name)
).strip()
)
transmission_client.remove_torrent(
torrent.id, delete_data=True
)
|
Check stopped torrents when cleaning
|
Check stopped torrents when cleaning
|
Python
|
mit
|
craigcabrey/axel
|
import datetime
import textwrap
import transmissionrpc
from axel import config
from axel import pb_notify
def clean():
transmission_client = transmissionrpc.Client(
config['transmission']['host'], port=config['transmission']['port']
)
torrents = transmission_client.get_torrents()
now = datetime.datetime.now()
time_threshold = config['transmission']['time_threshold']
for torrent in torrents:
- if torrent.status == 'seeding':
+ if torrent.status in ('seeding', 'stopped'):
done = torrent.date_done
diff = now - done
if diff.days >= time_threshold:
- # TODO: Use pb_notify instead
pb_notify(
textwrap.dedent(
'''
Torrent {torrent} older than {days} days:
removing (with data)
'''.format(torrent=torrent.name, days=time_threshold)
).strip()
)
transmission_client.remove_torrent(
torrent.id, delete_data=True
)
elif torrent.ratio >= config['transmission']['ratio_threshold']:
pb_notify(
textwrap.dedent(
'''
Torrent {0} reached threshold ratio or higher:
removing (with data)
'''.format(torrent.name)
).strip()
)
transmission_client.remove_torrent(
torrent.id, delete_data=True
)
|
Check stopped torrents when cleaning
|
## Code Before:
import datetime
import textwrap
import transmissionrpc
from axel import config
from axel import pb_notify
def clean():
transmission_client = transmissionrpc.Client(
config['transmission']['host'], port=config['transmission']['port']
)
torrents = transmission_client.get_torrents()
now = datetime.datetime.now()
time_threshold = config['transmission']['time_threshold']
for torrent in torrents:
if torrent.status == 'seeding':
done = torrent.date_done
diff = now - done
if diff.days >= time_threshold:
# TODO: Use pb_notify instead
pb_notify(
textwrap.dedent(
'''
Torrent {torrent} older than {days} days:
removing (with data)
'''.format(torrent=torrent.name, days=time_threshold)
).strip()
)
transmission_client.remove_torrent(
torrent.id, delete_data=True
)
elif torrent.ratio >= config['transmission']['ratio_threshold']:
pb_notify(
textwrap.dedent(
'''
Torrent {0} reached threshold ratio or higher:
removing (with data)
'''.format(torrent.name)
).strip()
)
transmission_client.remove_torrent(
torrent.id, delete_data=True
)
## Instruction:
Check stopped torrents when cleaning
## Code After:
import datetime
import textwrap
import transmissionrpc
from axel import config
from axel import pb_notify
def clean():
transmission_client = transmissionrpc.Client(
config['transmission']['host'], port=config['transmission']['port']
)
torrents = transmission_client.get_torrents()
now = datetime.datetime.now()
time_threshold = config['transmission']['time_threshold']
for torrent in torrents:
if torrent.status in ('seeding', 'stopped'):
done = torrent.date_done
diff = now - done
if diff.days >= time_threshold:
pb_notify(
textwrap.dedent(
'''
Torrent {torrent} older than {days} days:
removing (with data)
'''.format(torrent=torrent.name, days=time_threshold)
).strip()
)
transmission_client.remove_torrent(
torrent.id, delete_data=True
)
elif torrent.ratio >= config['transmission']['ratio_threshold']:
pb_notify(
textwrap.dedent(
'''
Torrent {0} reached threshold ratio or higher:
removing (with data)
'''.format(torrent.name)
).strip()
)
transmission_client.remove_torrent(
torrent.id, delete_data=True
)
|
import datetime
import textwrap
import transmissionrpc
from axel import config
from axel import pb_notify
def clean():
transmission_client = transmissionrpc.Client(
config['transmission']['host'], port=config['transmission']['port']
)
torrents = transmission_client.get_torrents()
now = datetime.datetime.now()
time_threshold = config['transmission']['time_threshold']
for torrent in torrents:
- if torrent.status == 'seeding':
? ^^
+ if torrent.status in ('seeding', 'stopped'):
? ^^ + ++++++++++++
done = torrent.date_done
diff = now - done
if diff.days >= time_threshold:
- # TODO: Use pb_notify instead
pb_notify(
textwrap.dedent(
'''
Torrent {torrent} older than {days} days:
removing (with data)
'''.format(torrent=torrent.name, days=time_threshold)
).strip()
)
transmission_client.remove_torrent(
torrent.id, delete_data=True
)
elif torrent.ratio >= config['transmission']['ratio_threshold']:
pb_notify(
textwrap.dedent(
'''
Torrent {0} reached threshold ratio or higher:
removing (with data)
'''.format(torrent.name)
).strip()
)
transmission_client.remove_torrent(
torrent.id, delete_data=True
)
|
a778a41c8deb6fd9812e405143e34679122c18db
|
website/addons/base/utils.py
|
website/addons/base/utils.py
|
from os.path import basename
from website import settings
def serialize_addon_config(config, user):
lookup = config.template_lookup
return {
'addon_short_name': config.short_name,
'addon_full_name': config.full_name,
'node_settings_template': lookup.get_template(basename(config.node_settings_template)),
'user_settings_template': lookup.get_template(basename(config.user_settings_template)),
'is_enabled': user.get_addon(config.short_name) is not None,
}
def get_addons_by_config_type(config_type, user):
addons = [addon for addon in settings.ADDONS_AVAILABLE if config_type in addon.configs]
addon_settings = []
for addon_config in sorted(addons, key=lambda cfg: cfg.full_name.lower()):
# short_name = addon_config.short_name
config = serialize_addon_config(addon_config, user)
'''
user_settings = user.get_addon(short_name)
if user_settings:
user_settings = user_settings.to_json(user)
config.update({
'user_settings': user_settings or {}
})
'''
addon_settings.append(config)
return addon_settings
|
from os.path import basename
from website import settings
def serialize_addon_config(config, user):
lookup = config.template_lookup
user_addon = user.get_addon(config.short_name)
ret = {
'addon_short_name': config.short_name,
'addon_full_name': config.full_name,
'node_settings_template': lookup.get_template(basename(config.node_settings_template)),
'user_settings_template': lookup.get_template(basename(config.user_settings_template)),
'is_enabled': user_addon is not None,
}
ret.update(user_addon.to_json(user) if user_addon else {})
return ret
def get_addons_by_config_type(config_type, user):
addons = [addon for addon in settings.ADDONS_AVAILABLE if config_type in addon.configs]
return [serialize_addon_config(addon_config, user) for addon_config in sorted(addons, key=lambda cfg: cfg.full_name.lower())]
|
Add user_settings to serialized addon settings
|
Add user_settings to serialized addon settings
|
Python
|
apache-2.0
|
ZobairAlijan/osf.io,leb2dg/osf.io,doublebits/osf.io,mluo613/osf.io,jolene-esposito/osf.io,alexschiller/osf.io,mattclark/osf.io,laurenrevere/osf.io,jolene-esposito/osf.io,SSJohns/osf.io,billyhunt/osf.io,pattisdr/osf.io,samanehsan/osf.io,DanielSBrown/osf.io,cslzchen/osf.io,caseyrygt/osf.io,zachjanicki/osf.io,Nesiehr/osf.io,TomHeatwole/osf.io,reinaH/osf.io,danielneis/osf.io,cldershem/osf.io,cldershem/osf.io,mluo613/osf.io,MerlinZhang/osf.io,petermalcolm/osf.io,doublebits/osf.io,amyshi188/osf.io,SSJohns/osf.io,brandonPurvis/osf.io,asanfilippo7/osf.io,baylee-d/osf.io,mfraezz/osf.io,Nesiehr/osf.io,ticklemepierce/osf.io,samanehsan/osf.io,sloria/osf.io,samchrisinger/osf.io,kch8qx/osf.io,ckc6cz/osf.io,ckc6cz/osf.io,leb2dg/osf.io,emetsger/osf.io,cwisecarver/osf.io,hmoco/osf.io,hmoco/osf.io,erinspace/osf.io,doublebits/osf.io,aaxelb/osf.io,Ghalko/osf.io,jmcarp/osf.io,DanielSBrown/osf.io,bdyetton/prettychart,bdyetton/prettychart,TomHeatwole/osf.io,zachjanicki/osf.io,doublebits/osf.io,Nesiehr/osf.io,brianjgeiger/osf.io,acshi/osf.io,cslzchen/osf.io,jnayak1/osf.io,RomanZWang/osf.io,acshi/osf.io,sbt9uc/osf.io,danielneis/osf.io,crcresearch/osf.io,zamattiac/osf.io,monikagrabowska/osf.io,cslzchen/osf.io,erinspace/osf.io,rdhyee/osf.io,alexschiller/osf.io,mluke93/osf.io,alexschiller/osf.io,chennan47/osf.io,ckc6cz/osf.io,zamattiac/osf.io,zamattiac/osf.io,jolene-esposito/osf.io,caseyrollins/osf.io,haoyuchen1992/osf.io,arpitar/osf.io,SSJohns/osf.io,TomBaxter/osf.io,zachjanicki/osf.io,kch8qx/osf.io,lyndsysimon/osf.io,samchrisinger/osf.io,saradbowman/osf.io,acshi/osf.io,TomHeatwole/osf.io,kwierman/osf.io,wearpants/osf.io,laurenrevere/osf.io,felliott/osf.io,baylee-d/osf.io,petermalcolm/osf.io,SSJohns/osf.io,reinaH/osf.io,monikagrabowska/osf.io,amyshi188/osf.io,bdyetton/prettychart,TomHeatwole/osf.io,icereval/osf.io,haoyuchen1992/osf.io,RomanZWang/osf.io,TomBaxter/osf.io,sbt9uc/osf.io,jnayak1/osf.io,amyshi188/osf.io,emetsger/osf.io,DanielSBrown/osf.io,MerlinZhang/osf.io,cwisecarver/osf.io,Ghalk
o/osf.io,laurenrevere/osf.io,cslzchen/osf.io,leb2dg/osf.io,cosenal/osf.io,erinspace/osf.io,hmoco/osf.io,chrisseto/osf.io,felliott/osf.io,alexschiller/osf.io,KAsante95/osf.io,felliott/osf.io,ticklemepierce/osf.io,pattisdr/osf.io,reinaH/osf.io,icereval/osf.io,ticklemepierce/osf.io,brandonPurvis/osf.io,billyhunt/osf.io,brianjgeiger/osf.io,MerlinZhang/osf.io,abought/osf.io,mfraezz/osf.io,acshi/osf.io,HarryRybacki/osf.io,sbt9uc/osf.io,icereval/osf.io,danielneis/osf.io,CenterForOpenScience/osf.io,kch8qx/osf.io,emetsger/osf.io,zachjanicki/osf.io,GageGaskins/osf.io,rdhyee/osf.io,CenterForOpenScience/osf.io,jmcarp/osf.io,cwisecarver/osf.io,petermalcolm/osf.io,monikagrabowska/osf.io,Johnetordoff/osf.io,binoculars/osf.io,njantrania/osf.io,TomBaxter/osf.io,caseyrollins/osf.io,haoyuchen1992/osf.io,billyhunt/osf.io,RomanZWang/osf.io,mattclark/osf.io,mfraezz/osf.io,mluke93/osf.io,ZobairAlijan/osf.io,dplorimer/osf,caneruguz/osf.io,Nesiehr/osf.io,lyndsysimon/osf.io,njantrania/osf.io,kwierman/osf.io,haoyuchen1992/osf.io,rdhyee/osf.io,alexschiller/osf.io,HarryRybacki/osf.io,emetsger/osf.io,HarryRybacki/osf.io,chrisseto/osf.io,HalcyonChimera/osf.io,reinaH/osf.io,chrisseto/osf.io,ckc6cz/osf.io,mluo613/osf.io,wearpants/osf.io,mluo613/osf.io,chennan47/osf.io,caseyrygt/osf.io,leb2dg/osf.io,kwierman/osf.io,dplorimer/osf,HalcyonChimera/osf.io,KAsante95/osf.io,cwisecarver/osf.io,GageGaskins/osf.io,kch8qx/osf.io,samanehsan/osf.io,binoculars/osf.io,binoculars/osf.io,saradbowman/osf.io,jmcarp/osf.io,amyshi188/osf.io,ZobairAlijan/osf.io,Ghalko/osf.io,doublebits/osf.io,HalcyonChimera/osf.io,brandonPurvis/osf.io,abought/osf.io,HalcyonChimera/osf.io,petermalcolm/osf.io,kch8qx/osf.io,hmoco/osf.io,adlius/osf.io,asanfilippo7/osf.io,Johnetordoff/osf.io,aaxelb/osf.io,njantrania/osf.io,rdhyee/osf.io,DanielSBrown/osf.io,abought/osf.io,samanehsan/osf.io,pattisdr/osf.io,billyhunt/osf.io,dplorimer/osf,billyhunt/osf.io,KAsante95/osf.io,adlius/osf.io,caneruguz/osf.io,samchrisinger/osf.io,brandonPurvis/osf.io,Ce
nterForOpenScience/osf.io,dplorimer/osf,mluke93/osf.io,sloria/osf.io,crcresearch/osf.io,jnayak1/osf.io,danielneis/osf.io,cldershem/osf.io,abought/osf.io,cosenal/osf.io,caneruguz/osf.io,CenterForOpenScience/osf.io,crcresearch/osf.io,sbt9uc/osf.io,caseyrygt/osf.io,Ghalko/osf.io,mattclark/osf.io,sloria/osf.io,ZobairAlijan/osf.io,wearpants/osf.io,caseyrygt/osf.io,chennan47/osf.io,aaxelb/osf.io,cosenal/osf.io,baylee-d/osf.io,asanfilippo7/osf.io,asanfilippo7/osf.io,jnayak1/osf.io,GageGaskins/osf.io,GageGaskins/osf.io,GageGaskins/osf.io,jmcarp/osf.io,lyndsysimon/osf.io,mluke93/osf.io,HarryRybacki/osf.io,KAsante95/osf.io,adlius/osf.io,zamattiac/osf.io,kwierman/osf.io,samchrisinger/osf.io,chrisseto/osf.io,brianjgeiger/osf.io,arpitar/osf.io,brandonPurvis/osf.io,RomanZWang/osf.io,cldershem/osf.io,arpitar/osf.io,caneruguz/osf.io,wearpants/osf.io,Johnetordoff/osf.io,njantrania/osf.io,monikagrabowska/osf.io,monikagrabowska/osf.io,mluo613/osf.io,felliott/osf.io,ticklemepierce/osf.io,brianjgeiger/osf.io,KAsante95/osf.io,acshi/osf.io,RomanZWang/osf.io,bdyetton/prettychart,cosenal/osf.io,arpitar/osf.io,adlius/osf.io,aaxelb/osf.io,lyndsysimon/osf.io,MerlinZhang/osf.io,Johnetordoff/osf.io,jolene-esposito/osf.io,mfraezz/osf.io,caseyrollins/osf.io
|
from os.path import basename
from website import settings
def serialize_addon_config(config, user):
lookup = config.template_lookup
+ user_addon = user.get_addon(config.short_name)
- return {
+ ret = {
'addon_short_name': config.short_name,
'addon_full_name': config.full_name,
'node_settings_template': lookup.get_template(basename(config.node_settings_template)),
'user_settings_template': lookup.get_template(basename(config.user_settings_template)),
- 'is_enabled': user.get_addon(config.short_name) is not None,
+ 'is_enabled': user_addon is not None,
}
+ ret.update(user_addon.to_json(user) if user_addon else {})
+ return ret
def get_addons_by_config_type(config_type, user):
addons = [addon for addon in settings.ADDONS_AVAILABLE if config_type in addon.configs]
+ return [serialize_addon_config(addon_config, user) for addon_config in sorted(addons, key=lambda cfg: cfg.full_name.lower())]
- addon_settings = []
- for addon_config in sorted(addons, key=lambda cfg: cfg.full_name.lower()):
- # short_name = addon_config.short_name
- config = serialize_addon_config(addon_config, user)
- '''
- user_settings = user.get_addon(short_name)
- if user_settings:
- user_settings = user_settings.to_json(user)
- config.update({
- 'user_settings': user_settings or {}
- })
- '''
- addon_settings.append(config)
- return addon_settings
|
Add user_settings to serialized addon settings
|
## Code Before:
from os.path import basename
from website import settings
def serialize_addon_config(config, user):
lookup = config.template_lookup
return {
'addon_short_name': config.short_name,
'addon_full_name': config.full_name,
'node_settings_template': lookup.get_template(basename(config.node_settings_template)),
'user_settings_template': lookup.get_template(basename(config.user_settings_template)),
'is_enabled': user.get_addon(config.short_name) is not None,
}
def get_addons_by_config_type(config_type, user):
addons = [addon for addon in settings.ADDONS_AVAILABLE if config_type in addon.configs]
addon_settings = []
for addon_config in sorted(addons, key=lambda cfg: cfg.full_name.lower()):
# short_name = addon_config.short_name
config = serialize_addon_config(addon_config, user)
'''
user_settings = user.get_addon(short_name)
if user_settings:
user_settings = user_settings.to_json(user)
config.update({
'user_settings': user_settings or {}
})
'''
addon_settings.append(config)
return addon_settings
## Instruction:
Add user_settings to serialized addon settings
## Code After:
from os.path import basename
from website import settings
def serialize_addon_config(config, user):
lookup = config.template_lookup
user_addon = user.get_addon(config.short_name)
ret = {
'addon_short_name': config.short_name,
'addon_full_name': config.full_name,
'node_settings_template': lookup.get_template(basename(config.node_settings_template)),
'user_settings_template': lookup.get_template(basename(config.user_settings_template)),
'is_enabled': user_addon is not None,
}
ret.update(user_addon.to_json(user) if user_addon else {})
return ret
def get_addons_by_config_type(config_type, user):
addons = [addon for addon in settings.ADDONS_AVAILABLE if config_type in addon.configs]
return [serialize_addon_config(addon_config, user) for addon_config in sorted(addons, key=lambda cfg: cfg.full_name.lower())]
|
from os.path import basename
from website import settings
def serialize_addon_config(config, user):
lookup = config.template_lookup
+ user_addon = user.get_addon(config.short_name)
- return {
? ^^^
+ ret = {
? ^^
'addon_short_name': config.short_name,
'addon_full_name': config.full_name,
'node_settings_template': lookup.get_template(basename(config.node_settings_template)),
'user_settings_template': lookup.get_template(basename(config.user_settings_template)),
- 'is_enabled': user.get_addon(config.short_name) is not None,
? ---- -------------------
+ 'is_enabled': user_addon is not None,
}
+ ret.update(user_addon.to_json(user) if user_addon else {})
+ return ret
def get_addons_by_config_type(config_type, user):
addons = [addon for addon in settings.ADDONS_AVAILABLE if config_type in addon.configs]
+ return [serialize_addon_config(addon_config, user) for addon_config in sorted(addons, key=lambda cfg: cfg.full_name.lower())]
- addon_settings = []
- for addon_config in sorted(addons, key=lambda cfg: cfg.full_name.lower()):
- # short_name = addon_config.short_name
- config = serialize_addon_config(addon_config, user)
- '''
- user_settings = user.get_addon(short_name)
- if user_settings:
- user_settings = user_settings.to_json(user)
- config.update({
- 'user_settings': user_settings or {}
- })
- '''
- addon_settings.append(config)
- return addon_settings
|
87153cb1a9727d17d31f3aabb28affddca3191bf
|
sqltocpp.py
|
sqltocpp.py
|
import click
from sqltocpp import convert
@click.command()
@click.option('--sql', help='schema file name')
@click.option('--target', default='schema.hpp', help='hpp file name')
def execute(sql, target):
convert.schema_to_struct(sql, target)
if __name__ == '__main__':
try:
execute()
except:
execute("--help")
|
import click
from sqltocpp import convert
@click.command()
@click.argument('sql_schema_file')
@click.option('--target', default='schema.hpp', help='hpp file name')
def execute(sql_schema_file, target):
convert.schema_to_struct(sql_schema_file, target)
if __name__ == '__main__':
execute()
|
Add click based commandline interface script
|
Add click based commandline interface script
sqltocpp is intended to be a CLI command.
This enables it to be so
|
Python
|
mit
|
banjocat/SqlToCpp,banjocat/SqlToCpp
|
import click
from sqltocpp import convert
@click.command()
- @click.option('--sql', help='schema file name')
+ @click.argument('sql_schema_file')
@click.option('--target', default='schema.hpp', help='hpp file name')
- def execute(sql, target):
+ def execute(sql_schema_file, target):
- convert.schema_to_struct(sql, target)
+ convert.schema_to_struct(sql_schema_file, target)
if __name__ == '__main__':
- try:
- execute()
+ execute()
- except:
- execute("--help")
|
Add click based commandline interface script
|
## Code Before:
import click
from sqltocpp import convert
@click.command()
@click.option('--sql', help='schema file name')
@click.option('--target', default='schema.hpp', help='hpp file name')
def execute(sql, target):
convert.schema_to_struct(sql, target)
if __name__ == '__main__':
try:
execute()
except:
execute("--help")
## Instruction:
Add click based commandline interface script
## Code After:
import click
from sqltocpp import convert
@click.command()
@click.argument('sql_schema_file')
@click.option('--target', default='schema.hpp', help='hpp file name')
def execute(sql_schema_file, target):
convert.schema_to_struct(sql_schema_file, target)
if __name__ == '__main__':
execute()
|
import click
from sqltocpp import convert
@click.command()
- @click.option('--sql', help='schema file name')
+ @click.argument('sql_schema_file')
@click.option('--target', default='schema.hpp', help='hpp file name')
- def execute(sql, target):
+ def execute(sql_schema_file, target):
? ++++++++++++
- convert.schema_to_struct(sql, target)
+ convert.schema_to_struct(sql_schema_file, target)
? ++++++++++++
if __name__ == '__main__':
- try:
- execute()
? ----
+ execute()
- except:
- execute("--help")
|
fd90fc7ce0c8a8070966e4a8273c69b8c13955d3
|
masters/master.tryserver.webrtc/master_site_config.py
|
masters/master.tryserver.webrtc/master_site_config.py
|
"""ActiveMaster definition."""
from config_bootstrap import Master
class WebRTCTryServer(Master.Master4):
project_name = 'WebRTC Try Server'
master_port = 8070
slave_port = 8170
master_port_alt = 8270
try_job_port = 8370
from_address = '[email protected]'
reply_to = '[email protected]'
svn_url = 'svn://svn-mirror.golo.chromium.org/chrome-try/try-webrtc'
base_app_url = 'https://webrtc-status.appspot.com'
tree_status_url = base_app_url + '/status'
store_revisions_url = base_app_url + '/revisions'
last_good_url = base_app_url + '/lkgr'
code_review_site = 'https://webrtc-codereview.appspot.com'
buildbot_url = 'http://build.chromium.org/p/tryserver.webrtc/'
|
"""ActiveMaster definition."""
from config_bootstrap import Master
class WebRTCTryServer(Master.Master4):
project_name = 'WebRTC Try Server'
master_port = 8070
slave_port = 8170
master_port_alt = 8270
try_job_port = 8370
from_address = '[email protected]'
reply_to = '[email protected]'
svn_url = 'svn://svn-mirror.golo.chromium.org/chrome-try/try-webrtc'
base_app_url = 'https://webrtc-status.appspot.com'
tree_status_url = base_app_url + '/status'
store_revisions_url = base_app_url + '/revisions'
last_good_url = None
code_review_site = 'https://webrtc-codereview.appspot.com'
buildbot_url = 'http://build.chromium.org/p/tryserver.webrtc/'
|
Make trybots use HEAD instead of LKGR
|
WebRTC: Make trybots use HEAD instead of LKGR
It's about time we make this change, which turned out
to be very simple.
Review URL: https://codereview.chromium.org/776233003
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@293261 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
eunchong/build,eunchong/build,eunchong/build,eunchong/build
|
"""ActiveMaster definition."""
from config_bootstrap import Master
class WebRTCTryServer(Master.Master4):
project_name = 'WebRTC Try Server'
master_port = 8070
slave_port = 8170
master_port_alt = 8270
try_job_port = 8370
from_address = '[email protected]'
reply_to = '[email protected]'
svn_url = 'svn://svn-mirror.golo.chromium.org/chrome-try/try-webrtc'
base_app_url = 'https://webrtc-status.appspot.com'
tree_status_url = base_app_url + '/status'
store_revisions_url = base_app_url + '/revisions'
- last_good_url = base_app_url + '/lkgr'
+ last_good_url = None
code_review_site = 'https://webrtc-codereview.appspot.com'
buildbot_url = 'http://build.chromium.org/p/tryserver.webrtc/'
|
Make trybots use HEAD instead of LKGR
|
## Code Before:
"""ActiveMaster definition."""
from config_bootstrap import Master
class WebRTCTryServer(Master.Master4):
project_name = 'WebRTC Try Server'
master_port = 8070
slave_port = 8170
master_port_alt = 8270
try_job_port = 8370
from_address = '[email protected]'
reply_to = '[email protected]'
svn_url = 'svn://svn-mirror.golo.chromium.org/chrome-try/try-webrtc'
base_app_url = 'https://webrtc-status.appspot.com'
tree_status_url = base_app_url + '/status'
store_revisions_url = base_app_url + '/revisions'
last_good_url = base_app_url + '/lkgr'
code_review_site = 'https://webrtc-codereview.appspot.com'
buildbot_url = 'http://build.chromium.org/p/tryserver.webrtc/'
## Instruction:
Make trybots use HEAD instead of LKGR
## Code After:
"""ActiveMaster definition."""
from config_bootstrap import Master
class WebRTCTryServer(Master.Master4):
project_name = 'WebRTC Try Server'
master_port = 8070
slave_port = 8170
master_port_alt = 8270
try_job_port = 8370
from_address = '[email protected]'
reply_to = '[email protected]'
svn_url = 'svn://svn-mirror.golo.chromium.org/chrome-try/try-webrtc'
base_app_url = 'https://webrtc-status.appspot.com'
tree_status_url = base_app_url + '/status'
store_revisions_url = base_app_url + '/revisions'
last_good_url = None
code_review_site = 'https://webrtc-codereview.appspot.com'
buildbot_url = 'http://build.chromium.org/p/tryserver.webrtc/'
|
"""ActiveMaster definition."""
from config_bootstrap import Master
class WebRTCTryServer(Master.Master4):
project_name = 'WebRTC Try Server'
master_port = 8070
slave_port = 8170
master_port_alt = 8270
try_job_port = 8370
from_address = '[email protected]'
reply_to = '[email protected]'
svn_url = 'svn://svn-mirror.golo.chromium.org/chrome-try/try-webrtc'
base_app_url = 'https://webrtc-status.appspot.com'
tree_status_url = base_app_url + '/status'
store_revisions_url = base_app_url + '/revisions'
- last_good_url = base_app_url + '/lkgr'
+ last_good_url = None
code_review_site = 'https://webrtc-codereview.appspot.com'
buildbot_url = 'http://build.chromium.org/p/tryserver.webrtc/'
|
01a86c09b768f6cc4e5bf9b389d09512f9e56ceb
|
sample_agent.py
|
sample_agent.py
|
import numpy as np
import matplotlib.pyplot as plt
class Agent(object):
def __init__(self, dim_action):
self.dim_action = dim_action
def act(self, ob, reward, done, vision):
#print("ACT!")
# Get an Observation from the environment.
# Each observation vectors are numpy array.
# focus, opponents, track sensors are scaled into [0, 1]. When the agent
# is out of the road, sensor variables return -1/200.
# rpm, wheelSpinVel are raw values and then needed to be preprocessed.
# vision is given as a tensor with size of (3, 64, 64) <-- rgb
# and values are in [0, 255]
if vision is False:
focus, speedX, speedY, speedZ, opponents, rpm, track, wheelSpinVel = ob
else:
focus, speedX, speedY, speedZ, opponents, rpm, track, wheelSpinVel, vision = ob
""" The code below is for checking the vision input. This is very heavy for real-time Control
So you may need to remove.
"""
"""
img = np.ndarray((64,64,3))
for i in range(3):
img[:, :, i] = 255 - vision[i]
plt.imshow(img, origin='lower')
plt.draw()
plt.pause(0.001)
"""
return np.tanh(np.random.randn(self.dim_action)) # random action
|
import numpy as np
import matplotlib.pyplot as plt
class Agent(object):
def __init__(self, dim_action):
self.dim_action = dim_action
def act(self, ob, reward, done, vision_on):
#print("ACT!")
# Get an Observation from the environment.
# Each observation vectors are numpy array.
# focus, opponents, track sensors are scaled into [0, 1]. When the agent
# is out of the road, sensor variables return -1/200.
# rpm, wheelSpinVel are raw values and then needed to be preprocessed.
# vision is given as a tensor with size of (64*64, 3) = (4096, 3) <-- rgb
# and values are in [0, 255]
if vision_on is False:
focus, speedX, speedY, speedZ, opponents, rpm, track, wheelSpinVel = ob
else:
focus, speedX, speedY, speedZ, opponents, rpm, track, wheelSpinVel, vision = ob
""" The code below is for checking the vision input. This is very heavy for real-time Control
So you may need to remove.
"""
print(vision.shape)
"""
img = np.ndarray((64,64,3))
for i in range(3):
img[:, :, i] = 255 - vision[:, i].reshape((64, 64))
plt.imshow(img, origin='lower')
plt.draw()
plt.pause(0.001)
"""
return np.tanh(np.random.randn(self.dim_action)) # random action
|
Update to follow the new observation format (follow the vision input of OpenAI ATARI environment)
|
Update to follow the new observation format
(follow the vision input of OpenAI ATARI environment)
|
Python
|
mit
|
travistang/late_fyt,travistang/late_fyt,ugo-nama-kun/gym_torcs,travistang/late_fyt,ugo-nama-kun/gym_torcs,ugo-nama-kun/gym_torcs,travistang/late_fyt,travistang/late_fyt,ugo-nama-kun/gym_torcs,ugo-nama-kun/gym_torcs,travistang/late_fyt,ugo-nama-kun/gym_torcs,travistang/late_fyt,ugo-nama-kun/gym_torcs
|
import numpy as np
import matplotlib.pyplot as plt
class Agent(object):
def __init__(self, dim_action):
self.dim_action = dim_action
- def act(self, ob, reward, done, vision):
+ def act(self, ob, reward, done, vision_on):
#print("ACT!")
# Get an Observation from the environment.
# Each observation vectors are numpy array.
# focus, opponents, track sensors are scaled into [0, 1]. When the agent
# is out of the road, sensor variables return -1/200.
# rpm, wheelSpinVel are raw values and then needed to be preprocessed.
- # vision is given as a tensor with size of (3, 64, 64) <-- rgb
+ # vision is given as a tensor with size of (64*64, 3) = (4096, 3) <-- rgb
# and values are in [0, 255]
- if vision is False:
+ if vision_on is False:
focus, speedX, speedY, speedZ, opponents, rpm, track, wheelSpinVel = ob
else:
focus, speedX, speedY, speedZ, opponents, rpm, track, wheelSpinVel, vision = ob
""" The code below is for checking the vision input. This is very heavy for real-time Control
So you may need to remove.
"""
+ print(vision.shape)
"""
img = np.ndarray((64,64,3))
for i in range(3):
- img[:, :, i] = 255 - vision[i]
+ img[:, :, i] = 255 - vision[:, i].reshape((64, 64))
plt.imshow(img, origin='lower')
plt.draw()
plt.pause(0.001)
"""
return np.tanh(np.random.randn(self.dim_action)) # random action
|
Update to follow the new observation format (follow the vision input of OpenAI ATARI environment)
|
## Code Before:
import numpy as np
import matplotlib.pyplot as plt
class Agent(object):
def __init__(self, dim_action):
self.dim_action = dim_action
def act(self, ob, reward, done, vision):
#print("ACT!")
# Get an Observation from the environment.
# Each observation vectors are numpy array.
# focus, opponents, track sensors are scaled into [0, 1]. When the agent
# is out of the road, sensor variables return -1/200.
# rpm, wheelSpinVel are raw values and then needed to be preprocessed.
# vision is given as a tensor with size of (3, 64, 64) <-- rgb
# and values are in [0, 255]
if vision is False:
focus, speedX, speedY, speedZ, opponents, rpm, track, wheelSpinVel = ob
else:
focus, speedX, speedY, speedZ, opponents, rpm, track, wheelSpinVel, vision = ob
""" The code below is for checking the vision input. This is very heavy for real-time Control
So you may need to remove.
"""
"""
img = np.ndarray((64,64,3))
for i in range(3):
img[:, :, i] = 255 - vision[i]
plt.imshow(img, origin='lower')
plt.draw()
plt.pause(0.001)
"""
return np.tanh(np.random.randn(self.dim_action)) # random action
## Instruction:
Update to follow the new observation format (follow the vision input of OpenAI ATARI environment)
## Code After:
import numpy as np
import matplotlib.pyplot as plt
class Agent(object):
def __init__(self, dim_action):
self.dim_action = dim_action
def act(self, ob, reward, done, vision_on):
#print("ACT!")
# Get an Observation from the environment.
# Each observation vectors are numpy array.
# focus, opponents, track sensors are scaled into [0, 1]. When the agent
# is out of the road, sensor variables return -1/200.
# rpm, wheelSpinVel are raw values and then needed to be preprocessed.
# vision is given as a tensor with size of (64*64, 3) = (4096, 3) <-- rgb
# and values are in [0, 255]
if vision_on is False:
focus, speedX, speedY, speedZ, opponents, rpm, track, wheelSpinVel = ob
else:
focus, speedX, speedY, speedZ, opponents, rpm, track, wheelSpinVel, vision = ob
""" The code below is for checking the vision input. This is very heavy for real-time Control
So you may need to remove.
"""
print(vision.shape)
"""
img = np.ndarray((64,64,3))
for i in range(3):
img[:, :, i] = 255 - vision[:, i].reshape((64, 64))
plt.imshow(img, origin='lower')
plt.draw()
plt.pause(0.001)
"""
return np.tanh(np.random.randn(self.dim_action)) # random action
|
import numpy as np
import matplotlib.pyplot as plt
class Agent(object):
def __init__(self, dim_action):
self.dim_action = dim_action
- def act(self, ob, reward, done, vision):
+ def act(self, ob, reward, done, vision_on):
? +++
#print("ACT!")
# Get an Observation from the environment.
# Each observation vectors are numpy array.
# focus, opponents, track sensors are scaled into [0, 1]. When the agent
# is out of the road, sensor variables return -1/200.
# rpm, wheelSpinVel are raw values and then needed to be preprocessed.
- # vision is given as a tensor with size of (3, 64, 64) <-- rgb
? ^^^ ^
+ # vision is given as a tensor with size of (64*64, 3) = (4096, 3) <-- rgb
? ^^^ +++++++++ ^^^
# and values are in [0, 255]
- if vision is False:
+ if vision_on is False:
? +++
focus, speedX, speedY, speedZ, opponents, rpm, track, wheelSpinVel = ob
else:
focus, speedX, speedY, speedZ, opponents, rpm, track, wheelSpinVel, vision = ob
""" The code below is for checking the vision input. This is very heavy for real-time Control
So you may need to remove.
"""
+ print(vision.shape)
"""
img = np.ndarray((64,64,3))
for i in range(3):
- img[:, :, i] = 255 - vision[i]
+ img[:, :, i] = 255 - vision[:, i].reshape((64, 64))
? +++ ++++++++++++++++++
plt.imshow(img, origin='lower')
plt.draw()
plt.pause(0.001)
"""
return np.tanh(np.random.randn(self.dim_action)) # random action
|
d29e87eeb062df4d52c0c744919be4cae770fc2c
|
testing/config/settings/__init__.py
|
testing/config/settings/__init__.py
|
from daiquiri.core.settings.django import *
from daiquiri.core.settings.celery import *
from daiquiri.core.settings.daiquiri import *
from daiquiri.core.settings.logging import *
from daiquiri.core.settings.vendor import *
from daiquiri.archive.settings import *
from daiquiri.auth.settings import *
from daiquiri.conesearch.settings import *
from daiquiri.cutout.settings import *
from daiquiri.files.settings import *
from daiquiri.meetings.settings import *
from daiquiri.metadata.settings import *
from daiquiri.oai.settings import *
from daiquiri.query.settings import *
from daiquiri.serve.settings import *
from daiquiri.stats.settings import *
from daiquiri.tap.settings import *
from daiquiri.wordpress.settings import *
# override settings from base.py (which is checked in to git)
try:
from .base import *
except ImportError:
pass
# override settings from local.py (which is not checked in to git)
try:
from .local import *
except ImportError:
pass
|
from daiquiri.core.settings.django import *
from daiquiri.core.settings.celery import *
from daiquiri.core.settings.daiquiri import *
from daiquiri.core.settings.logging import *
from daiquiri.core.settings.vendor import *
from daiquiri.archive.settings import *
from daiquiri.auth.settings import *
from daiquiri.conesearch.settings import *
from daiquiri.cutout.settings import *
from daiquiri.files.settings import *
from daiquiri.meetings.settings import *
from daiquiri.metadata.settings import *
from daiquiri.oai.settings import *
from daiquiri.query.settings import *
from daiquiri.registry.settings import *
from daiquiri.serve.settings import *
from daiquiri.stats.settings import *
from daiquiri.tap.settings import *
from daiquiri.wordpress.settings import *
# override settings from base.py (which is checked in to git)
try:
from .base import *
except ImportError:
pass
# override settings from local.py (which is not checked in to git)
try:
from .local import *
except ImportError:
pass
|
Add registry settings to testing
|
Add registry settings to testing
|
Python
|
apache-2.0
|
aipescience/django-daiquiri,aipescience/django-daiquiri,aipescience/django-daiquiri
|
from daiquiri.core.settings.django import *
from daiquiri.core.settings.celery import *
from daiquiri.core.settings.daiquiri import *
from daiquiri.core.settings.logging import *
from daiquiri.core.settings.vendor import *
from daiquiri.archive.settings import *
from daiquiri.auth.settings import *
from daiquiri.conesearch.settings import *
from daiquiri.cutout.settings import *
from daiquiri.files.settings import *
from daiquiri.meetings.settings import *
from daiquiri.metadata.settings import *
from daiquiri.oai.settings import *
from daiquiri.query.settings import *
+ from daiquiri.registry.settings import *
from daiquiri.serve.settings import *
from daiquiri.stats.settings import *
from daiquiri.tap.settings import *
from daiquiri.wordpress.settings import *
# override settings from base.py (which is checked in to git)
try:
from .base import *
except ImportError:
pass
# override settings from local.py (which is not checked in to git)
try:
from .local import *
except ImportError:
pass
|
Add registry settings to testing
|
## Code Before:
from daiquiri.core.settings.django import *
from daiquiri.core.settings.celery import *
from daiquiri.core.settings.daiquiri import *
from daiquiri.core.settings.logging import *
from daiquiri.core.settings.vendor import *
from daiquiri.archive.settings import *
from daiquiri.auth.settings import *
from daiquiri.conesearch.settings import *
from daiquiri.cutout.settings import *
from daiquiri.files.settings import *
from daiquiri.meetings.settings import *
from daiquiri.metadata.settings import *
from daiquiri.oai.settings import *
from daiquiri.query.settings import *
from daiquiri.serve.settings import *
from daiquiri.stats.settings import *
from daiquiri.tap.settings import *
from daiquiri.wordpress.settings import *
# override settings from base.py (which is checked in to git)
try:
from .base import *
except ImportError:
pass
# override settings from local.py (which is not checked in to git)
try:
from .local import *
except ImportError:
pass
## Instruction:
Add registry settings to testing
## Code After:
from daiquiri.core.settings.django import *
from daiquiri.core.settings.celery import *
from daiquiri.core.settings.daiquiri import *
from daiquiri.core.settings.logging import *
from daiquiri.core.settings.vendor import *
from daiquiri.archive.settings import *
from daiquiri.auth.settings import *
from daiquiri.conesearch.settings import *
from daiquiri.cutout.settings import *
from daiquiri.files.settings import *
from daiquiri.meetings.settings import *
from daiquiri.metadata.settings import *
from daiquiri.oai.settings import *
from daiquiri.query.settings import *
from daiquiri.registry.settings import *
from daiquiri.serve.settings import *
from daiquiri.stats.settings import *
from daiquiri.tap.settings import *
from daiquiri.wordpress.settings import *
# override settings from base.py (which is checked in to git)
try:
from .base import *
except ImportError:
pass
# override settings from local.py (which is not checked in to git)
try:
from .local import *
except ImportError:
pass
|
from daiquiri.core.settings.django import *
from daiquiri.core.settings.celery import *
from daiquiri.core.settings.daiquiri import *
from daiquiri.core.settings.logging import *
from daiquiri.core.settings.vendor import *
from daiquiri.archive.settings import *
from daiquiri.auth.settings import *
from daiquiri.conesearch.settings import *
from daiquiri.cutout.settings import *
from daiquiri.files.settings import *
from daiquiri.meetings.settings import *
from daiquiri.metadata.settings import *
from daiquiri.oai.settings import *
from daiquiri.query.settings import *
+ from daiquiri.registry.settings import *
from daiquiri.serve.settings import *
from daiquiri.stats.settings import *
from daiquiri.tap.settings import *
from daiquiri.wordpress.settings import *
# override settings from base.py (which is checked in to git)
try:
from .base import *
except ImportError:
pass
# override settings from local.py (which is not checked in to git)
try:
from .local import *
except ImportError:
pass
|
d7a91fe283666f01aa06a707c536893cf1473fe3
|
rtwilio/models.py
|
rtwilio/models.py
|
import datetime
from django.db import models
class TwilioResponse(models.Model):
date = models.DateTimeField()
message = models.CharField(max_length=64, primary_key=True)
account = models.CharField(max_length=64)
sender = models.CharField(max_length=16)
recipient = models.CharField(max_length=16)
status = models.CharField(max_length=16)
def save(self, **kwargs):
if not self.date:
self.date = datetime.datetime.now()
return super(TwilioResponse, self).save(**kwargs)
|
from django.db import models
from django.utils import timezone
class TwilioResponse(models.Model):
date = models.DateTimeField()
message = models.CharField(max_length=64, primary_key=True)
account = models.CharField(max_length=64)
sender = models.CharField(max_length=16)
recipient = models.CharField(max_length=16)
status = models.CharField(max_length=16)
def save(self, **kwargs):
if not self.date:
self.date = timezone.now()
return super(TwilioResponse, self).save(**kwargs)
|
Use timezone aware datetime now.
|
Use timezone aware datetime now.
|
Python
|
bsd-3-clause
|
caktus/rapidsms-twilio
|
- import datetime
-
from django.db import models
+ from django.utils import timezone
class TwilioResponse(models.Model):
date = models.DateTimeField()
message = models.CharField(max_length=64, primary_key=True)
account = models.CharField(max_length=64)
sender = models.CharField(max_length=16)
recipient = models.CharField(max_length=16)
status = models.CharField(max_length=16)
def save(self, **kwargs):
if not self.date:
- self.date = datetime.datetime.now()
+ self.date = timezone.now()
return super(TwilioResponse, self).save(**kwargs)
|
Use timezone aware datetime now.
|
## Code Before:
import datetime
from django.db import models
class TwilioResponse(models.Model):
date = models.DateTimeField()
message = models.CharField(max_length=64, primary_key=True)
account = models.CharField(max_length=64)
sender = models.CharField(max_length=16)
recipient = models.CharField(max_length=16)
status = models.CharField(max_length=16)
def save(self, **kwargs):
if not self.date:
self.date = datetime.datetime.now()
return super(TwilioResponse, self).save(**kwargs)
## Instruction:
Use timezone aware datetime now.
## Code After:
from django.db import models
from django.utils import timezone
class TwilioResponse(models.Model):
date = models.DateTimeField()
message = models.CharField(max_length=64, primary_key=True)
account = models.CharField(max_length=64)
sender = models.CharField(max_length=16)
recipient = models.CharField(max_length=16)
status = models.CharField(max_length=16)
def save(self, **kwargs):
if not self.date:
self.date = timezone.now()
return super(TwilioResponse, self).save(**kwargs)
|
- import datetime
-
from django.db import models
+ from django.utils import timezone
class TwilioResponse(models.Model):
date = models.DateTimeField()
message = models.CharField(max_length=64, primary_key=True)
account = models.CharField(max_length=64)
sender = models.CharField(max_length=16)
recipient = models.CharField(max_length=16)
status = models.CharField(max_length=16)
def save(self, **kwargs):
if not self.date:
- self.date = datetime.datetime.now()
? ---- ^^^^^^^^
+ self.date = timezone.now()
? ^^^
return super(TwilioResponse, self).save(**kwargs)
|
d919c1e29645a52e795e85686de6de8f1e57196e
|
glue/plugins/ginga_viewer/__init__.py
|
glue/plugins/ginga_viewer/__init__.py
|
try:
from .client import *
from .qt_widget import *
except ImportError:
import warnings
warnings.warn("Could not import ginga plugin, since ginga is required")
# Register qt client
from ...config import qt_client
qt_client.add(GingaWidget)
|
try:
from .client import *
from .qt_widget import *
except ImportError:
import warnings
warnings.warn("Could not import ginga plugin, since ginga is required")
else:
# Register qt client
from ...config import qt_client
qt_client.add(GingaWidget)
|
Fix if ginga is not installed
|
Fix if ginga is not installed
|
Python
|
bsd-3-clause
|
JudoWill/glue,stscieisenhamer/glue,saimn/glue,JudoWill/glue,saimn/glue,stscieisenhamer/glue
|
try:
from .client import *
from .qt_widget import *
except ImportError:
import warnings
warnings.warn("Could not import ginga plugin, since ginga is required")
+ else:
+ # Register qt client
+ from ...config import qt_client
+ qt_client.add(GingaWidget)
- # Register qt client
- from ...config import qt_client
- qt_client.add(GingaWidget)
-
|
Fix if ginga is not installed
|
## Code Before:
try:
from .client import *
from .qt_widget import *
except ImportError:
import warnings
warnings.warn("Could not import ginga plugin, since ginga is required")
# Register qt client
from ...config import qt_client
qt_client.add(GingaWidget)
## Instruction:
Fix if ginga is not installed
## Code After:
try:
from .client import *
from .qt_widget import *
except ImportError:
import warnings
warnings.warn("Could not import ginga plugin, since ginga is required")
else:
# Register qt client
from ...config import qt_client
qt_client.add(GingaWidget)
|
try:
from .client import *
from .qt_widget import *
except ImportError:
import warnings
warnings.warn("Could not import ginga plugin, since ginga is required")
-
+ else:
- # Register qt client
+ # Register qt client
? ++++
- from ...config import qt_client
+ from ...config import qt_client
? ++++
- qt_client.add(GingaWidget)
+ qt_client.add(GingaWidget)
? ++++
|
35c44f0f585d11dea632e509b9eec20d4697dc9d
|
functions/eitu/timeedit_to_csv.py
|
functions/eitu/timeedit_to_csv.py
|
import requests
import csv
import ics_parser
URL_STUDY_ACTIVITIES = 'https://dk.timeedit.net/web/itu/db1/public/ri6Q7Z6QQw0Z5gQ9f50on7Xx5YY00ZQ1ZYQycZw.ics'
URL_ACTIVITIES = 'https://dk.timeedit.net/web/itu/db1/public/ri6g7058yYQZXxQ5oQgZZ0vZ56Y1Q0f5c0nZQwYQ.ics'
def fetch_and_parse(url):
return ics_parser.parse(requests.get(url).text)
# Fetch and parse iCalendar events
study_activities = fetch_and_parse(URL_STUDY_ACTIVITIES)
activities = fetch_and_parse(URL_ACTIVITIES)
events = study_activities + activities
# Remove duplicate events
events = {e['UID']: e for e in events}.values()
# Write csv
with open('timeedit.csv', 'w') as csvfile:
fieldnames = set()
for e in events: fieldnames = fieldnames | set(e.keys())
writer = csv.DictWriter(csvfile, fieldnames=sorted(list(fieldnames)))
writer.writeheader()
for e in events: writer.writerow(e)
|
import requests
import csv
from datetime import datetime
import ics_parser
URL_STUDY_ACTIVITIES = 'https://dk.timeedit.net/web/itu/db1/public/ri6Q7Z6QQw0Z5gQ9f50on7Xx5YY00ZQ1ZYQycZw.ics'
URL_ACTIVITIES = 'https://dk.timeedit.net/web/itu/db1/public/ri6g7058yYQZXxQ5oQgZZ0vZ56Y1Q0f5c0nZQwYQ.ics'
def fetch_and_parse(url):
return ics_parser.parse(requests.get(url).text)
# Fetch and parse iCalendar events
study_activities = fetch_and_parse(URL_STUDY_ACTIVITIES)
activities = fetch_and_parse(URL_ACTIVITIES)
events = study_activities + activities
# Remove duplicates and sort
events = {e['UID']: e for e in events}.values()
events = sorted(events, key=lambda e: e['DTSTART'])
# Write csv
with open('timeedit.csv', 'w') as csvfile:
fieldnames = set()
for e in events: fieldnames = fieldnames | set(e.keys())
writer = csv.DictWriter(csvfile, fieldnames=sorted(list(fieldnames)))
writer.writeheader()
for e in events:
for key, value in e.items():
if isinstance(value, datetime): e[key] = value.isoformat()
writer.writerow(e)
|
Sort events by start and iso format datetimes
|
Sort events by start and iso format datetimes
|
Python
|
mit
|
christianknu/eitu,christianknu/eitu,eitu/eitu,christianknu/eitu,eitu/eitu
|
import requests
import csv
+ from datetime import datetime
import ics_parser
URL_STUDY_ACTIVITIES = 'https://dk.timeedit.net/web/itu/db1/public/ri6Q7Z6QQw0Z5gQ9f50on7Xx5YY00ZQ1ZYQycZw.ics'
URL_ACTIVITIES = 'https://dk.timeedit.net/web/itu/db1/public/ri6g7058yYQZXxQ5oQgZZ0vZ56Y1Q0f5c0nZQwYQ.ics'
def fetch_and_parse(url):
return ics_parser.parse(requests.get(url).text)
# Fetch and parse iCalendar events
study_activities = fetch_and_parse(URL_STUDY_ACTIVITIES)
activities = fetch_and_parse(URL_ACTIVITIES)
events = study_activities + activities
- # Remove duplicate events
+ # Remove duplicates and sort
events = {e['UID']: e for e in events}.values()
+ events = sorted(events, key=lambda e: e['DTSTART'])
# Write csv
with open('timeedit.csv', 'w') as csvfile:
fieldnames = set()
for e in events: fieldnames = fieldnames | set(e.keys())
writer = csv.DictWriter(csvfile, fieldnames=sorted(list(fieldnames)))
writer.writeheader()
- for e in events: writer.writerow(e)
+ for e in events:
+ for key, value in e.items():
+ if isinstance(value, datetime): e[key] = value.isoformat()
+ writer.writerow(e)
|
Sort events by start and iso format datetimes
|
## Code Before:
import requests
import csv
import ics_parser
URL_STUDY_ACTIVITIES = 'https://dk.timeedit.net/web/itu/db1/public/ri6Q7Z6QQw0Z5gQ9f50on7Xx5YY00ZQ1ZYQycZw.ics'
URL_ACTIVITIES = 'https://dk.timeedit.net/web/itu/db1/public/ri6g7058yYQZXxQ5oQgZZ0vZ56Y1Q0f5c0nZQwYQ.ics'
def fetch_and_parse(url):
return ics_parser.parse(requests.get(url).text)
# Fetch and parse iCalendar events
study_activities = fetch_and_parse(URL_STUDY_ACTIVITIES)
activities = fetch_and_parse(URL_ACTIVITIES)
events = study_activities + activities
# Remove duplicate events
events = {e['UID']: e for e in events}.values()
# Write csv
with open('timeedit.csv', 'w') as csvfile:
fieldnames = set()
for e in events: fieldnames = fieldnames | set(e.keys())
writer = csv.DictWriter(csvfile, fieldnames=sorted(list(fieldnames)))
writer.writeheader()
for e in events: writer.writerow(e)
## Instruction:
Sort events by start and iso format datetimes
## Code After:
import requests
import csv
from datetime import datetime
import ics_parser
URL_STUDY_ACTIVITIES = 'https://dk.timeedit.net/web/itu/db1/public/ri6Q7Z6QQw0Z5gQ9f50on7Xx5YY00ZQ1ZYQycZw.ics'
URL_ACTIVITIES = 'https://dk.timeedit.net/web/itu/db1/public/ri6g7058yYQZXxQ5oQgZZ0vZ56Y1Q0f5c0nZQwYQ.ics'
def fetch_and_parse(url):
return ics_parser.parse(requests.get(url).text)
# Fetch and parse iCalendar events
study_activities = fetch_and_parse(URL_STUDY_ACTIVITIES)
activities = fetch_and_parse(URL_ACTIVITIES)
events = study_activities + activities
# Remove duplicates and sort
events = {e['UID']: e for e in events}.values()
events = sorted(events, key=lambda e: e['DTSTART'])
# Write csv
with open('timeedit.csv', 'w') as csvfile:
fieldnames = set()
for e in events: fieldnames = fieldnames | set(e.keys())
writer = csv.DictWriter(csvfile, fieldnames=sorted(list(fieldnames)))
writer.writeheader()
for e in events:
for key, value in e.items():
if isinstance(value, datetime): e[key] = value.isoformat()
writer.writerow(e)
|
import requests
import csv
+ from datetime import datetime
import ics_parser
URL_STUDY_ACTIVITIES = 'https://dk.timeedit.net/web/itu/db1/public/ri6Q7Z6QQw0Z5gQ9f50on7Xx5YY00ZQ1ZYQycZw.ics'
URL_ACTIVITIES = 'https://dk.timeedit.net/web/itu/db1/public/ri6g7058yYQZXxQ5oQgZZ0vZ56Y1Q0f5c0nZQwYQ.ics'
def fetch_and_parse(url):
return ics_parser.parse(requests.get(url).text)
# Fetch and parse iCalendar events
study_activities = fetch_and_parse(URL_STUDY_ACTIVITIES)
activities = fetch_and_parse(URL_ACTIVITIES)
events = study_activities + activities
- # Remove duplicate events
? ^^^ -
+ # Remove duplicates and sort
? + ^ +++++
events = {e['UID']: e for e in events}.values()
+ events = sorted(events, key=lambda e: e['DTSTART'])
# Write csv
with open('timeedit.csv', 'w') as csvfile:
fieldnames = set()
for e in events: fieldnames = fieldnames | set(e.keys())
writer = csv.DictWriter(csvfile, fieldnames=sorted(list(fieldnames)))
writer.writeheader()
+ for e in events:
+ for key, value in e.items():
+ if isinstance(value, datetime): e[key] = value.isoformat()
- for e in events: writer.writerow(e)
? --- - -- -------
+ writer.writerow(e)
|
1a04c983eb921f47452ea6c013313f90875c400f
|
pkgconf/__init__.py
|
pkgconf/__init__.py
|
import sys
from functools import partial, update_wrapper
from django.utils import six
def proxy(attr, default):
def wrapper(self):
# It has to be most recent,
# to override settings in tests
from django.conf import settings
value = getattr(settings, attr, default)
if callable(value):
func = partial(value, self)
return update_wrapper(func, value)
elif isinstance(value, property):
return value.__get__(self)
return value
return property(wrapper)
class ConfMeta(type):
def __new__(mcs, name, bases, attrs):
prefix = attrs.get('__prefix__', name.upper()) + '_'
fields = {
key: proxy(prefix + key, value)
for key, value in attrs.items()
if not key.startswith('__')
}
attrs.update(fields, __all__=fields.keys())
# Ready to build
cls = super(ConfMeta, mcs).__new__(mcs, name, bases, attrs)
# Sets non-abstract conf as module
abstract = attrs.get('__abstract__', False)
if not abstract:
# http://mail.python.org/pipermail/python-ideas/2012-May/
# 014969.html
ins = cls()
ins.__name__ = ins.__module__
sys.modules[ins.__module__] = ins
return cls
class Conf(six.with_metaclass(ConfMeta)):
__abstract__ = True
|
import sys
from functools import partial, update_wrapper
from django.utils import six
def proxy(attr, default):
def wrapper(self):
# It has to be most recent,
# to override settings in tests
from django.conf import settings
value = getattr(settings, attr, default)
if callable(value):
func = partial(value, self)
return update_wrapper(func, value)
elif isinstance(value, property):
return value.__get__(self)
return value
return property(wrapper)
class ConfMeta(type):
def __new__(mcs, name, bases, attrs):
prefix = attrs.get('__prefix__', name.upper()) + '_'
fields = {
key: proxy(prefix + key, value)
for key, value in attrs.items()
if not key.startswith('__')
}
attrs.update(fields, __all__=tuple(fields))
# Ready to build
cls = super(ConfMeta, mcs).__new__(mcs, name, bases, attrs)
# Sets non-abstract conf as module
abstract = attrs.get('__abstract__', False)
if not abstract:
# http://mail.python.org/pipermail/python-ideas/2012-May/
# 014969.html
ins = cls()
ins.__name__ = ins.__module__
sys.modules[ins.__module__] = ins
return cls
class Conf(six.with_metaclass(ConfMeta)):
__abstract__ = True
|
Fix "import star" feature support
|
Fix "import star" feature support
`dict.keys()` returns `dict_keys` object in py3,
which does not support indexing.
|
Python
|
bsd-3-clause
|
byashimov/django-pkgconf
|
import sys
from functools import partial, update_wrapper
from django.utils import six
def proxy(attr, default):
def wrapper(self):
# It has to be most recent,
# to override settings in tests
from django.conf import settings
value = getattr(settings, attr, default)
if callable(value):
func = partial(value, self)
return update_wrapper(func, value)
elif isinstance(value, property):
return value.__get__(self)
return value
return property(wrapper)
class ConfMeta(type):
def __new__(mcs, name, bases, attrs):
prefix = attrs.get('__prefix__', name.upper()) + '_'
fields = {
key: proxy(prefix + key, value)
for key, value in attrs.items()
if not key.startswith('__')
}
- attrs.update(fields, __all__=fields.keys())
+ attrs.update(fields, __all__=tuple(fields))
# Ready to build
cls = super(ConfMeta, mcs).__new__(mcs, name, bases, attrs)
# Sets non-abstract conf as module
abstract = attrs.get('__abstract__', False)
if not abstract:
# http://mail.python.org/pipermail/python-ideas/2012-May/
# 014969.html
ins = cls()
ins.__name__ = ins.__module__
sys.modules[ins.__module__] = ins
return cls
class Conf(six.with_metaclass(ConfMeta)):
__abstract__ = True
|
Fix "import star" feature support
|
## Code Before:
import sys
from functools import partial, update_wrapper
from django.utils import six
def proxy(attr, default):
def wrapper(self):
# It has to be most recent,
# to override settings in tests
from django.conf import settings
value = getattr(settings, attr, default)
if callable(value):
func = partial(value, self)
return update_wrapper(func, value)
elif isinstance(value, property):
return value.__get__(self)
return value
return property(wrapper)
class ConfMeta(type):
def __new__(mcs, name, bases, attrs):
prefix = attrs.get('__prefix__', name.upper()) + '_'
fields = {
key: proxy(prefix + key, value)
for key, value in attrs.items()
if not key.startswith('__')
}
attrs.update(fields, __all__=fields.keys())
# Ready to build
cls = super(ConfMeta, mcs).__new__(mcs, name, bases, attrs)
# Sets non-abstract conf as module
abstract = attrs.get('__abstract__', False)
if not abstract:
# http://mail.python.org/pipermail/python-ideas/2012-May/
# 014969.html
ins = cls()
ins.__name__ = ins.__module__
sys.modules[ins.__module__] = ins
return cls
class Conf(six.with_metaclass(ConfMeta)):
__abstract__ = True
## Instruction:
Fix "import star" feature support
## Code After:
import sys
from functools import partial, update_wrapper
from django.utils import six
def proxy(attr, default):
def wrapper(self):
# It has to be most recent,
# to override settings in tests
from django.conf import settings
value = getattr(settings, attr, default)
if callable(value):
func = partial(value, self)
return update_wrapper(func, value)
elif isinstance(value, property):
return value.__get__(self)
return value
return property(wrapper)
class ConfMeta(type):
def __new__(mcs, name, bases, attrs):
prefix = attrs.get('__prefix__', name.upper()) + '_'
fields = {
key: proxy(prefix + key, value)
for key, value in attrs.items()
if not key.startswith('__')
}
attrs.update(fields, __all__=tuple(fields))
# Ready to build
cls = super(ConfMeta, mcs).__new__(mcs, name, bases, attrs)
# Sets non-abstract conf as module
abstract = attrs.get('__abstract__', False)
if not abstract:
# http://mail.python.org/pipermail/python-ideas/2012-May/
# 014969.html
ins = cls()
ins.__name__ = ins.__module__
sys.modules[ins.__module__] = ins
return cls
class Conf(six.with_metaclass(ConfMeta)):
__abstract__ = True
|
import sys
from functools import partial, update_wrapper
from django.utils import six
def proxy(attr, default):
def wrapper(self):
# It has to be most recent,
# to override settings in tests
from django.conf import settings
value = getattr(settings, attr, default)
if callable(value):
func = partial(value, self)
return update_wrapper(func, value)
elif isinstance(value, property):
return value.__get__(self)
return value
return property(wrapper)
class ConfMeta(type):
def __new__(mcs, name, bases, attrs):
prefix = attrs.get('__prefix__', name.upper()) + '_'
fields = {
key: proxy(prefix + key, value)
for key, value in attrs.items()
if not key.startswith('__')
}
- attrs.update(fields, __all__=fields.keys())
? ------
+ attrs.update(fields, __all__=tuple(fields))
? ++++++
# Ready to build
cls = super(ConfMeta, mcs).__new__(mcs, name, bases, attrs)
# Sets non-abstract conf as module
abstract = attrs.get('__abstract__', False)
if not abstract:
# http://mail.python.org/pipermail/python-ideas/2012-May/
# 014969.html
ins = cls()
ins.__name__ = ins.__module__
sys.modules[ins.__module__] = ins
return cls
class Conf(six.with_metaclass(ConfMeta)):
__abstract__ = True
|
dd7513f4146679d11aff6d528f11927131dc692f
|
feder/monitorings/factories.py
|
feder/monitorings/factories.py
|
from .models import Monitoring
from feder.users.factories import UserFactory
import factory
class MonitoringFactory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda n: 'monitoring-%04d' % n)
user = factory.SubFactory(UserFactory)
class Meta:
model = Monitoring
django_get_or_create = ('name', )
|
from .models import Monitoring
from feder.users.factories import UserFactory
import factory
class MonitoringFactory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda n: 'monitoring-%04d' % n)
user = factory.SubFactory(UserFactory)
description = factory.Sequence(lambda n: 'description no.%04d' % n)
template = factory.Sequence(lambda n:
'template no.%04d. reply to {{EMAIL}}' % n)
class Meta:
model = Monitoring
django_get_or_create = ('name', )
|
Add description and template to MonitoringFactory
|
Add description and template to MonitoringFactory
|
Python
|
mit
|
watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder
|
from .models import Monitoring
from feder.users.factories import UserFactory
import factory
class MonitoringFactory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda n: 'monitoring-%04d' % n)
user = factory.SubFactory(UserFactory)
+ description = factory.Sequence(lambda n: 'description no.%04d' % n)
+ template = factory.Sequence(lambda n:
+ 'template no.%04d. reply to {{EMAIL}}' % n)
class Meta:
model = Monitoring
django_get_or_create = ('name', )
|
Add description and template to MonitoringFactory
|
## Code Before:
from .models import Monitoring
from feder.users.factories import UserFactory
import factory
class MonitoringFactory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda n: 'monitoring-%04d' % n)
user = factory.SubFactory(UserFactory)
class Meta:
model = Monitoring
django_get_or_create = ('name', )
## Instruction:
Add description and template to MonitoringFactory
## Code After:
from .models import Monitoring
from feder.users.factories import UserFactory
import factory
class MonitoringFactory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda n: 'monitoring-%04d' % n)
user = factory.SubFactory(UserFactory)
description = factory.Sequence(lambda n: 'description no.%04d' % n)
template = factory.Sequence(lambda n:
'template no.%04d. reply to {{EMAIL}}' % n)
class Meta:
model = Monitoring
django_get_or_create = ('name', )
|
from .models import Monitoring
from feder.users.factories import UserFactory
import factory
class MonitoringFactory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda n: 'monitoring-%04d' % n)
user = factory.SubFactory(UserFactory)
+ description = factory.Sequence(lambda n: 'description no.%04d' % n)
+ template = factory.Sequence(lambda n:
+ 'template no.%04d. reply to {{EMAIL}}' % n)
class Meta:
model = Monitoring
django_get_or_create = ('name', )
|
cde8c1b4c89a2e0ef3765372b4838373d5729cdb
|
alg_topological_sort.py
|
alg_topological_sort.py
|
def topological_sort_recur(adjacency_dict, start_vertex,
visited_set, finish_ls):
"""Topological Sorting by Recursion."""
visited_set.add(start_vertex)
for neighbor_vertex in adjacency_dict[start_vertex]:
if neighbor_vertex not in visited_set:
topological_sort_recur(
adjacency_dict, neighbor_vertex,
visited_set, finish_ls)
finish_ls.insert(0, start_vertex)
print(finish_ls)
def topological_sort(adjacency_dict):
"""Topological Sorting for Directed Acyclic Graph (DAG)."""
visited_set = set()
finish_ls = []
for vertex in adjacency_dict:
if vertex not in visited_set:
topological_sort_recur(
adjacency_dict, vertex,
visited_set, finish_ls)
print(finish_ls)
def main():
# DAG.
dag_adjacency_dict = {
'A': {'D'},
'B': {'D'},
'C': {'D'},
'D': {'G', 'E'},
'E': {'J'},
'F': {'G'},
'G': {'I'},
'I': {'J'},
'J': {}
}
topological_sort(dag_adjacency_dict)
if __name__ == '__main__':
main()
|
def topological_sort_recur(adjacency_dict, start_vertex,
visited_set, finish_ls):
"""Topological Sorting by Recursion."""
visited_set.add(start_vertex)
for neighbor_vertex in adjacency_dict[start_vertex] - visited_set:
topological_sort_recur(
adjacency_dict, neighbor_vertex,
visited_set, finish_ls)
finish_ls.insert(0, start_vertex)
print(finish_ls)
def topological_sort(adjacency_dict):
"""Topological Sorting for Directed Acyclic Graph (DAG)."""
visited_set = set()
finish_ls = []
for vertex in adjacency_dict:
if vertex not in visited_set:
topological_sort_recur(
adjacency_dict, vertex,
visited_set, finish_ls)
print(finish_ls)
def main():
# DAG.
dag_adjacency_dict = {
'A': {'D'},
'B': {'D'},
'C': {'D'},
'D': {'G', 'E'},
'E': {'J'},
'F': {'G'},
'G': {'I'},
'I': {'J'},
'J': set()
}
topological_sort(dag_adjacency_dict)
if __name__ == '__main__':
main()
|
Revise for loop to find neighbor vertices
|
Revise for loop to find neighbor vertices
|
Python
|
bsd-2-clause
|
bowen0701/algorithms_data_structures
|
def topological_sort_recur(adjacency_dict, start_vertex,
visited_set, finish_ls):
"""Topological Sorting by Recursion."""
visited_set.add(start_vertex)
- for neighbor_vertex in adjacency_dict[start_vertex]:
+ for neighbor_vertex in adjacency_dict[start_vertex] - visited_set:
- if neighbor_vertex not in visited_set:
topological_sort_recur(
adjacency_dict, neighbor_vertex,
visited_set, finish_ls)
finish_ls.insert(0, start_vertex)
print(finish_ls)
def topological_sort(adjacency_dict):
"""Topological Sorting for Directed Acyclic Graph (DAG)."""
visited_set = set()
finish_ls = []
for vertex in adjacency_dict:
if vertex not in visited_set:
topological_sort_recur(
adjacency_dict, vertex,
visited_set, finish_ls)
print(finish_ls)
def main():
# DAG.
dag_adjacency_dict = {
'A': {'D'},
'B': {'D'},
'C': {'D'},
'D': {'G', 'E'},
'E': {'J'},
'F': {'G'},
'G': {'I'},
'I': {'J'},
- 'J': {}
+ 'J': set()
}
topological_sort(dag_adjacency_dict)
if __name__ == '__main__':
main()
|
Revise for loop to find neighbor vertices
|
## Code Before:
def topological_sort_recur(adjacency_dict, start_vertex,
visited_set, finish_ls):
"""Topological Sorting by Recursion."""
visited_set.add(start_vertex)
for neighbor_vertex in adjacency_dict[start_vertex]:
if neighbor_vertex not in visited_set:
topological_sort_recur(
adjacency_dict, neighbor_vertex,
visited_set, finish_ls)
finish_ls.insert(0, start_vertex)
print(finish_ls)
def topological_sort(adjacency_dict):
"""Topological Sorting for Directed Acyclic Graph (DAG)."""
visited_set = set()
finish_ls = []
for vertex in adjacency_dict:
if vertex not in visited_set:
topological_sort_recur(
adjacency_dict, vertex,
visited_set, finish_ls)
print(finish_ls)
def main():
# DAG.
dag_adjacency_dict = {
'A': {'D'},
'B': {'D'},
'C': {'D'},
'D': {'G', 'E'},
'E': {'J'},
'F': {'G'},
'G': {'I'},
'I': {'J'},
'J': {}
}
topological_sort(dag_adjacency_dict)
if __name__ == '__main__':
main()
## Instruction:
Revise for loop to find neighbor vertices
## Code After:
def topological_sort_recur(adjacency_dict, start_vertex,
visited_set, finish_ls):
"""Topological Sorting by Recursion."""
visited_set.add(start_vertex)
for neighbor_vertex in adjacency_dict[start_vertex] - visited_set:
topological_sort_recur(
adjacency_dict, neighbor_vertex,
visited_set, finish_ls)
finish_ls.insert(0, start_vertex)
print(finish_ls)
def topological_sort(adjacency_dict):
"""Topological Sorting for Directed Acyclic Graph (DAG)."""
visited_set = set()
finish_ls = []
for vertex in adjacency_dict:
if vertex not in visited_set:
topological_sort_recur(
adjacency_dict, vertex,
visited_set, finish_ls)
print(finish_ls)
def main():
# DAG.
dag_adjacency_dict = {
'A': {'D'},
'B': {'D'},
'C': {'D'},
'D': {'G', 'E'},
'E': {'J'},
'F': {'G'},
'G': {'I'},
'I': {'J'},
'J': set()
}
topological_sort(dag_adjacency_dict)
if __name__ == '__main__':
main()
|
def topological_sort_recur(adjacency_dict, start_vertex,
visited_set, finish_ls):
"""Topological Sorting by Recursion."""
visited_set.add(start_vertex)
- for neighbor_vertex in adjacency_dict[start_vertex]:
+ for neighbor_vertex in adjacency_dict[start_vertex] - visited_set:
? ++++++++++++++
- if neighbor_vertex not in visited_set:
topological_sort_recur(
adjacency_dict, neighbor_vertex,
visited_set, finish_ls)
finish_ls.insert(0, start_vertex)
print(finish_ls)
def topological_sort(adjacency_dict):
"""Topological Sorting for Directed Acyclic Graph (DAG)."""
visited_set = set()
finish_ls = []
for vertex in adjacency_dict:
if vertex not in visited_set:
topological_sort_recur(
adjacency_dict, vertex,
visited_set, finish_ls)
print(finish_ls)
def main():
# DAG.
dag_adjacency_dict = {
'A': {'D'},
'B': {'D'},
'C': {'D'},
'D': {'G', 'E'},
'E': {'J'},
'F': {'G'},
'G': {'I'},
'I': {'J'},
- 'J': {}
+ 'J': set()
}
topological_sort(dag_adjacency_dict)
if __name__ == '__main__':
main()
|
8026b5f309264d4e72c3bc503601468cf1cdfcdd
|
src/nodeconductor_assembly_waldur/packages/filters.py
|
src/nodeconductor_assembly_waldur/packages/filters.py
|
import django_filters
from nodeconductor.core.filters import UUIDFilter
from . import models
class PackageTemplateFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_type='icontains')
settings_uuid = UUIDFilter(name='service_settings__uuid')
class Meta(object):
model = models.PackageTemplate
fields = ('name', 'settings_uuid',)
class OpenStackPackageFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_type='icontains')
settings_uuid = UUIDFilter(name='service_settings__uuid')
class Meta(object):
model = models.OpenStackPackage
fields = ('name', 'settings_uuid',)
|
import django_filters
from nodeconductor.core.filters import UUIDFilter
from . import models
class PackageTemplateFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_type='icontains')
settings_uuid = UUIDFilter(name='service_settings__uuid')
class Meta(object):
model = models.PackageTemplate
fields = ('name', 'settings_uuid',)
class OpenStackPackageFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_type='icontains')
customer = UUIDFilter(name='tenant__service_project_link__project__customer')
project = UUIDFilter(name='tenant__service_project_link__project')
class Meta(object):
model = models.OpenStackPackage
fields = ('name', 'customer', 'project')
|
Enable filtering OpenStack package by customer and project (WAL-49)
|
Enable filtering OpenStack package by customer and project (WAL-49)
|
Python
|
mit
|
opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/nodeconductor-assembly-waldur
|
import django_filters
from nodeconductor.core.filters import UUIDFilter
from . import models
class PackageTemplateFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_type='icontains')
settings_uuid = UUIDFilter(name='service_settings__uuid')
class Meta(object):
model = models.PackageTemplate
fields = ('name', 'settings_uuid',)
class OpenStackPackageFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_type='icontains')
- settings_uuid = UUIDFilter(name='service_settings__uuid')
+ customer = UUIDFilter(name='tenant__service_project_link__project__customer')
+ project = UUIDFilter(name='tenant__service_project_link__project')
class Meta(object):
model = models.OpenStackPackage
- fields = ('name', 'settings_uuid',)
+ fields = ('name', 'customer', 'project')
|
Enable filtering OpenStack package by customer and project (WAL-49)
|
## Code Before:
import django_filters
from nodeconductor.core.filters import UUIDFilter
from . import models
class PackageTemplateFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_type='icontains')
settings_uuid = UUIDFilter(name='service_settings__uuid')
class Meta(object):
model = models.PackageTemplate
fields = ('name', 'settings_uuid',)
class OpenStackPackageFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_type='icontains')
settings_uuid = UUIDFilter(name='service_settings__uuid')
class Meta(object):
model = models.OpenStackPackage
fields = ('name', 'settings_uuid',)
## Instruction:
Enable filtering OpenStack package by customer and project (WAL-49)
## Code After:
import django_filters
from nodeconductor.core.filters import UUIDFilter
from . import models
class PackageTemplateFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_type='icontains')
settings_uuid = UUIDFilter(name='service_settings__uuid')
class Meta(object):
model = models.PackageTemplate
fields = ('name', 'settings_uuid',)
class OpenStackPackageFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_type='icontains')
customer = UUIDFilter(name='tenant__service_project_link__project__customer')
project = UUIDFilter(name='tenant__service_project_link__project')
class Meta(object):
model = models.OpenStackPackage
fields = ('name', 'customer', 'project')
|
import django_filters
from nodeconductor.core.filters import UUIDFilter
from . import models
class PackageTemplateFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_type='icontains')
settings_uuid = UUIDFilter(name='service_settings__uuid')
class Meta(object):
model = models.PackageTemplate
fields = ('name', 'settings_uuid',)
class OpenStackPackageFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_type='icontains')
- settings_uuid = UUIDFilter(name='service_settings__uuid')
+ customer = UUIDFilter(name='tenant__service_project_link__project__customer')
+ project = UUIDFilter(name='tenant__service_project_link__project')
class Meta(object):
model = models.OpenStackPackage
- fields = ('name', 'settings_uuid',)
+ fields = ('name', 'customer', 'project')
|
d80a92cfe45907b9f91fd212a3b06fa0b2321364
|
wagtail/tests/routablepage/models.py
|
wagtail/tests/routablepage/models.py
|
from django.db import models
from django.http import HttpResponse
from django.conf.urls import url
from wagtail.contrib.wagtailroutablepage.models import RoutablePage
def routable_page_external_view(request, arg):
return HttpResponse("EXTERNAL VIEW: " + arg)
class RoutablePageTest(RoutablePage):
subpage_urls = (
url(r'^$', 'main', name='main'),
url(r'^archive/year/(\d+)/$', 'archive_by_year', name='archive_by_year'),
url(r'^archive/author/(?P<author_slug>.+)/$', 'archive_by_author', name='archive_by_author'),
url(r'^external/(.+)/$', routable_page_external_view, name='external_view')
)
def archive_by_year(self, request, year):
return HttpResponse("ARCHIVE BY YEAR: " + str(year))
def archive_by_author(self, request, author_slug):
return HttpResponse("ARCHIVE BY AUTHOR: " + author_slug)
def main(self, request):
return HttpResponse("MAIN VIEW")
|
from django.db import models
from django.http import HttpResponse
from django.conf.urls import url
from wagtail.contrib.wagtailroutablepage.models import RoutablePage
def routable_page_external_view(request, arg):
return HttpResponse("EXTERNAL VIEW: " + arg)
class RoutablePageTest(RoutablePage):
@property
def subpage_urls(self):
return (
url(r'^$', self.main, name='main'),
url(r'^archive/year/(\d+)/$', self.archive_by_year, name='archive_by_year'),
url(r'^archive/author/(?P<author_slug>.+)/$', self.archive_by_author, name='archive_by_author'),
url(r'^external/(.+)/$', routable_page_external_view, name='external_view')
)
def archive_by_year(self, request, year):
return HttpResponse("ARCHIVE BY YEAR: " + str(year))
def archive_by_author(self, request, author_slug):
return HttpResponse("ARCHIVE BY AUTHOR: " + author_slug)
def main(self, request):
return HttpResponse("MAIN VIEW")
|
Make subpage_urls a property on RoutablePageTest
|
Make subpage_urls a property on RoutablePageTest
|
Python
|
bsd-3-clause
|
JoshBarr/wagtail,mikedingjan/wagtail,gasman/wagtail,takeflight/wagtail,kurtw/wagtail,jorge-marques/wagtail,Pennebaker/wagtail,zerolab/wagtail,kurtw/wagtail,bjesus/wagtail,nilnvoid/wagtail,mayapurmedia/wagtail,chrxr/wagtail,nilnvoid/wagtail,zerolab/wagtail,Klaudit/wagtail,iho/wagtail,serzans/wagtail,Tivix/wagtail,wagtail/wagtail,zerolab/wagtail,kurtw/wagtail,inonit/wagtail,mephizzle/wagtail,quru/wagtail,mjec/wagtail,Toshakins/wagtail,rv816/wagtail,mjec/wagtail,nimasmi/wagtail,gasman/wagtail,nealtodd/wagtail,darith27/wagtail,KimGlazebrook/wagtail-experiment,quru/wagtail,iansprice/wagtail,rsalmaso/wagtail,gasman/wagtail,inonit/wagtail,WQuanfeng/wagtail,stevenewey/wagtail,stevenewey/wagtail,nealtodd/wagtail,KimGlazebrook/wagtail-experiment,hamsterbacke23/wagtail,kaedroho/wagtail,timorieber/wagtail,thenewguy/wagtail,mephizzle/wagtail,mixxorz/wagtail,janusnic/wagtail,chrxr/wagtail,zerolab/wagtail,mephizzle/wagtail,quru/wagtail,davecranwell/wagtail,takeshineshiro/wagtail,nimasmi/wagtail,thenewguy/wagtail,takeflight/wagtail,jordij/wagtail,Klaudit/wagtail,marctc/wagtail,hamsterbacke23/wagtail,mayapurmedia/wagtail,nutztherookie/wagtail,darith27/wagtail,Toshakins/wagtail,taedori81/wagtail,gogobook/wagtail,JoshBarr/wagtail,nealtodd/wagtail,hamsterbacke23/wagtail,takeshineshiro/wagtail,Klaudit/wagtail,thenewguy/wagtail,jorge-marques/wagtail,hanpama/wagtail,takeflight/wagtail,thenewguy/wagtail,stevenewey/wagtail,mjec/wagtail,hamsterbacke23/wagtail,nutztherookie/wagtail,mikedingjan/wagtail,nrsimha/wagtail,hanpama/wagtail,jnns/wagtail,kurtrwall/wagtail,mikedingjan/wagtail,Toshakins/wagtail,wagtail/wagtail,FlipperPA/wagtail,jordij/wagtail,janusnic/wagtail,gasman/wagtail,nimasmi/wagtail,darith27/wagtail,rsalmaso/wagtail,jorge-marques/wagtail,kurtw/wagtail,mixxorz/wagtail,kurtrwall/wagtail,iansprice/wagtail,serzans/wagtail,serzans/wagtail,iansprice/wagtail,nrsimha/wagtail,JoshBarr/wagtail,taedori81/wagtail,inonit/wagtail,bjesus/wagtail,mayapurmedia/wagtail,FlipperPA/wagtail,nrsimha/wa
gtail,Tivix/wagtail,marctc/wagtail,janusnic/wagtail,iho/wagtail,takeflight/wagtail,taedori81/wagtail,gogobook/wagtail,hanpama/wagtail,jorge-marques/wagtail,mephizzle/wagtail,jnns/wagtail,tangentlabs/wagtail,nilnvoid/wagtail,rsalmaso/wagtail,m-sanders/wagtail,taedori81/wagtail,nimasmi/wagtail,nutztherookie/wagtail,Pennebaker/wagtail,rjsproxy/wagtail,rjsproxy/wagtail,kaedroho/wagtail,hanpama/wagtail,chrxr/wagtail,jordij/wagtail,mayapurmedia/wagtail,JoshBarr/wagtail,jordij/wagtail,WQuanfeng/wagtail,taedori81/wagtail,gogobook/wagtail,Pennebaker/wagtail,m-sanders/wagtail,jnns/wagtail,timorieber/wagtail,mixxorz/wagtail,thenewguy/wagtail,davecranwell/wagtail,gasman/wagtail,nealtodd/wagtail,mixxorz/wagtail,KimGlazebrook/wagtail-experiment,iansprice/wagtail,tangentlabs/wagtail,wagtail/wagtail,m-sanders/wagtail,takeshineshiro/wagtail,davecranwell/wagtail,timorieber/wagtail,mixxorz/wagtail,rjsproxy/wagtail,Klaudit/wagtail,kaedroho/wagtail,wagtail/wagtail,janusnic/wagtail,jnns/wagtail,gogobook/wagtail,iho/wagtail,rv816/wagtail,KimGlazebrook/wagtail-experiment,mikedingjan/wagtail,torchbox/wagtail,darith27/wagtail,stevenewey/wagtail,rsalmaso/wagtail,torchbox/wagtail,bjesus/wagtail,Pennebaker/wagtail,iho/wagtail,chrxr/wagtail,WQuanfeng/wagtail,Tivix/wagtail,Toshakins/wagtail,rsalmaso/wagtail,mjec/wagtail,nilnvoid/wagtail,rv816/wagtail,kaedroho/wagtail,kurtrwall/wagtail,m-sanders/wagtail,nutztherookie/wagtail,nrsimha/wagtail,inonit/wagtail,tangentlabs/wagtail,FlipperPA/wagtail,kurtrwall/wagtail,FlipperPA/wagtail,torchbox/wagtail,rv816/wagtail,torchbox/wagtail,Tivix/wagtail,davecranwell/wagtail,quru/wagtail,WQuanfeng/wagtail,wagtail/wagtail,zerolab/wagtail,serzans/wagtail,takeshineshiro/wagtail,kaedroho/wagtail,marctc/wagtail,timorieber/wagtail,bjesus/wagtail,tangentlabs/wagtail,marctc/wagtail,rjsproxy/wagtail,jorge-marques/wagtail
|
from django.db import models
from django.http import HttpResponse
from django.conf.urls import url
from wagtail.contrib.wagtailroutablepage.models import RoutablePage
def routable_page_external_view(request, arg):
return HttpResponse("EXTERNAL VIEW: " + arg)
class RoutablePageTest(RoutablePage):
- subpage_urls = (
+ @property
+ def subpage_urls(self):
+ return (
- url(r'^$', 'main', name='main'),
+ url(r'^$', self.main, name='main'),
- url(r'^archive/year/(\d+)/$', 'archive_by_year', name='archive_by_year'),
+ url(r'^archive/year/(\d+)/$', self.archive_by_year, name='archive_by_year'),
- url(r'^archive/author/(?P<author_slug>.+)/$', 'archive_by_author', name='archive_by_author'),
+ url(r'^archive/author/(?P<author_slug>.+)/$', self.archive_by_author, name='archive_by_author'),
- url(r'^external/(.+)/$', routable_page_external_view, name='external_view')
+ url(r'^external/(.+)/$', routable_page_external_view, name='external_view')
- )
+ )
def archive_by_year(self, request, year):
return HttpResponse("ARCHIVE BY YEAR: " + str(year))
def archive_by_author(self, request, author_slug):
return HttpResponse("ARCHIVE BY AUTHOR: " + author_slug)
def main(self, request):
return HttpResponse("MAIN VIEW")
|
Make subpage_urls a property on RoutablePageTest
|
## Code Before:
from django.db import models
from django.http import HttpResponse
from django.conf.urls import url
from wagtail.contrib.wagtailroutablepage.models import RoutablePage
def routable_page_external_view(request, arg):
return HttpResponse("EXTERNAL VIEW: " + arg)
class RoutablePageTest(RoutablePage):
subpage_urls = (
url(r'^$', 'main', name='main'),
url(r'^archive/year/(\d+)/$', 'archive_by_year', name='archive_by_year'),
url(r'^archive/author/(?P<author_slug>.+)/$', 'archive_by_author', name='archive_by_author'),
url(r'^external/(.+)/$', routable_page_external_view, name='external_view')
)
def archive_by_year(self, request, year):
return HttpResponse("ARCHIVE BY YEAR: " + str(year))
def archive_by_author(self, request, author_slug):
return HttpResponse("ARCHIVE BY AUTHOR: " + author_slug)
def main(self, request):
return HttpResponse("MAIN VIEW")
## Instruction:
Make subpage_urls a property on RoutablePageTest
## Code After:
from django.db import models
from django.http import HttpResponse
from django.conf.urls import url
from wagtail.contrib.wagtailroutablepage.models import RoutablePage
def routable_page_external_view(request, arg):
return HttpResponse("EXTERNAL VIEW: " + arg)
class RoutablePageTest(RoutablePage):
@property
def subpage_urls(self):
return (
url(r'^$', self.main, name='main'),
url(r'^archive/year/(\d+)/$', self.archive_by_year, name='archive_by_year'),
url(r'^archive/author/(?P<author_slug>.+)/$', self.archive_by_author, name='archive_by_author'),
url(r'^external/(.+)/$', routable_page_external_view, name='external_view')
)
def archive_by_year(self, request, year):
return HttpResponse("ARCHIVE BY YEAR: " + str(year))
def archive_by_author(self, request, author_slug):
return HttpResponse("ARCHIVE BY AUTHOR: " + author_slug)
def main(self, request):
return HttpResponse("MAIN VIEW")
|
from django.db import models
from django.http import HttpResponse
from django.conf.urls import url
from wagtail.contrib.wagtailroutablepage.models import RoutablePage
def routable_page_external_view(request, arg):
return HttpResponse("EXTERNAL VIEW: " + arg)
class RoutablePageTest(RoutablePage):
- subpage_urls = (
+ @property
+ def subpage_urls(self):
+ return (
- url(r'^$', 'main', name='main'),
? ^ -
+ url(r'^$', self.main, name='main'),
? ++++ ^^^^^
- url(r'^archive/year/(\d+)/$', 'archive_by_year', name='archive_by_year'),
? ^ -
+ url(r'^archive/year/(\d+)/$', self.archive_by_year, name='archive_by_year'),
? ++++ ^^^^^
- url(r'^archive/author/(?P<author_slug>.+)/$', 'archive_by_author', name='archive_by_author'),
? ^ -
+ url(r'^archive/author/(?P<author_slug>.+)/$', self.archive_by_author, name='archive_by_author'),
? ++++ ^^^^^
- url(r'^external/(.+)/$', routable_page_external_view, name='external_view')
+ url(r'^external/(.+)/$', routable_page_external_view, name='external_view')
? ++++
- )
+ )
def archive_by_year(self, request, year):
return HttpResponse("ARCHIVE BY YEAR: " + str(year))
def archive_by_author(self, request, author_slug):
return HttpResponse("ARCHIVE BY AUTHOR: " + author_slug)
def main(self, request):
return HttpResponse("MAIN VIEW")
|
a7e87621b3223e0c4df9d417129fcb7da545c629
|
integration/integration.py
|
integration/integration.py
|
import random
# External Packages
import numpy as np
def sin_theta_sum(variables):
theta = 0
for var in variables:
theta += var
return np.sin(theta)
def gen_random_list(count, rmin, rmax):
variables = []
for i in range(count):
value = np.random.uniform(rmin, rmax)
variables.append(value)
test_range(rmin, rmax, value)
return variables
def run_monte_carlo(samples):
return False
def main():
rmax = np.pi/8
variables = gen_random_list(7, 0, rmax)
result = sin_theta_sum(variables)
print(variables)
print(result)
def test_range(rmin, rmax, value):
if (value <= rmin or value >= rmax):
print(False)
main()
|
import random
# External Packages
import numpy as np
def sin_theta_sum(theta):
return np.sin(theta)
def gen_random_value(count, rmin, rmax):
value = 0
for i in range(count):
value += np.random.uniform(rmin, rmax)
# test_range(rmin, rmax, value)
return value
def run_monte_carlo(samples, function, func_coeff, func_vars):
value = 0
for i in range(samples):
if i % 10000 == 0:
print(i)
value += function(func_vars)
value = value*func_coeff/samples
return value
def sin_monte_element(rmax):
value = gen_random_value(8, 0, rmax)
result = sin_theta_sum(value)
return result
def main():
rmax = np.pi/8
samples = 10000000
coefficient = 1000000
volume = np.power(np.pi/8, 8)
func_coeff = coefficient*volume
func_vars = rmax
result = run_monte_carlo(samples, sin_monte_element, func_coeff, func_vars)
print(result)
def test_range(rmin, rmax, value):
if (value <= rmin or value >= rmax):
print(False)
main()
|
Add preliminary function to execute monte-carlo approximation.
|
Add preliminary function to execute monte-carlo approximation.
Adjust functions, remove some generality for speed. Implement monte-carlo
for the exercise case with initial config. No error calculation or
execution for varied N yet. Initial tests with N = 10^7 give a
value of ~537.1 and take ~1.20min.
|
Python
|
mit
|
lemming52/white_knight
|
import random
# External Packages
import numpy as np
- def sin_theta_sum(variables):
+ def sin_theta_sum(theta):
-
- theta = 0
- for var in variables:
- theta += var
return np.sin(theta)
- def gen_random_list(count, rmin, rmax):
+ def gen_random_value(count, rmin, rmax):
- variables = []
+ value = 0
for i in range(count):
- value = np.random.uniform(rmin, rmax)
+ value += np.random.uniform(rmin, rmax)
- variables.append(value)
- test_range(rmin, rmax, value)
+ # test_range(rmin, rmax, value)
- return variables
+ return value
- def run_monte_carlo(samples):
+ def run_monte_carlo(samples, function, func_coeff, func_vars):
+ value = 0
+ for i in range(samples):
+ if i % 10000 == 0:
+ print(i)
+ value += function(func_vars)
+ value = value*func_coeff/samples
- return False
+ return value
+
+
+ def sin_monte_element(rmax):
+ value = gen_random_value(8, 0, rmax)
+ result = sin_theta_sum(value)
+ return result
def main():
rmax = np.pi/8
- variables = gen_random_list(7, 0, rmax)
- result = sin_theta_sum(variables)
- print(variables)
+ samples = 10000000
+ coefficient = 1000000
+ volume = np.power(np.pi/8, 8)
+ func_coeff = coefficient*volume
+ func_vars = rmax
+ result = run_monte_carlo(samples, sin_monte_element, func_coeff, func_vars)
print(result)
def test_range(rmin, rmax, value):
if (value <= rmin or value >= rmax):
print(False)
main()
|
Add preliminary function to execute monte-carlo approximation.
|
## Code Before:
import random
# External Packages
import numpy as np
def sin_theta_sum(variables):
theta = 0
for var in variables:
theta += var
return np.sin(theta)
def gen_random_list(count, rmin, rmax):
variables = []
for i in range(count):
value = np.random.uniform(rmin, rmax)
variables.append(value)
test_range(rmin, rmax, value)
return variables
def run_monte_carlo(samples):
return False
def main():
rmax = np.pi/8
variables = gen_random_list(7, 0, rmax)
result = sin_theta_sum(variables)
print(variables)
print(result)
def test_range(rmin, rmax, value):
if (value <= rmin or value >= rmax):
print(False)
main()
## Instruction:
Add preliminary function to execute monte-carlo approximation.
## Code After:
import random
# External Packages
import numpy as np
def sin_theta_sum(theta):
return np.sin(theta)
def gen_random_value(count, rmin, rmax):
value = 0
for i in range(count):
value += np.random.uniform(rmin, rmax)
# test_range(rmin, rmax, value)
return value
def run_monte_carlo(samples, function, func_coeff, func_vars):
value = 0
for i in range(samples):
if i % 10000 == 0:
print(i)
value += function(func_vars)
value = value*func_coeff/samples
return value
def sin_monte_element(rmax):
value = gen_random_value(8, 0, rmax)
result = sin_theta_sum(value)
return result
def main():
rmax = np.pi/8
samples = 10000000
coefficient = 1000000
volume = np.power(np.pi/8, 8)
func_coeff = coefficient*volume
func_vars = rmax
result = run_monte_carlo(samples, sin_monte_element, func_coeff, func_vars)
print(result)
def test_range(rmin, rmax, value):
if (value <= rmin or value >= rmax):
print(False)
main()
|
import random
# External Packages
import numpy as np
- def sin_theta_sum(variables):
? ^ -------
+ def sin_theta_sum(theta):
? ^^^^
-
- theta = 0
- for var in variables:
- theta += var
return np.sin(theta)
- def gen_random_list(count, rmin, rmax):
? ^^^
+ def gen_random_value(count, rmin, rmax):
? ++ ^^
- variables = []
+ value = 0
for i in range(count):
- value = np.random.uniform(rmin, rmax)
+ value += np.random.uniform(rmin, rmax)
? +
- variables.append(value)
- test_range(rmin, rmax, value)
+ # test_range(rmin, rmax, value)
? ++
- return variables
? ---- -
+ return value
? +
- def run_monte_carlo(samples):
+ def run_monte_carlo(samples, function, func_coeff, func_vars):
+ value = 0
+ for i in range(samples):
+ if i % 10000 == 0:
+ print(i)
+ value += function(func_vars)
+ value = value*func_coeff/samples
- return False
? ^ ^
+ return value
? ^ ^
+
+
+ def sin_monte_element(rmax):
+ value = gen_random_value(8, 0, rmax)
+ result = sin_theta_sum(value)
+ return result
def main():
rmax = np.pi/8
- variables = gen_random_list(7, 0, rmax)
- result = sin_theta_sum(variables)
- print(variables)
+ samples = 10000000
+ coefficient = 1000000
+ volume = np.power(np.pi/8, 8)
+ func_coeff = coefficient*volume
+ func_vars = rmax
+ result = run_monte_carlo(samples, sin_monte_element, func_coeff, func_vars)
print(result)
def test_range(rmin, rmax, value):
if (value <= rmin or value >= rmax):
print(False)
main()
|
64a78085fffe8dc525596b870c8e150d9171f271
|
resources/site-packages/pulsar/monitor.py
|
resources/site-packages/pulsar/monitor.py
|
import xbmc
import urllib2
import threading
from pulsar.config import PULSARD_HOST
class PulsarMonitor(xbmc.Monitor):
def __init__(self):
self._closing = threading.Event()
@property
def closing(self):
return self._closing
def onAbortRequested(self):
self._closing.set()
def onSettingsChanged(self):
try:
urllib2.urlopen("%s/reload" % PULSARD_HOST)
except:
pass
|
import xbmc
import urllib2
import threading
from pulsar.config import PULSARD_HOST
class PulsarMonitor(xbmc.Monitor):
def __init__(self):
self._closing = threading.Event()
@property
def closing(self):
return self._closing
def onAbortRequested(self):
# Only when closing Kodi
if xbmc.abortRequested:
self._closing.set()
self._closing.clear()
def onSettingsChanged(self):
try:
urllib2.urlopen("%s/reload" % PULSARD_HOST)
except:
pass
|
Fix issue where Pulsar would enter a restart loop when cancelling a buffering
|
Fix issue where Pulsar would enter a restart loop when cancelling a buffering
Signed-off-by: Steeve Morin <[email protected]>
|
Python
|
bsd-3-clause
|
likeitneverwentaway/plugin.video.quasar,komakino/plugin.video.pulsar,johnnyslt/plugin.video.quasar,elrosti/plugin.video.pulsar,Zopieux/plugin.video.pulsar,pmphxs/plugin.video.pulsar,johnnyslt/plugin.video.quasar,steeve/plugin.video.pulsar,peer23peer/plugin.video.quasar,peer23peer/plugin.video.quasar,likeitneverwentaway/plugin.video.quasar
|
import xbmc
import urllib2
import threading
from pulsar.config import PULSARD_HOST
class PulsarMonitor(xbmc.Monitor):
def __init__(self):
self._closing = threading.Event()
@property
def closing(self):
return self._closing
def onAbortRequested(self):
+ # Only when closing Kodi
+ if xbmc.abortRequested:
- self._closing.set()
+ self._closing.set()
+ self._closing.clear()
def onSettingsChanged(self):
try:
urllib2.urlopen("%s/reload" % PULSARD_HOST)
except:
pass
|
Fix issue where Pulsar would enter a restart loop when cancelling a buffering
|
## Code Before:
import xbmc
import urllib2
import threading
from pulsar.config import PULSARD_HOST
class PulsarMonitor(xbmc.Monitor):
def __init__(self):
self._closing = threading.Event()
@property
def closing(self):
return self._closing
def onAbortRequested(self):
self._closing.set()
def onSettingsChanged(self):
try:
urllib2.urlopen("%s/reload" % PULSARD_HOST)
except:
pass
## Instruction:
Fix issue where Pulsar would enter a restart loop when cancelling a buffering
## Code After:
import xbmc
import urllib2
import threading
from pulsar.config import PULSARD_HOST
class PulsarMonitor(xbmc.Monitor):
def __init__(self):
self._closing = threading.Event()
@property
def closing(self):
return self._closing
def onAbortRequested(self):
# Only when closing Kodi
if xbmc.abortRequested:
self._closing.set()
self._closing.clear()
def onSettingsChanged(self):
try:
urllib2.urlopen("%s/reload" % PULSARD_HOST)
except:
pass
|
import xbmc
import urllib2
import threading
from pulsar.config import PULSARD_HOST
class PulsarMonitor(xbmc.Monitor):
def __init__(self):
self._closing = threading.Event()
@property
def closing(self):
return self._closing
def onAbortRequested(self):
+ # Only when closing Kodi
+ if xbmc.abortRequested:
- self._closing.set()
+ self._closing.set()
? ++++
+ self._closing.clear()
def onSettingsChanged(self):
try:
urllib2.urlopen("%s/reload" % PULSARD_HOST)
except:
pass
|
90e557d681c9ea3f974ee5357ec67f294322d224
|
src/elm_doc/decorators.py
|
src/elm_doc/decorators.py
|
import functools
import subprocess
from doit.exceptions import TaskFailed
def capture_subprocess_error(fn):
@functools.wraps(fn)
def wrapper(*args, **kwargs):
try:
return fn(*args, **kwargs)
except subprocess.CalledProcessError as e:
return TaskFailed(
'Error while executing {}:\nstdout:\n{}\n\nstderr:\n{}'.format(
' '.join(e.cmd),
e.stdout.decode('utf8') if e.stdout else '',
e.stderr.decode('utf8') if e.stderr else ''))
return wrapper
|
import functools
import subprocess
from doit.exceptions import TaskFailed
def capture_subprocess_error(fn):
@functools.wraps(fn)
def wrapper(*args, **kwargs):
try:
return fn(*args, **kwargs)
except subprocess.CalledProcessError as e:
command_string = e.cmd if isinstance(e.cmd, str) else ' '.join(e.cmd)
return TaskFailed(
'Error while executing {}:\nstdout:\n{}\n\nstderr:\n{}'.format(
command_string,
e.stdout.decode('utf8') if e.stdout else '',
e.stderr.decode('utf8') if e.stderr else ''))
return wrapper
|
Clean error output when command is a string
|
Clean error output when command is a string
|
Python
|
bsd-3-clause
|
ento/elm-doc,ento/elm-doc
|
import functools
import subprocess
from doit.exceptions import TaskFailed
def capture_subprocess_error(fn):
@functools.wraps(fn)
def wrapper(*args, **kwargs):
try:
return fn(*args, **kwargs)
except subprocess.CalledProcessError as e:
+ command_string = e.cmd if isinstance(e.cmd, str) else ' '.join(e.cmd)
return TaskFailed(
'Error while executing {}:\nstdout:\n{}\n\nstderr:\n{}'.format(
- ' '.join(e.cmd),
+ command_string,
e.stdout.decode('utf8') if e.stdout else '',
e.stderr.decode('utf8') if e.stderr else ''))
return wrapper
|
Clean error output when command is a string
|
## Code Before:
import functools
import subprocess
from doit.exceptions import TaskFailed
def capture_subprocess_error(fn):
@functools.wraps(fn)
def wrapper(*args, **kwargs):
try:
return fn(*args, **kwargs)
except subprocess.CalledProcessError as e:
return TaskFailed(
'Error while executing {}:\nstdout:\n{}\n\nstderr:\n{}'.format(
' '.join(e.cmd),
e.stdout.decode('utf8') if e.stdout else '',
e.stderr.decode('utf8') if e.stderr else ''))
return wrapper
## Instruction:
Clean error output when command is a string
## Code After:
import functools
import subprocess
from doit.exceptions import TaskFailed
def capture_subprocess_error(fn):
@functools.wraps(fn)
def wrapper(*args, **kwargs):
try:
return fn(*args, **kwargs)
except subprocess.CalledProcessError as e:
command_string = e.cmd if isinstance(e.cmd, str) else ' '.join(e.cmd)
return TaskFailed(
'Error while executing {}:\nstdout:\n{}\n\nstderr:\n{}'.format(
command_string,
e.stdout.decode('utf8') if e.stdout else '',
e.stderr.decode('utf8') if e.stderr else ''))
return wrapper
|
import functools
import subprocess
from doit.exceptions import TaskFailed
def capture_subprocess_error(fn):
@functools.wraps(fn)
def wrapper(*args, **kwargs):
try:
return fn(*args, **kwargs)
except subprocess.CalledProcessError as e:
+ command_string = e.cmd if isinstance(e.cmd, str) else ' '.join(e.cmd)
return TaskFailed(
'Error while executing {}:\nstdout:\n{}\n\nstderr:\n{}'.format(
- ' '.join(e.cmd),
+ command_string,
e.stdout.decode('utf8') if e.stdout else '',
e.stderr.decode('utf8') if e.stderr else ''))
return wrapper
|
61a6d057302767aa49633d6d010f7da583035533
|
web/templatetags/getattribute.py
|
web/templatetags/getattribute.py
|
import re
from django import template
from django.conf import settings
numeric_test = re.compile("^\d+$")
register = template.Library()
def getattribute(value, arg):
"""Gets an attribute of an object dynamically from a string name"""
if hasattr(value, str(arg)):
return getattr(value, arg)
elif hasattr(value, 'has_key') and value.has_key(arg):
return value[arg]
elif numeric_test.match(str(arg)) and len(value) > int(arg):
return value[int(arg)]
else:
return settings.TEMPLATE_STRING_IF_INVALID
register.filter('getattribute', getattribute)
# Then, in template:
# {% load getattribute %}
# {{ object|getattribute:dynamic_string_var }}
|
import re
from django import template
from django.conf import settings
numeric_test = re.compile("^\d+$")
register = template.Library()
def getattribute(value, arg):
"""Gets an attribute of an object dynamically from a string name"""
if hasattr(value, str(arg)):
if callable(getattr(value, arg)):
return getattr(value, arg)()
return getattr(value, arg)
elif hasattr(value, 'has_key') and value.has_key(arg):
return value[arg]
elif numeric_test.match(str(arg)) and len(value) > int(arg):
return value[int(arg)]
else:
return settings.TEMPLATE_STRING_IF_INVALID
register.filter('getattribute', getattribute)
# Then, in template:
# {% load getattribute %}
# {{ object|getattribute:dynamic_string_var }}
|
Call objects methods directly from the templates yay
|
web: Call objects methods directly from the templates yay
|
Python
|
apache-2.0
|
SchoolIdolTomodachi/SchoolIdolAPI,laurenor/SchoolIdolAPI,dburr/SchoolIdolAPI,laurenor/SchoolIdolAPI,rdsathene/SchoolIdolAPI,SchoolIdolTomodachi/SchoolIdolAPI,SchoolIdolTomodachi/SchoolIdolAPI,rdsathene/SchoolIdolAPI,rdsathene/SchoolIdolAPI,dburr/SchoolIdolAPI,dburr/SchoolIdolAPI,laurenor/SchoolIdolAPI
|
import re
from django import template
from django.conf import settings
numeric_test = re.compile("^\d+$")
register = template.Library()
def getattribute(value, arg):
"""Gets an attribute of an object dynamically from a string name"""
if hasattr(value, str(arg)):
+ if callable(getattr(value, arg)):
+ return getattr(value, arg)()
return getattr(value, arg)
elif hasattr(value, 'has_key') and value.has_key(arg):
return value[arg]
elif numeric_test.match(str(arg)) and len(value) > int(arg):
return value[int(arg)]
else:
return settings.TEMPLATE_STRING_IF_INVALID
register.filter('getattribute', getattribute)
# Then, in template:
# {% load getattribute %}
# {{ object|getattribute:dynamic_string_var }}
|
Call objects methods directly from the templates yay
|
## Code Before:
import re
from django import template
from django.conf import settings
numeric_test = re.compile("^\d+$")
register = template.Library()
def getattribute(value, arg):
"""Gets an attribute of an object dynamically from a string name"""
if hasattr(value, str(arg)):
return getattr(value, arg)
elif hasattr(value, 'has_key') and value.has_key(arg):
return value[arg]
elif numeric_test.match(str(arg)) and len(value) > int(arg):
return value[int(arg)]
else:
return settings.TEMPLATE_STRING_IF_INVALID
register.filter('getattribute', getattribute)
# Then, in template:
# {% load getattribute %}
# {{ object|getattribute:dynamic_string_var }}
## Instruction:
Call objects methods directly from the templates yay
## Code After:
import re
from django import template
from django.conf import settings
numeric_test = re.compile("^\d+$")
register = template.Library()
def getattribute(value, arg):
"""Gets an attribute of an object dynamically from a string name"""
if hasattr(value, str(arg)):
if callable(getattr(value, arg)):
return getattr(value, arg)()
return getattr(value, arg)
elif hasattr(value, 'has_key') and value.has_key(arg):
return value[arg]
elif numeric_test.match(str(arg)) and len(value) > int(arg):
return value[int(arg)]
else:
return settings.TEMPLATE_STRING_IF_INVALID
register.filter('getattribute', getattribute)
# Then, in template:
# {% load getattribute %}
# {{ object|getattribute:dynamic_string_var }}
|
import re
from django import template
from django.conf import settings
numeric_test = re.compile("^\d+$")
register = template.Library()
def getattribute(value, arg):
"""Gets an attribute of an object dynamically from a string name"""
if hasattr(value, str(arg)):
+ if callable(getattr(value, arg)):
+ return getattr(value, arg)()
return getattr(value, arg)
elif hasattr(value, 'has_key') and value.has_key(arg):
return value[arg]
elif numeric_test.match(str(arg)) and len(value) > int(arg):
return value[int(arg)]
else:
return settings.TEMPLATE_STRING_IF_INVALID
register.filter('getattribute', getattribute)
# Then, in template:
# {% load getattribute %}
# {{ object|getattribute:dynamic_string_var }}
|
a5c723b589699fdf80c42a4186c2fdc0c8d84bb4
|
tests/sentry/app/tests.py
|
tests/sentry/app/tests.py
|
from __future__ import absolute_import
import mock
from sentry import app
from sentry.testutils import TestCase
class AppTest(TestCase):
def test_buffer_is_a_buffer(self):
from sentry.buffer.base import Buffer
self.assertEquals(type(app.buffer), Buffer)
class GetBufferTest(TestCase):
@mock.patch('sentry.app.import_string')
def test_instantiates_class_with_options(self, import_string):
options = {'hello': 'world'}
path = 'lol.FooBar'
result = app.get_instance(path, options)
import_string.assert_called_once_with(path)
import_string.return_value.assert_called_once_with(**options)
assert result == import_string.return_value.return_value
|
from __future__ import absolute_import
from sentry import app
from sentry.testutils import TestCase
class AppTest(TestCase):
def test_buffer_is_a_buffer(self):
from sentry.buffer.base import Buffer
self.assertEquals(type(app.buffer), Buffer)
|
Remove test that is probably more trouble than it's worth.
|
Remove test that is probably more trouble than it's worth.
|
Python
|
bsd-3-clause
|
JackDanger/sentry,mvaled/sentry,BuildingLink/sentry,alexm92/sentry,alexm92/sentry,mvaled/sentry,gencer/sentry,JamesMura/sentry,ifduyue/sentry,zenefits/sentry,jean/sentry,fotinakis/sentry,gencer/sentry,zenefits/sentry,gencer/sentry,JamesMura/sentry,zenefits/sentry,mvaled/sentry,mvaled/sentry,BuildingLink/sentry,gencer/sentry,beeftornado/sentry,fotinakis/sentry,beeftornado/sentry,mvaled/sentry,fotinakis/sentry,ifduyue/sentry,ifduyue/sentry,alexm92/sentry,looker/sentry,gencer/sentry,looker/sentry,looker/sentry,mvaled/sentry,ifduyue/sentry,BuildingLink/sentry,zenefits/sentry,BuildingLink/sentry,JackDanger/sentry,JamesMura/sentry,jean/sentry,looker/sentry,fotinakis/sentry,BuildingLink/sentry,JackDanger/sentry,JamesMura/sentry,ifduyue/sentry,jean/sentry,JamesMura/sentry,jean/sentry,jean/sentry,looker/sentry,zenefits/sentry,beeftornado/sentry
|
from __future__ import absolute_import
-
- import mock
from sentry import app
from sentry.testutils import TestCase
class AppTest(TestCase):
def test_buffer_is_a_buffer(self):
from sentry.buffer.base import Buffer
self.assertEquals(type(app.buffer), Buffer)
-
- class GetBufferTest(TestCase):
- @mock.patch('sentry.app.import_string')
- def test_instantiates_class_with_options(self, import_string):
- options = {'hello': 'world'}
- path = 'lol.FooBar'
-
- result = app.get_instance(path, options)
-
- import_string.assert_called_once_with(path)
- import_string.return_value.assert_called_once_with(**options)
-
- assert result == import_string.return_value.return_value
-
|
Remove test that is probably more trouble than it's worth.
|
## Code Before:
from __future__ import absolute_import
import mock
from sentry import app
from sentry.testutils import TestCase
class AppTest(TestCase):
def test_buffer_is_a_buffer(self):
from sentry.buffer.base import Buffer
self.assertEquals(type(app.buffer), Buffer)
class GetBufferTest(TestCase):
@mock.patch('sentry.app.import_string')
def test_instantiates_class_with_options(self, import_string):
options = {'hello': 'world'}
path = 'lol.FooBar'
result = app.get_instance(path, options)
import_string.assert_called_once_with(path)
import_string.return_value.assert_called_once_with(**options)
assert result == import_string.return_value.return_value
## Instruction:
Remove test that is probably more trouble than it's worth.
## Code After:
from __future__ import absolute_import
from sentry import app
from sentry.testutils import TestCase
class AppTest(TestCase):
def test_buffer_is_a_buffer(self):
from sentry.buffer.base import Buffer
self.assertEquals(type(app.buffer), Buffer)
|
from __future__ import absolute_import
-
- import mock
from sentry import app
from sentry.testutils import TestCase
class AppTest(TestCase):
def test_buffer_is_a_buffer(self):
from sentry.buffer.base import Buffer
self.assertEquals(type(app.buffer), Buffer)
-
-
- class GetBufferTest(TestCase):
- @mock.patch('sentry.app.import_string')
- def test_instantiates_class_with_options(self, import_string):
- options = {'hello': 'world'}
- path = 'lol.FooBar'
-
- result = app.get_instance(path, options)
-
- import_string.assert_called_once_with(path)
- import_string.return_value.assert_called_once_with(**options)
-
- assert result == import_string.return_value.return_value
|
22dcc9ee23841ecfbb23f76f2f8fd5c5c5bfb8cb
|
app/models.py
|
app/models.py
|
from app import db
class Base(db.Model):
__abstract__ = True
id = db.Column(db.Integer, primary_key=True)
created_at = db.Column(db.DateTime, default=db.func.current_timestamp())
updated_at = db.Column(db.DateTime, default=db.func.current_timestamp())
class Route(Base):
__tablename__ = 'routes'
origin_point = db.Column(db.String(128), nullable=False)
destination_point = db.Column(db.String(128), nullable=False)
distance = db.Column(db.Integer, nullable=False)
def __init__(self, origin_point, destination_point, distance):
self.origin_point = origin_point
self.destination_point = destination_point
self.distance = distance
def __repr__(self):
return '<Route <{0}-{1}-{2]>'.format(self.origin_pint,
self.destination_point,
self.distance)
|
from app import db
class Base(db.Model):
__abstract__ = True
id = db.Column(db.Integer, primary_key=True)
created_at = db.Column(db.DateTime, default=db.func.current_timestamp())
updated_at = db.Column(db.DateTime, default=db.func.current_timestamp())
class Route(Base):
__tablename__ = 'routes'
origin_point = db.Column(db.String(128), nullable=False)
destination_point = db.Column(db.String(128), nullable=False)
distance = db.Column(db.Integer, nullable=False)
def __repr__(self):
return '<Route <{0}-{1}-{2]>'.format(self.origin_pint,
self.destination_point,
self.distance)
|
Remove unecessary initialization from Route model
|
Remove unecessary initialization from Route model
|
Python
|
mit
|
mdsrosa/routes_api_python
|
from app import db
class Base(db.Model):
__abstract__ = True
id = db.Column(db.Integer, primary_key=True)
created_at = db.Column(db.DateTime, default=db.func.current_timestamp())
updated_at = db.Column(db.DateTime, default=db.func.current_timestamp())
class Route(Base):
__tablename__ = 'routes'
origin_point = db.Column(db.String(128), nullable=False)
destination_point = db.Column(db.String(128), nullable=False)
distance = db.Column(db.Integer, nullable=False)
- def __init__(self, origin_point, destination_point, distance):
- self.origin_point = origin_point
- self.destination_point = destination_point
- self.distance = distance
-
def __repr__(self):
return '<Route <{0}-{1}-{2]>'.format(self.origin_pint,
self.destination_point,
self.distance)
|
Remove unecessary initialization from Route model
|
## Code Before:
from app import db
class Base(db.Model):
__abstract__ = True
id = db.Column(db.Integer, primary_key=True)
created_at = db.Column(db.DateTime, default=db.func.current_timestamp())
updated_at = db.Column(db.DateTime, default=db.func.current_timestamp())
class Route(Base):
__tablename__ = 'routes'
origin_point = db.Column(db.String(128), nullable=False)
destination_point = db.Column(db.String(128), nullable=False)
distance = db.Column(db.Integer, nullable=False)
def __init__(self, origin_point, destination_point, distance):
self.origin_point = origin_point
self.destination_point = destination_point
self.distance = distance
def __repr__(self):
return '<Route <{0}-{1}-{2]>'.format(self.origin_pint,
self.destination_point,
self.distance)
## Instruction:
Remove unecessary initialization from Route model
## Code After:
from app import db
class Base(db.Model):
__abstract__ = True
id = db.Column(db.Integer, primary_key=True)
created_at = db.Column(db.DateTime, default=db.func.current_timestamp())
updated_at = db.Column(db.DateTime, default=db.func.current_timestamp())
class Route(Base):
__tablename__ = 'routes'
origin_point = db.Column(db.String(128), nullable=False)
destination_point = db.Column(db.String(128), nullable=False)
distance = db.Column(db.Integer, nullable=False)
def __repr__(self):
return '<Route <{0}-{1}-{2]>'.format(self.origin_pint,
self.destination_point,
self.distance)
|
from app import db
class Base(db.Model):
__abstract__ = True
id = db.Column(db.Integer, primary_key=True)
created_at = db.Column(db.DateTime, default=db.func.current_timestamp())
updated_at = db.Column(db.DateTime, default=db.func.current_timestamp())
class Route(Base):
__tablename__ = 'routes'
origin_point = db.Column(db.String(128), nullable=False)
destination_point = db.Column(db.String(128), nullable=False)
distance = db.Column(db.Integer, nullable=False)
- def __init__(self, origin_point, destination_point, distance):
- self.origin_point = origin_point
- self.destination_point = destination_point
- self.distance = distance
-
def __repr__(self):
return '<Route <{0}-{1}-{2]>'.format(self.origin_pint,
self.destination_point,
self.distance)
|
9541fd723308d51f7c380649a81b4992074a1193
|
workout_manager/urls.py
|
workout_manager/urls.py
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('manager.urls')),
url(r'exercise/', include('exercises.urls')),
url(r'weight/', include('weight.urls')),
url(r'nutrition/', include('nutrition.urls')),
url(r'^browserid/', include('django_browserid.urls')),
)
|
from django.conf.urls import patterns, include, url
from django.conf.urls.i18n import i18n_patterns
from django.contrib import admin
admin.autodiscover()
urlpatterns = i18n_patterns('',
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('manager.urls')),
url(r'exercise/', include('exercises.urls')),
url(r'weight/', include('weight.urls')),
url(r'nutrition/', include('nutrition.urls')),
url(r'^browserid/', include('django_browserid.urls')),
)
|
Append the name of the current language to the URLs
|
Append the name of the current language to the URLs
--HG--
branch : 1.1-dev
|
Python
|
agpl-3.0
|
DeveloperMal/wger,petervanderdoes/wger,DeveloperMal/wger,kjagoo/wger_stark,petervanderdoes/wger,wger-project/wger,kjagoo/wger_stark,rolandgeider/wger,wger-project/wger,DeveloperMal/wger,kjagoo/wger_stark,DeveloperMal/wger,petervanderdoes/wger,wger-project/wger,rolandgeider/wger,rolandgeider/wger,kjagoo/wger_stark,petervanderdoes/wger,rolandgeider/wger,wger-project/wger
|
from django.conf.urls import patterns, include, url
+ from django.conf.urls.i18n import i18n_patterns
from django.contrib import admin
admin.autodiscover()
- urlpatterns = patterns('',
+ urlpatterns = i18n_patterns('',
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('manager.urls')),
url(r'exercise/', include('exercises.urls')),
url(r'weight/', include('weight.urls')),
url(r'nutrition/', include('nutrition.urls')),
url(r'^browserid/', include('django_browserid.urls')),
)
|
Append the name of the current language to the URLs
|
## Code Before:
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('manager.urls')),
url(r'exercise/', include('exercises.urls')),
url(r'weight/', include('weight.urls')),
url(r'nutrition/', include('nutrition.urls')),
url(r'^browserid/', include('django_browserid.urls')),
)
## Instruction:
Append the name of the current language to the URLs
## Code After:
from django.conf.urls import patterns, include, url
from django.conf.urls.i18n import i18n_patterns
from django.contrib import admin
admin.autodiscover()
urlpatterns = i18n_patterns('',
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('manager.urls')),
url(r'exercise/', include('exercises.urls')),
url(r'weight/', include('weight.urls')),
url(r'nutrition/', include('nutrition.urls')),
url(r'^browserid/', include('django_browserid.urls')),
)
|
from django.conf.urls import patterns, include, url
+ from django.conf.urls.i18n import i18n_patterns
from django.contrib import admin
admin.autodiscover()
- urlpatterns = patterns('',
+ urlpatterns = i18n_patterns('',
? +++++
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('manager.urls')),
url(r'exercise/', include('exercises.urls')),
url(r'weight/', include('weight.urls')),
url(r'nutrition/', include('nutrition.urls')),
url(r'^browserid/', include('django_browserid.urls')),
)
|
80bb5557f78268545139cd37f593278e35979fd5
|
corehq/mobile_flags.py
|
corehq/mobile_flags.py
|
from collections import namedtuple
MobileFlag = namedtuple('MobileFlag', 'slug label')
MULTIPLE_APPS_UNLIMITED = MobileFlag(
'multiple_apps_unlimited',
'Enable unlimited multiple apps'
)
|
from collections import namedtuple
MobileFlag = namedtuple('MobileFlag', 'slug label')
MULTIPLE_APPS_UNLIMITED = MobileFlag(
'multiple_apps_unlimited',
'Enable unlimited multiple apps'
)
ADVANCED_SETTINGS_ACCESS = MobileFlag(
'advanced_settings_access',
'Enable access to advanced settings'
)
|
Add Flag enum for settings access
|
Add Flag enum for settings access
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
from collections import namedtuple
MobileFlag = namedtuple('MobileFlag', 'slug label')
MULTIPLE_APPS_UNLIMITED = MobileFlag(
'multiple_apps_unlimited',
'Enable unlimited multiple apps'
)
+ ADVANCED_SETTINGS_ACCESS = MobileFlag(
+ 'advanced_settings_access',
+ 'Enable access to advanced settings'
+ )
+
|
Add Flag enum for settings access
|
## Code Before:
from collections import namedtuple
MobileFlag = namedtuple('MobileFlag', 'slug label')
MULTIPLE_APPS_UNLIMITED = MobileFlag(
'multiple_apps_unlimited',
'Enable unlimited multiple apps'
)
## Instruction:
Add Flag enum for settings access
## Code After:
from collections import namedtuple
MobileFlag = namedtuple('MobileFlag', 'slug label')
MULTIPLE_APPS_UNLIMITED = MobileFlag(
'multiple_apps_unlimited',
'Enable unlimited multiple apps'
)
ADVANCED_SETTINGS_ACCESS = MobileFlag(
'advanced_settings_access',
'Enable access to advanced settings'
)
|
from collections import namedtuple
MobileFlag = namedtuple('MobileFlag', 'slug label')
MULTIPLE_APPS_UNLIMITED = MobileFlag(
'multiple_apps_unlimited',
'Enable unlimited multiple apps'
)
+
+ ADVANCED_SETTINGS_ACCESS = MobileFlag(
+ 'advanced_settings_access',
+ 'Enable access to advanced settings'
+ )
|
46ea832db6db8a98c5b9f5a58a37bfed16a27a10
|
app/actions/peptable/base.py
|
app/actions/peptable/base.py
|
from app.dataformats import peptable as peptabledata
from app.dataformats import mzidtsv as psmtsvdata
def add_peptide(allpeps, psm, scorecol=False, fncol=None, new=False,
track_psms=True):
peptide = {'score': psm[scorecol],
'line': psm,
'psms': []
}
if track_psms:
if not new:
peptide['psms'] = allpeps[psm[peptabledata.HEADER_PEPTIDE]]['psms']
peptide['psms'].append('{0}_{1}'.format(psm[fncol],
psm[psmtsvdata.HEADER_SCANNR]))
allpeps[psm[peptabledata.HEADER_PEPTIDE]] = peptide
def evaluate_peptide(peptides, psm, key, higherbetter, scorecol, fncol=None,
track_psms=True):
try:
existing_score = peptides[key]['score']
except KeyError:
add_peptide(peptides, psm, scorecol, fncol, True, track_psms)
else:
if higherbetter and psm[scorecol] > existing_score:
add_peptide(peptides, psm, scorecol, fncol, track_psms=track_psms)
elif not higherbetter and psm[scorecol] < existing_score:
add_peptide(peptides, psm, scorecol, fncol, track_psms=track_psms)
return peptides
|
from app.dataformats import mzidtsv as psmtsvdata
def add_peptide(allpeps, psm, key, scorecol=False, fncol=None, new=False,
track_psms=True):
peptide = {'score': psm[scorecol],
'line': psm,
'psms': []
}
if track_psms:
if not new:
peptide['psms'] = allpeps[key]['psms']
peptide['psms'].append('{0}_{1}'.format(psm[fncol],
psm[psmtsvdata.HEADER_SCANNR]))
allpeps[key] = peptide
def evaluate_peptide(peptides, psm, key, higherbetter, scorecol, fncol=None,
track_psms=True):
try:
existing_score = peptides[key]['score']
except KeyError:
add_peptide(peptides, psm, key, scorecol, fncol, True, track_psms)
else:
if higherbetter and psm[scorecol] > existing_score:
add_peptide(peptides, psm, key, scorecol, fncol,
track_psms=track_psms)
elif not higherbetter and psm[scorecol] < existing_score:
add_peptide(peptides, psm, key, scorecol, fncol,
track_psms=track_psms)
return peptides
|
Use input param key instead of using HEADER field
|
Use input param key instead of using HEADER field
|
Python
|
mit
|
glormph/msstitch
|
- from app.dataformats import peptable as peptabledata
from app.dataformats import mzidtsv as psmtsvdata
- def add_peptide(allpeps, psm, scorecol=False, fncol=None, new=False,
+ def add_peptide(allpeps, psm, key, scorecol=False, fncol=None, new=False,
track_psms=True):
peptide = {'score': psm[scorecol],
'line': psm,
'psms': []
}
if track_psms:
if not new:
- peptide['psms'] = allpeps[psm[peptabledata.HEADER_PEPTIDE]]['psms']
+ peptide['psms'] = allpeps[key]['psms']
peptide['psms'].append('{0}_{1}'.format(psm[fncol],
psm[psmtsvdata.HEADER_SCANNR]))
- allpeps[psm[peptabledata.HEADER_PEPTIDE]] = peptide
+ allpeps[key] = peptide
def evaluate_peptide(peptides, psm, key, higherbetter, scorecol, fncol=None,
track_psms=True):
try:
existing_score = peptides[key]['score']
except KeyError:
- add_peptide(peptides, psm, scorecol, fncol, True, track_psms)
+ add_peptide(peptides, psm, key, scorecol, fncol, True, track_psms)
else:
if higherbetter and psm[scorecol] > existing_score:
- add_peptide(peptides, psm, scorecol, fncol, track_psms=track_psms)
+ add_peptide(peptides, psm, key, scorecol, fncol,
+ track_psms=track_psms)
elif not higherbetter and psm[scorecol] < existing_score:
- add_peptide(peptides, psm, scorecol, fncol, track_psms=track_psms)
+ add_peptide(peptides, psm, key, scorecol, fncol,
+ track_psms=track_psms)
return peptides
|
Use input param key instead of using HEADER field
|
## Code Before:
from app.dataformats import peptable as peptabledata
from app.dataformats import mzidtsv as psmtsvdata
def add_peptide(allpeps, psm, scorecol=False, fncol=None, new=False,
track_psms=True):
peptide = {'score': psm[scorecol],
'line': psm,
'psms': []
}
if track_psms:
if not new:
peptide['psms'] = allpeps[psm[peptabledata.HEADER_PEPTIDE]]['psms']
peptide['psms'].append('{0}_{1}'.format(psm[fncol],
psm[psmtsvdata.HEADER_SCANNR]))
allpeps[psm[peptabledata.HEADER_PEPTIDE]] = peptide
def evaluate_peptide(peptides, psm, key, higherbetter, scorecol, fncol=None,
track_psms=True):
try:
existing_score = peptides[key]['score']
except KeyError:
add_peptide(peptides, psm, scorecol, fncol, True, track_psms)
else:
if higherbetter and psm[scorecol] > existing_score:
add_peptide(peptides, psm, scorecol, fncol, track_psms=track_psms)
elif not higherbetter and psm[scorecol] < existing_score:
add_peptide(peptides, psm, scorecol, fncol, track_psms=track_psms)
return peptides
## Instruction:
Use input param key instead of using HEADER field
## Code After:
from app.dataformats import mzidtsv as psmtsvdata
def add_peptide(allpeps, psm, key, scorecol=False, fncol=None, new=False,
track_psms=True):
peptide = {'score': psm[scorecol],
'line': psm,
'psms': []
}
if track_psms:
if not new:
peptide['psms'] = allpeps[key]['psms']
peptide['psms'].append('{0}_{1}'.format(psm[fncol],
psm[psmtsvdata.HEADER_SCANNR]))
allpeps[key] = peptide
def evaluate_peptide(peptides, psm, key, higherbetter, scorecol, fncol=None,
track_psms=True):
try:
existing_score = peptides[key]['score']
except KeyError:
add_peptide(peptides, psm, key, scorecol, fncol, True, track_psms)
else:
if higherbetter and psm[scorecol] > existing_score:
add_peptide(peptides, psm, key, scorecol, fncol,
track_psms=track_psms)
elif not higherbetter and psm[scorecol] < existing_score:
add_peptide(peptides, psm, key, scorecol, fncol,
track_psms=track_psms)
return peptides
|
- from app.dataformats import peptable as peptabledata
from app.dataformats import mzidtsv as psmtsvdata
- def add_peptide(allpeps, psm, scorecol=False, fncol=None, new=False,
+ def add_peptide(allpeps, psm, key, scorecol=False, fncol=None, new=False,
? +++++
track_psms=True):
peptide = {'score': psm[scorecol],
'line': psm,
'psms': []
}
if track_psms:
if not new:
- peptide['psms'] = allpeps[psm[peptabledata.HEADER_PEPTIDE]]['psms']
+ peptide['psms'] = allpeps[key]['psms']
peptide['psms'].append('{0}_{1}'.format(psm[fncol],
psm[psmtsvdata.HEADER_SCANNR]))
- allpeps[psm[peptabledata.HEADER_PEPTIDE]] = peptide
+ allpeps[key] = peptide
def evaluate_peptide(peptides, psm, key, higherbetter, scorecol, fncol=None,
track_psms=True):
try:
existing_score = peptides[key]['score']
except KeyError:
- add_peptide(peptides, psm, scorecol, fncol, True, track_psms)
+ add_peptide(peptides, psm, key, scorecol, fncol, True, track_psms)
? +++++
else:
if higherbetter and psm[scorecol] > existing_score:
- add_peptide(peptides, psm, scorecol, fncol, track_psms=track_psms)
? ----------------------
+ add_peptide(peptides, psm, key, scorecol, fncol,
? +++++
+ track_psms=track_psms)
elif not higherbetter and psm[scorecol] < existing_score:
- add_peptide(peptides, psm, scorecol, fncol, track_psms=track_psms)
? ----------------------
+ add_peptide(peptides, psm, key, scorecol, fncol,
? +++++
+ track_psms=track_psms)
return peptides
|
7666a29aafe22a51abfd5aee21b62c71055aea78
|
tests/test_account.py
|
tests/test_account.py
|
# PyTest
import pytest
# lendingclub2
from lendingclub2.account import InvestorAccount
from lendingclub2.error import LCError
class TestInvestorAccount(object):
def test_properties(self):
investor = InvestorAccount()
try:
investor.id()
except LCError:
pytest.skip("skip because cannot find account ID")
assert investor.available_balance >= 0.0
assert investor.total_balance >= 0.0
|
# PyTest
import pytest
# lendingclub2
from lendingclub2.account import InvestorAccount
from lendingclub2.error import LCError
class TestInvestorAccount(object):
def test_properties(self):
try:
investor = InvestorAccount()
except LCError:
pytest.skip("skip because cannot find account ID")
assert investor.available_balance >= 0.0
assert investor.total_balance >= 0.0
|
Fix error in the case when no ID is provided
|
Fix error in the case when no ID is provided
|
Python
|
mit
|
ahartoto/lendingclub2
|
# PyTest
import pytest
# lendingclub2
from lendingclub2.account import InvestorAccount
from lendingclub2.error import LCError
class TestInvestorAccount(object):
def test_properties(self):
- investor = InvestorAccount()
try:
- investor.id()
+ investor = InvestorAccount()
except LCError:
pytest.skip("skip because cannot find account ID")
assert investor.available_balance >= 0.0
assert investor.total_balance >= 0.0
|
Fix error in the case when no ID is provided
|
## Code Before:
# PyTest
import pytest
# lendingclub2
from lendingclub2.account import InvestorAccount
from lendingclub2.error import LCError
class TestInvestorAccount(object):
def test_properties(self):
investor = InvestorAccount()
try:
investor.id()
except LCError:
pytest.skip("skip because cannot find account ID")
assert investor.available_balance >= 0.0
assert investor.total_balance >= 0.0
## Instruction:
Fix error in the case when no ID is provided
## Code After:
# PyTest
import pytest
# lendingclub2
from lendingclub2.account import InvestorAccount
from lendingclub2.error import LCError
class TestInvestorAccount(object):
def test_properties(self):
try:
investor = InvestorAccount()
except LCError:
pytest.skip("skip because cannot find account ID")
assert investor.available_balance >= 0.0
assert investor.total_balance >= 0.0
|
# PyTest
import pytest
# lendingclub2
from lendingclub2.account import InvestorAccount
from lendingclub2.error import LCError
class TestInvestorAccount(object):
def test_properties(self):
- investor = InvestorAccount()
try:
- investor.id()
+ investor = InvestorAccount()
except LCError:
pytest.skip("skip because cannot find account ID")
assert investor.available_balance >= 0.0
assert investor.total_balance >= 0.0
|
1e5ec4a2923757ca79c8a55b056fd13872cac963
|
foyer/tests/test_xml_writer.py
|
foyer/tests/test_xml_writer.py
|
import glob
import itertools as it
import os
import parmed as pmd
from pkg_resources import resource_filename
import pytest
from foyer import Forcefield
from foyer.tests.utils import atomtype
from foyer.xml_writer import write_foyer
def test_write_xml(filename, ff_file):
structure = pmd.loadfile(filename)
forcefield = Forcefield(ff_file)
structure.write_foyer('test.xml', forcefield=forcefield)
def test_load_xml():
structure = pmd.loadfile(filename)
forcefield = Forcefield(ff_file)
structure.write_foyer('test.xml', forcefield=forcefield)
generated_ff = Forcefield('text.xml')
|
import parmed as pmd
import pytest
import os
from pkg_resources import resource_filename
from foyer import Forcefield
from foyer.xml_writer import write_foyer
OPLS_TESTFILES_DIR = resource_filename('foyer', 'opls_validation')
def test_write_xml():
top = os.path.join(OPLS_TESTFILES_DIR, 'benzene/benzene.top')
gro = os.path.join(OPLS_TESTFILES_DIR, 'benzene/benzene.gro')
structure = pmd.load_file(top, xyz=gro)
forcefield = Forcefield(name='oplsaa')
param_struc = forcefield.apply(structure)
param_struc.write_foyer('test.xml', forcefield=forcefield)
def test_load_xml():
top = os.path.join(OPLS_TESTFILES_DIR, 'benzene/benzene.top')
gro = os.path.join(OPLS_TESTFILES_DIR, 'benzene/benzene.gro')
structure = pmd.load_file(top, xyz=gro)
forcefield = Forcefield(name='oplsaa')
param_struc = forcefield.apply(structure)
param_struc.write_foyer('test.xml', forcefield=forcefield)
generated_ff = Forcefield('test.xml')
|
Update xml writer test to work
|
Update xml writer test to work
|
Python
|
mit
|
iModels/foyer,mosdef-hub/foyer,mosdef-hub/foyer,iModels/foyer
|
- import glob
- import itertools as it
+ import parmed as pmd
+ import pytest
import os
- import parmed as pmd
from pkg_resources import resource_filename
- import pytest
-
from foyer import Forcefield
- from foyer.tests.utils import atomtype
from foyer.xml_writer import write_foyer
- def test_write_xml(filename, ff_file):
- structure = pmd.loadfile(filename)
- forcefield = Forcefield(ff_file)
+ OPLS_TESTFILES_DIR = resource_filename('foyer', 'opls_validation')
+
+ def test_write_xml():
+ top = os.path.join(OPLS_TESTFILES_DIR, 'benzene/benzene.top')
+ gro = os.path.join(OPLS_TESTFILES_DIR, 'benzene/benzene.gro')
+ structure = pmd.load_file(top, xyz=gro)
+ forcefield = Forcefield(name='oplsaa')
+ param_struc = forcefield.apply(structure)
+
- structure.write_foyer('test.xml', forcefield=forcefield)
+ param_struc.write_foyer('test.xml', forcefield=forcefield)
def test_load_xml():
- structure = pmd.loadfile(filename)
+ top = os.path.join(OPLS_TESTFILES_DIR, 'benzene/benzene.top')
+ gro = os.path.join(OPLS_TESTFILES_DIR, 'benzene/benzene.gro')
+ structure = pmd.load_file(top, xyz=gro)
- forcefield = Forcefield(ff_file)
+ forcefield = Forcefield(name='oplsaa')
+ param_struc = forcefield.apply(structure)
- structure.write_foyer('test.xml', forcefield=forcefield)
+ param_struc.write_foyer('test.xml', forcefield=forcefield)
- generated_ff = Forcefield('text.xml')
+ generated_ff = Forcefield('test.xml')
|
Update xml writer test to work
|
## Code Before:
import glob
import itertools as it
import os
import parmed as pmd
from pkg_resources import resource_filename
import pytest
from foyer import Forcefield
from foyer.tests.utils import atomtype
from foyer.xml_writer import write_foyer
def test_write_xml(filename, ff_file):
structure = pmd.loadfile(filename)
forcefield = Forcefield(ff_file)
structure.write_foyer('test.xml', forcefield=forcefield)
def test_load_xml():
structure = pmd.loadfile(filename)
forcefield = Forcefield(ff_file)
structure.write_foyer('test.xml', forcefield=forcefield)
generated_ff = Forcefield('text.xml')
## Instruction:
Update xml writer test to work
## Code After:
import parmed as pmd
import pytest
import os
from pkg_resources import resource_filename
from foyer import Forcefield
from foyer.xml_writer import write_foyer
OPLS_TESTFILES_DIR = resource_filename('foyer', 'opls_validation')
def test_write_xml():
top = os.path.join(OPLS_TESTFILES_DIR, 'benzene/benzene.top')
gro = os.path.join(OPLS_TESTFILES_DIR, 'benzene/benzene.gro')
structure = pmd.load_file(top, xyz=gro)
forcefield = Forcefield(name='oplsaa')
param_struc = forcefield.apply(structure)
param_struc.write_foyer('test.xml', forcefield=forcefield)
def test_load_xml():
top = os.path.join(OPLS_TESTFILES_DIR, 'benzene/benzene.top')
gro = os.path.join(OPLS_TESTFILES_DIR, 'benzene/benzene.gro')
structure = pmd.load_file(top, xyz=gro)
forcefield = Forcefield(name='oplsaa')
param_struc = forcefield.apply(structure)
param_struc.write_foyer('test.xml', forcefield=forcefield)
generated_ff = Forcefield('test.xml')
|
- import glob
- import itertools as it
+ import parmed as pmd
+ import pytest
import os
- import parmed as pmd
from pkg_resources import resource_filename
- import pytest
-
from foyer import Forcefield
- from foyer.tests.utils import atomtype
from foyer.xml_writer import write_foyer
- def test_write_xml(filename, ff_file):
- structure = pmd.loadfile(filename)
- forcefield = Forcefield(ff_file)
+ OPLS_TESTFILES_DIR = resource_filename('foyer', 'opls_validation')
+
+ def test_write_xml():
+ top = os.path.join(OPLS_TESTFILES_DIR, 'benzene/benzene.top')
+ gro = os.path.join(OPLS_TESTFILES_DIR, 'benzene/benzene.gro')
+ structure = pmd.load_file(top, xyz=gro)
+ forcefield = Forcefield(name='oplsaa')
+ param_struc = forcefield.apply(structure)
+
- structure.write_foyer('test.xml', forcefield=forcefield)
? ----
+ param_struc.write_foyer('test.xml', forcefield=forcefield)
? +++++++
def test_load_xml():
- structure = pmd.loadfile(filename)
+ top = os.path.join(OPLS_TESTFILES_DIR, 'benzene/benzene.top')
+ gro = os.path.join(OPLS_TESTFILES_DIR, 'benzene/benzene.gro')
+ structure = pmd.load_file(top, xyz=gro)
- forcefield = Forcefield(ff_file)
? ^^^^^ ^
+ forcefield = Forcefield(name='oplsaa')
? + ^^^^^^^^ ^^^^
+ param_struc = forcefield.apply(structure)
- structure.write_foyer('test.xml', forcefield=forcefield)
? ----
+ param_struc.write_foyer('test.xml', forcefield=forcefield)
? +++++++
- generated_ff = Forcefield('text.xml')
? ^
+ generated_ff = Forcefield('test.xml')
? + ^
|
2fb0678363479c790e5a63de8b92a19de3ac2359
|
src/Camera.py
|
src/Camera.py
|
from traits.api import HasTraits, Int, Str, Tuple, Array, Range
class CameraError(Exception):
def __init__(self, msg, cam):
self.msg = msg
self.camera_number = cam
def __str__(self):
return '{0} on camera {1}'.format(self.msg, self.camera_number)
class Camera(HasTraits):
camera_number = Int(-1)
id_string = Str()
resolution = Tuple(Int(), Int())
roi = Tuple(Int(), Int(), Int(), Int())
frame_rate = Range(1, 500, 30)
frame = Array()
def __enter__(self):
self.open()
return self
def __exit__(self, *args):
self.close()
return False # don't suppress exceptions
def open(self):
raise NotImplementedError()
def close(self):
raise NotImplementedError()
def query_frame(self):
raise NotImplementedError()
def find_resolutions(self):
'''
Returns a list of resolution tuples that this camera supports.
'''
# Default: return the camera's own default resolution
return [self.resolution]
def configure(self):
"""Opens a dialog to set the camera's parameters."""
pass
|
from traits.api import HasTraits, Int, Str, Tuple, Array, Range
from traitsui.api import View, Label
class CameraError(Exception):
def __init__(self, msg, cam):
self.msg = msg
self.camera_number = cam
def __str__(self):
return '{0} on camera {1}'.format(self.msg, self.camera_number)
class Camera(HasTraits):
camera_number = Int(-1)
id_string = Str()
resolution = Tuple(Int(), Int())
roi = Tuple(Int(), Int(), Int(), Int())
frame_rate = Range(1, 500, 30)
frame = Array()
# Default configuration panel
view = View(Label('No settings to configure'))
def __enter__(self):
self.open()
return self
def __exit__(self, *args):
self.close()
return False # don't suppress exceptions
def open(self):
raise NotImplementedError()
def close(self):
raise NotImplementedError()
def query_frame(self):
raise NotImplementedError()
def find_resolutions(self):
'''
Returns a list of resolution tuples that this camera supports.
'''
# Default: return the camera's own default resolution
return [self.resolution]
def configure(self):
"""Opens a dialog to set the camera's parameters."""
pass
|
Add default view for camera
|
Add default view for camera
|
Python
|
mit
|
ptomato/Beams
|
from traits.api import HasTraits, Int, Str, Tuple, Array, Range
-
+ from traitsui.api import View, Label
class CameraError(Exception):
def __init__(self, msg, cam):
self.msg = msg
self.camera_number = cam
def __str__(self):
return '{0} on camera {1}'.format(self.msg, self.camera_number)
class Camera(HasTraits):
camera_number = Int(-1)
id_string = Str()
resolution = Tuple(Int(), Int())
roi = Tuple(Int(), Int(), Int(), Int())
frame_rate = Range(1, 500, 30)
frame = Array()
+
+ # Default configuration panel
+ view = View(Label('No settings to configure'))
def __enter__(self):
self.open()
return self
def __exit__(self, *args):
self.close()
return False # don't suppress exceptions
def open(self):
raise NotImplementedError()
def close(self):
raise NotImplementedError()
def query_frame(self):
raise NotImplementedError()
def find_resolutions(self):
'''
Returns a list of resolution tuples that this camera supports.
'''
# Default: return the camera's own default resolution
return [self.resolution]
def configure(self):
"""Opens a dialog to set the camera's parameters."""
pass
|
Add default view for camera
|
## Code Before:
from traits.api import HasTraits, Int, Str, Tuple, Array, Range
class CameraError(Exception):
def __init__(self, msg, cam):
self.msg = msg
self.camera_number = cam
def __str__(self):
return '{0} on camera {1}'.format(self.msg, self.camera_number)
class Camera(HasTraits):
camera_number = Int(-1)
id_string = Str()
resolution = Tuple(Int(), Int())
roi = Tuple(Int(), Int(), Int(), Int())
frame_rate = Range(1, 500, 30)
frame = Array()
def __enter__(self):
self.open()
return self
def __exit__(self, *args):
self.close()
return False # don't suppress exceptions
def open(self):
raise NotImplementedError()
def close(self):
raise NotImplementedError()
def query_frame(self):
raise NotImplementedError()
def find_resolutions(self):
'''
Returns a list of resolution tuples that this camera supports.
'''
# Default: return the camera's own default resolution
return [self.resolution]
def configure(self):
"""Opens a dialog to set the camera's parameters."""
pass
## Instruction:
Add default view for camera
## Code After:
from traits.api import HasTraits, Int, Str, Tuple, Array, Range
from traitsui.api import View, Label
class CameraError(Exception):
def __init__(self, msg, cam):
self.msg = msg
self.camera_number = cam
def __str__(self):
return '{0} on camera {1}'.format(self.msg, self.camera_number)
class Camera(HasTraits):
camera_number = Int(-1)
id_string = Str()
resolution = Tuple(Int(), Int())
roi = Tuple(Int(), Int(), Int(), Int())
frame_rate = Range(1, 500, 30)
frame = Array()
# Default configuration panel
view = View(Label('No settings to configure'))
def __enter__(self):
self.open()
return self
def __exit__(self, *args):
self.close()
return False # don't suppress exceptions
def open(self):
raise NotImplementedError()
def close(self):
raise NotImplementedError()
def query_frame(self):
raise NotImplementedError()
def find_resolutions(self):
'''
Returns a list of resolution tuples that this camera supports.
'''
# Default: return the camera's own default resolution
return [self.resolution]
def configure(self):
"""Opens a dialog to set the camera's parameters."""
pass
|
from traits.api import HasTraits, Int, Str, Tuple, Array, Range
-
+ from traitsui.api import View, Label
class CameraError(Exception):
def __init__(self, msg, cam):
self.msg = msg
self.camera_number = cam
def __str__(self):
return '{0} on camera {1}'.format(self.msg, self.camera_number)
class Camera(HasTraits):
camera_number = Int(-1)
id_string = Str()
resolution = Tuple(Int(), Int())
roi = Tuple(Int(), Int(), Int(), Int())
frame_rate = Range(1, 500, 30)
frame = Array()
+
+ # Default configuration panel
+ view = View(Label('No settings to configure'))
def __enter__(self):
self.open()
return self
def __exit__(self, *args):
self.close()
return False # don't suppress exceptions
def open(self):
raise NotImplementedError()
def close(self):
raise NotImplementedError()
def query_frame(self):
raise NotImplementedError()
def find_resolutions(self):
'''
Returns a list of resolution tuples that this camera supports.
'''
# Default: return the camera's own default resolution
return [self.resolution]
def configure(self):
"""Opens a dialog to set the camera's parameters."""
pass
|
4ca292e53710dd4ef481e7fa5965e22d3f94e65b
|
l10n_br_account_payment_order/models/cnab_return_move_code.py
|
l10n_br_account_payment_order/models/cnab_return_move_code.py
|
from odoo import models, api, fields
class CNABReturnMoveCode(models.Model):
"""
CNAB return code, each Bank can has a list of Codes
"""
_name = 'cnab.return.move.code'
_description = 'CNAB Return Move Code'
name = fields.Char(string='Name')
code = fields.Char(string='Code')
bank_id = fields.Many2one(
string='Bank', comodel_name='res.bank'
)
payment_method_id = fields.Many2one(
'account.payment.method', string='Payment Method'
)
# Fields used to create domain
bank_code_bc = fields.Char(
related='bank_id.code_bc',
)
payment_method_code = fields.Char(related='payment_method_id.code')
@api.multi
def name_get(self):
result = []
for record in self:
result.append((
record.id, '%s - %s' % (
record.code, record.name)
))
return result
|
from odoo import models, api, fields
class CNABReturnMoveCode(models.Model):
"""
CNAB return code, each Bank can has a list of Codes
"""
_name = 'cnab.return.move.code'
_description = 'CNAB Return Move Code'
name = fields.Char(
string='Name',
index=True,
)
code = fields.Char(
string='Code',
index=True,
)
bank_id = fields.Many2one(
string='Bank',
comodel_name='res.bank',
index=True,
)
payment_method_id = fields.Many2one(
comodel_name='account.payment.method',
string='Payment Method',
index=True,
)
# Fields used to create domain
bank_code_bc = fields.Char(
related='bank_id.code_bc',
store=True,
)
payment_method_code = fields.Char(
related='payment_method_id.code',
store=True,
)
@api.multi
def name_get(self):
result = []
for record in self:
result.append((
record.id, '%s - %s' % (
record.code, record.name)
))
return result
|
Index and code improve cnab.return.move.code
|
[REF] Index and code improve cnab.return.move.code
Signed-off-by: Luis Felipe Mileo <[email protected]>
|
Python
|
agpl-3.0
|
akretion/l10n-brazil,OCA/l10n-brazil,akretion/l10n-brazil,akretion/l10n-brazil,OCA/l10n-brazil,OCA/l10n-brazil
|
from odoo import models, api, fields
class CNABReturnMoveCode(models.Model):
"""
CNAB return code, each Bank can has a list of Codes
"""
_name = 'cnab.return.move.code'
_description = 'CNAB Return Move Code'
- name = fields.Char(string='Name')
+ name = fields.Char(
+ string='Name',
+ index=True,
+ )
- code = fields.Char(string='Code')
+ code = fields.Char(
+ string='Code',
+ index=True,
+ )
bank_id = fields.Many2one(
+ string='Bank',
- string='Bank', comodel_name='res.bank'
+ comodel_name='res.bank',
+ index=True,
)
payment_method_id = fields.Many2one(
- 'account.payment.method', string='Payment Method'
+ comodel_name='account.payment.method',
+ string='Payment Method',
+ index=True,
)
# Fields used to create domain
bank_code_bc = fields.Char(
related='bank_id.code_bc',
+ store=True,
)
- payment_method_code = fields.Char(related='payment_method_id.code')
+ payment_method_code = fields.Char(
+ related='payment_method_id.code',
+ store=True,
+ )
@api.multi
def name_get(self):
result = []
for record in self:
result.append((
record.id, '%s - %s' % (
record.code, record.name)
))
return result
|
Index and code improve cnab.return.move.code
|
## Code Before:
from odoo import models, api, fields
class CNABReturnMoveCode(models.Model):
"""
CNAB return code, each Bank can has a list of Codes
"""
_name = 'cnab.return.move.code'
_description = 'CNAB Return Move Code'
name = fields.Char(string='Name')
code = fields.Char(string='Code')
bank_id = fields.Many2one(
string='Bank', comodel_name='res.bank'
)
payment_method_id = fields.Many2one(
'account.payment.method', string='Payment Method'
)
# Fields used to create domain
bank_code_bc = fields.Char(
related='bank_id.code_bc',
)
payment_method_code = fields.Char(related='payment_method_id.code')
@api.multi
def name_get(self):
result = []
for record in self:
result.append((
record.id, '%s - %s' % (
record.code, record.name)
))
return result
## Instruction:
Index and code improve cnab.return.move.code
## Code After:
from odoo import models, api, fields
class CNABReturnMoveCode(models.Model):
"""
CNAB return code, each Bank can has a list of Codes
"""
_name = 'cnab.return.move.code'
_description = 'CNAB Return Move Code'
name = fields.Char(
string='Name',
index=True,
)
code = fields.Char(
string='Code',
index=True,
)
bank_id = fields.Many2one(
string='Bank',
comodel_name='res.bank',
index=True,
)
payment_method_id = fields.Many2one(
comodel_name='account.payment.method',
string='Payment Method',
index=True,
)
# Fields used to create domain
bank_code_bc = fields.Char(
related='bank_id.code_bc',
store=True,
)
payment_method_code = fields.Char(
related='payment_method_id.code',
store=True,
)
@api.multi
def name_get(self):
result = []
for record in self:
result.append((
record.id, '%s - %s' % (
record.code, record.name)
))
return result
|
from odoo import models, api, fields
class CNABReturnMoveCode(models.Model):
"""
CNAB return code, each Bank can has a list of Codes
"""
_name = 'cnab.return.move.code'
_description = 'CNAB Return Move Code'
- name = fields.Char(string='Name')
? --------------
+ name = fields.Char(
+ string='Name',
+ index=True,
+ )
- code = fields.Char(string='Code')
? --------------
+ code = fields.Char(
+ string='Code',
+ index=True,
+ )
bank_id = fields.Many2one(
+ string='Bank',
- string='Bank', comodel_name='res.bank'
? ---------------
+ comodel_name='res.bank',
? +
+ index=True,
)
payment_method_id = fields.Many2one(
- 'account.payment.method', string='Payment Method'
+ comodel_name='account.payment.method',
+ string='Payment Method',
+ index=True,
)
# Fields used to create domain
bank_code_bc = fields.Char(
related='bank_id.code_bc',
+ store=True,
)
- payment_method_code = fields.Char(related='payment_method_id.code')
+ payment_method_code = fields.Char(
+ related='payment_method_id.code',
+ store=True,
+ )
@api.multi
def name_get(self):
result = []
for record in self:
result.append((
record.id, '%s - %s' % (
record.code, record.name)
))
return result
|
19e0deeb65a4e66e5ab623702701d82f1994d594
|
world_population.py
|
world_population.py
|
import json
#load data onto a list
filename = 'population_data.json'
with open(filename) as f:
pop_data = json.load(f)
#print the 2010 population for each country
for pop_dict in pop_data:
if pop_dict['Year'] == '2010':
country_name = pop_dict['Country Name']
population = pop_dict['Value']
print(country_name + ": " + population)
|
import json
#load data onto a list
filename = 'population_data.json'
with open(filename) as f:
pop_data = json.load(f)
#print the 2010 population for each country
for pop_dict in pop_data:
if pop_dict['Year'] == '2010':
country_name = pop_dict['Country Name']
population = int(float(pop_dict['Value']))
print(country_name + ": " + str(population))
|
Convert Strings into Numerical Values
|
Convert Strings into Numerical Values
|
Python
|
mit
|
4bic-attic/data_viz
|
import json
#load data onto a list
filename = 'population_data.json'
with open(filename) as f:
pop_data = json.load(f)
#print the 2010 population for each country
for pop_dict in pop_data:
if pop_dict['Year'] == '2010':
country_name = pop_dict['Country Name']
- population = pop_dict['Value']
+ population = int(float(pop_dict['Value']))
- print(country_name + ": " + population)
+ print(country_name + ": " + str(population))
|
Convert Strings into Numerical Values
|
## Code Before:
import json
#load data onto a list
filename = 'population_data.json'
with open(filename) as f:
pop_data = json.load(f)
#print the 2010 population for each country
for pop_dict in pop_data:
if pop_dict['Year'] == '2010':
country_name = pop_dict['Country Name']
population = pop_dict['Value']
print(country_name + ": " + population)
## Instruction:
Convert Strings into Numerical Values
## Code After:
import json
#load data onto a list
filename = 'population_data.json'
with open(filename) as f:
pop_data = json.load(f)
#print the 2010 population for each country
for pop_dict in pop_data:
if pop_dict['Year'] == '2010':
country_name = pop_dict['Country Name']
population = int(float(pop_dict['Value']))
print(country_name + ": " + str(population))
|
import json
#load data onto a list
filename = 'population_data.json'
with open(filename) as f:
pop_data = json.load(f)
#print the 2010 population for each country
for pop_dict in pop_data:
if pop_dict['Year'] == '2010':
country_name = pop_dict['Country Name']
- population = pop_dict['Value']
+ population = int(float(pop_dict['Value']))
? ++++++++++ ++
- print(country_name + ": " + population)
+ print(country_name + ": " + str(population))
? ++++ +
|
2a3fe3b5e08c91ab8d77569b02b36da63909f619
|
pysnmp/hlapi/v1arch/asyncore/sync/__init__.py
|
pysnmp/hlapi/v1arch/asyncore/sync/__init__.py
|
from pysnmp.proto.rfc1902 import *
from pysnmp.smi.rfc1902 import *
from pysnmp.hlapi.v1arch.auth import *
from pysnmp.hlapi.v1arch.asyncore.transport import *
from pysnmp.hlapi.v1arch.asyncore.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.ntforg import *
from pysnmp.hlapi.v1arch.asyncore.dispatch import *
try:
from pysnmp.hlapi.v1arch.asyncore.sync.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.sync.ntforg import *
except SyntaxError:
from pysnmp.hlapi.v1arch.asyncore.sync.compat.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.sync.compat.ntforg import *
|
from pysnmp.proto.rfc1902 import *
from pysnmp.smi.rfc1902 import *
from pysnmp.hlapi.v1arch.auth import *
from pysnmp.hlapi.v1arch.asyncore.transport import *
from pysnmp.hlapi.v1arch.asyncore.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.dispatch import *
from pysnmp.hlapi.v1arch.asyncore.ntforg import *
from pysnmp.hlapi.v1arch.asyncore.sync.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.sync.ntforg import *
|
Remove the remnants of hlapi.v1arch.asyncore.sync.compat
|
Remove the remnants of hlapi.v1arch.asyncore.sync.compat
|
Python
|
bsd-2-clause
|
etingof/pysnmp,etingof/pysnmp
|
from pysnmp.proto.rfc1902 import *
from pysnmp.smi.rfc1902 import *
from pysnmp.hlapi.v1arch.auth import *
from pysnmp.hlapi.v1arch.asyncore.transport import *
from pysnmp.hlapi.v1arch.asyncore.cmdgen import *
+ from pysnmp.hlapi.v1arch.asyncore.dispatch import *
from pysnmp.hlapi.v1arch.asyncore.ntforg import *
+ from pysnmp.hlapi.v1arch.asyncore.sync.cmdgen import *
- from pysnmp.hlapi.v1arch.asyncore.dispatch import *
+ from pysnmp.hlapi.v1arch.asyncore.sync.ntforg import *
- try:
- from pysnmp.hlapi.v1arch.asyncore.sync.cmdgen import *
- from pysnmp.hlapi.v1arch.asyncore.sync.ntforg import *
-
- except SyntaxError:
- from pysnmp.hlapi.v1arch.asyncore.sync.compat.cmdgen import *
- from pysnmp.hlapi.v1arch.asyncore.sync.compat.ntforg import *
-
|
Remove the remnants of hlapi.v1arch.asyncore.sync.compat
|
## Code Before:
from pysnmp.proto.rfc1902 import *
from pysnmp.smi.rfc1902 import *
from pysnmp.hlapi.v1arch.auth import *
from pysnmp.hlapi.v1arch.asyncore.transport import *
from pysnmp.hlapi.v1arch.asyncore.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.ntforg import *
from pysnmp.hlapi.v1arch.asyncore.dispatch import *
try:
from pysnmp.hlapi.v1arch.asyncore.sync.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.sync.ntforg import *
except SyntaxError:
from pysnmp.hlapi.v1arch.asyncore.sync.compat.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.sync.compat.ntforg import *
## Instruction:
Remove the remnants of hlapi.v1arch.asyncore.sync.compat
## Code After:
from pysnmp.proto.rfc1902 import *
from pysnmp.smi.rfc1902 import *
from pysnmp.hlapi.v1arch.auth import *
from pysnmp.hlapi.v1arch.asyncore.transport import *
from pysnmp.hlapi.v1arch.asyncore.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.dispatch import *
from pysnmp.hlapi.v1arch.asyncore.ntforg import *
from pysnmp.hlapi.v1arch.asyncore.sync.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.sync.ntforg import *
|
from pysnmp.proto.rfc1902 import *
from pysnmp.smi.rfc1902 import *
from pysnmp.hlapi.v1arch.auth import *
from pysnmp.hlapi.v1arch.asyncore.transport import *
from pysnmp.hlapi.v1arch.asyncore.cmdgen import *
+ from pysnmp.hlapi.v1arch.asyncore.dispatch import *
from pysnmp.hlapi.v1arch.asyncore.ntforg import *
- from pysnmp.hlapi.v1arch.asyncore.dispatch import *
-
- try:
- from pysnmp.hlapi.v1arch.asyncore.sync.cmdgen import *
? ----
+ from pysnmp.hlapi.v1arch.asyncore.sync.cmdgen import *
- from pysnmp.hlapi.v1arch.asyncore.sync.ntforg import *
? ----
+ from pysnmp.hlapi.v1arch.asyncore.sync.ntforg import *
-
- except SyntaxError:
- from pysnmp.hlapi.v1arch.asyncore.sync.compat.cmdgen import *
- from pysnmp.hlapi.v1arch.asyncore.sync.compat.ntforg import *
|
305969cedb966d1e5cd340d531727bb984ac35a8
|
whitenoise/generators/sqlalchemy.py
|
whitenoise/generators/sqlalchemy.py
|
import random
from whitenoise.generators import BaseGenerator
class SelectGenerator(BaseGenerator):
'''
Creates a value by selecting from another SQLAlchemy table
Depends on SQLAlchemy, and receiving a session object from the Fixture runner
the SQLAlchemy fixture runner handles this for us
Receives the name of another class to lookup. If the
query returns more than one option, either random or the 1st is selected
(default is random)
'''
def __init__(self, model, random=True, *args, **kwargs):
super().__init__(*args, **kwargs)
self.session = None
self.model = model
self.random = random
def generate(self):
if(self.session is None):
raise ValueError('You must set the session property before using this generator')
_query = self.session.query(self.model).all()
if self.random:
return random.SystemRandom().choice(_query)
else:
return _query[0]
|
import random
from whitenoise.generators import BaseGenerator
class SelectGenerator(BaseGenerator):
'''
Creates a value by selecting from another SQLAlchemy table
Depends on SQLAlchemy, and receiving a session object from the Fixture runner
the SQLAlchemy fixture runner handles this for us
Receives the name of another class to lookup. If the
query returns more than one option, either random or the 1st is selected
(default is random)
'''
def __init__(self, model, random=True, *args, **kwargs):
super().__init__(*args, **kwargs)
self.session = None
self.model = model
self.random = random
def generate(self):
if(self.session is None):
raise ValueError('You must set the session property before using this generator')
_query = self.session.query(self.model).all()
if self.random:
return random.SystemRandom().choice(_query)
else:
return _query[0]
class LinkGenerator(BaseGenerator):
'''
Creates a list for secondary relationships using link tables by selecting from another SQLAlchemy table
Depends on SQLAlchemy, and receiving a session object from the Fixture runner
the SQLAlchemy fixture runner handles this for us
Receives the name of another class to lookup. If the
query returns more than one option, either random or the 1st is selected
(default is random)
'''
def __init__(self, model, max_map, random=True, *args, **kwargs):
super().__init__(*args, **kwargs)
self.session = None
self.model = model
self.random = random
self.max_map = max_map
def generate(self):
if(self.session is None):
raise ValueError('You must set the session property before using this generator')
_query = self.session.query(self.model).all()
if self.random:
return random.SystemRandom().sample(_query,random.randint(1, max_map))
else:
return [_query[0]]
|
Add a generator for association tables
|
Add a generator for association tables
|
Python
|
mit
|
James1345/white-noise
|
import random
from whitenoise.generators import BaseGenerator
class SelectGenerator(BaseGenerator):
'''
Creates a value by selecting from another SQLAlchemy table
Depends on SQLAlchemy, and receiving a session object from the Fixture runner
the SQLAlchemy fixture runner handles this for us
Receives the name of another class to lookup. If the
query returns more than one option, either random or the 1st is selected
(default is random)
'''
def __init__(self, model, random=True, *args, **kwargs):
super().__init__(*args, **kwargs)
self.session = None
self.model = model
self.random = random
def generate(self):
if(self.session is None):
raise ValueError('You must set the session property before using this generator')
_query = self.session.query(self.model).all()
if self.random:
return random.SystemRandom().choice(_query)
else:
return _query[0]
+ class LinkGenerator(BaseGenerator):
+ '''
+ Creates a list for secondary relationships using link tables by selecting from another SQLAlchemy table
+ Depends on SQLAlchemy, and receiving a session object from the Fixture runner
+ the SQLAlchemy fixture runner handles this for us
+ Receives the name of another class to lookup. If the
+ query returns more than one option, either random or the 1st is selected
+ (default is random)
+ '''
+ def __init__(self, model, max_map, random=True, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self.session = None
+ self.model = model
+ self.random = random
+ self.max_map = max_map
+
+ def generate(self):
+ if(self.session is None):
+ raise ValueError('You must set the session property before using this generator')
+ _query = self.session.query(self.model).all()
+ if self.random:
+ return random.SystemRandom().sample(_query,random.randint(1, max_map))
+ else:
+ return [_query[0]]
|
Add a generator for association tables
|
## Code Before:
import random
from whitenoise.generators import BaseGenerator
class SelectGenerator(BaseGenerator):
'''
Creates a value by selecting from another SQLAlchemy table
Depends on SQLAlchemy, and receiving a session object from the Fixture runner
the SQLAlchemy fixture runner handles this for us
Receives the name of another class to lookup. If the
query returns more than one option, either random or the 1st is selected
(default is random)
'''
def __init__(self, model, random=True, *args, **kwargs):
super().__init__(*args, **kwargs)
self.session = None
self.model = model
self.random = random
def generate(self):
if(self.session is None):
raise ValueError('You must set the session property before using this generator')
_query = self.session.query(self.model).all()
if self.random:
return random.SystemRandom().choice(_query)
else:
return _query[0]
## Instruction:
Add a generator for association tables
## Code After:
import random
from whitenoise.generators import BaseGenerator
class SelectGenerator(BaseGenerator):
'''
Creates a value by selecting from another SQLAlchemy table
Depends on SQLAlchemy, and receiving a session object from the Fixture runner
the SQLAlchemy fixture runner handles this for us
Receives the name of another class to lookup. If the
query returns more than one option, either random or the 1st is selected
(default is random)
'''
def __init__(self, model, random=True, *args, **kwargs):
super().__init__(*args, **kwargs)
self.session = None
self.model = model
self.random = random
def generate(self):
if(self.session is None):
raise ValueError('You must set the session property before using this generator')
_query = self.session.query(self.model).all()
if self.random:
return random.SystemRandom().choice(_query)
else:
return _query[0]
class LinkGenerator(BaseGenerator):
'''
Creates a list for secondary relationships using link tables by selecting from another SQLAlchemy table
Depends on SQLAlchemy, and receiving a session object from the Fixture runner
the SQLAlchemy fixture runner handles this for us
Receives the name of another class to lookup. If the
query returns more than one option, either random or the 1st is selected
(default is random)
'''
def __init__(self, model, max_map, random=True, *args, **kwargs):
super().__init__(*args, **kwargs)
self.session = None
self.model = model
self.random = random
self.max_map = max_map
def generate(self):
if(self.session is None):
raise ValueError('You must set the session property before using this generator')
_query = self.session.query(self.model).all()
if self.random:
return random.SystemRandom().sample(_query,random.randint(1, max_map))
else:
return [_query[0]]
|
import random
from whitenoise.generators import BaseGenerator
class SelectGenerator(BaseGenerator):
'''
Creates a value by selecting from another SQLAlchemy table
Depends on SQLAlchemy, and receiving a session object from the Fixture runner
the SQLAlchemy fixture runner handles this for us
Receives the name of another class to lookup. If the
query returns more than one option, either random or the 1st is selected
(default is random)
'''
def __init__(self, model, random=True, *args, **kwargs):
super().__init__(*args, **kwargs)
self.session = None
self.model = model
self.random = random
def generate(self):
if(self.session is None):
raise ValueError('You must set the session property before using this generator')
_query = self.session.query(self.model).all()
if self.random:
return random.SystemRandom().choice(_query)
else:
return _query[0]
+
+ class LinkGenerator(BaseGenerator):
+ '''
+ Creates a list for secondary relationships using link tables by selecting from another SQLAlchemy table
+ Depends on SQLAlchemy, and receiving a session object from the Fixture runner
+ the SQLAlchemy fixture runner handles this for us
+ Receives the name of another class to lookup. If the
+ query returns more than one option, either random or the 1st is selected
+ (default is random)
+ '''
+ def __init__(self, model, max_map, random=True, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self.session = None
+ self.model = model
+ self.random = random
+ self.max_map = max_map
+
+ def generate(self):
+ if(self.session is None):
+ raise ValueError('You must set the session property before using this generator')
+ _query = self.session.query(self.model).all()
+ if self.random:
+ return random.SystemRandom().sample(_query,random.randint(1, max_map))
+ else:
+ return [_query[0]]
|
c34817c2740e860493692b630a11fdb7acab76aa
|
tests/test_simple_features.py
|
tests/test_simple_features.py
|
from wordgraph.points import Point
import wordgraph
EPOCH_START = 1407109280
def time_values(values, start=EPOCH_START, increment=1):
datapoints = []
for index, value in enumerate(values):
datapoints.append(Point(x=value, y=start + (increment * index)))
return datapoints
def test_monotonic_up_per_second():
datapoints = time_values(float(i) for i in range(POINTS))
features = wordgraph.describe(datapoints)
assert "" in features
|
from wordgraph.points import Point
import wordgraph
EPOCH_START = 1407109280
def time_values(values, start=EPOCH_START, increment=1):
datapoints = []
for index, value in enumerate(values):
datapoints.append(Point(x=value, y=start + (increment * index)))
return datapoints
def test_monotonic_up_per_second():
datapoints = time_values(float(i) for i in range(10))
features = wordgraph.describe(datapoints)
assert "" in features
def test_monotonic_down_per_second():
datapoints = time_values(10.0 - i for i in range(10))
features = wordgraph.describe(datapoints)
assert "" in features
|
Test case for monotonically decreasing graphs
|
Test case for monotonically decreasing graphs
Generate time series data for values that decrease monotonically over
time.
|
Python
|
apache-2.0
|
tleeuwenburg/wordgraph,tleeuwenburg/wordgraph
|
from wordgraph.points import Point
import wordgraph
EPOCH_START = 1407109280
def time_values(values, start=EPOCH_START, increment=1):
datapoints = []
for index, value in enumerate(values):
datapoints.append(Point(x=value, y=start + (increment * index)))
return datapoints
def test_monotonic_up_per_second():
- datapoints = time_values(float(i) for i in range(POINTS))
+ datapoints = time_values(float(i) for i in range(10))
features = wordgraph.describe(datapoints)
assert "" in features
+ def test_monotonic_down_per_second():
+ datapoints = time_values(10.0 - i for i in range(10))
+ features = wordgraph.describe(datapoints)
+ assert "" in features
+
|
Test case for monotonically decreasing graphs
|
## Code Before:
from wordgraph.points import Point
import wordgraph
EPOCH_START = 1407109280
def time_values(values, start=EPOCH_START, increment=1):
datapoints = []
for index, value in enumerate(values):
datapoints.append(Point(x=value, y=start + (increment * index)))
return datapoints
def test_monotonic_up_per_second():
datapoints = time_values(float(i) for i in range(POINTS))
features = wordgraph.describe(datapoints)
assert "" in features
## Instruction:
Test case for monotonically decreasing graphs
## Code After:
from wordgraph.points import Point
import wordgraph
EPOCH_START = 1407109280
def time_values(values, start=EPOCH_START, increment=1):
datapoints = []
for index, value in enumerate(values):
datapoints.append(Point(x=value, y=start + (increment * index)))
return datapoints
def test_monotonic_up_per_second():
datapoints = time_values(float(i) for i in range(10))
features = wordgraph.describe(datapoints)
assert "" in features
def test_monotonic_down_per_second():
datapoints = time_values(10.0 - i for i in range(10))
features = wordgraph.describe(datapoints)
assert "" in features
|
from wordgraph.points import Point
import wordgraph
EPOCH_START = 1407109280
def time_values(values, start=EPOCH_START, increment=1):
datapoints = []
for index, value in enumerate(values):
datapoints.append(Point(x=value, y=start + (increment * index)))
return datapoints
def test_monotonic_up_per_second():
- datapoints = time_values(float(i) for i in range(POINTS))
? ^^^^^^
+ datapoints = time_values(float(i) for i in range(10))
? ^^
features = wordgraph.describe(datapoints)
assert "" in features
+
+ def test_monotonic_down_per_second():
+ datapoints = time_values(10.0 - i for i in range(10))
+ features = wordgraph.describe(datapoints)
+ assert "" in features
|
dac4ef0e30fb5dd26ef41eb74854919cf5295450
|
subprocrunner/error.py
|
subprocrunner/error.py
|
from __future__ import absolute_import, unicode_literals
class CommandError(Exception):
@property
def errno(self):
return self.__errno
def __init__(self, *args, **kwargs):
self.__errno = kwargs.pop("errno", None)
super(CommandError, self).__init__(*args, **kwargs)
class InvalidCommandError(CommandError):
# Deprecate in the future
pass
class CommandNotFoundError(CommandError):
# Deprecate in the future
pass
|
from __future__ import absolute_import, unicode_literals
class CommandError(Exception):
@property
def cmd(self):
return self.__cmd
@property
def errno(self):
return self.__errno
def __init__(self, *args, **kwargs):
self.__cmd = kwargs.pop("cmd", None)
self.__errno = kwargs.pop("errno", None)
super(CommandError, self).__init__(*args, **kwargs)
class InvalidCommandError(CommandError):
# Deprecate in the future
pass
class CommandNotFoundError(CommandError):
# Deprecate in the future
pass
|
Add a property to an exception class
|
Add a property to an exception class
|
Python
|
mit
|
thombashi/subprocrunner,thombashi/subprocrunner
|
from __future__ import absolute_import, unicode_literals
class CommandError(Exception):
@property
+ def cmd(self):
+ return self.__cmd
+
+ @property
def errno(self):
return self.__errno
def __init__(self, *args, **kwargs):
+ self.__cmd = kwargs.pop("cmd", None)
self.__errno = kwargs.pop("errno", None)
super(CommandError, self).__init__(*args, **kwargs)
class InvalidCommandError(CommandError):
# Deprecate in the future
pass
class CommandNotFoundError(CommandError):
# Deprecate in the future
pass
|
Add a property to an exception class
|
## Code Before:
from __future__ import absolute_import, unicode_literals
class CommandError(Exception):
@property
def errno(self):
return self.__errno
def __init__(self, *args, **kwargs):
self.__errno = kwargs.pop("errno", None)
super(CommandError, self).__init__(*args, **kwargs)
class InvalidCommandError(CommandError):
# Deprecate in the future
pass
class CommandNotFoundError(CommandError):
# Deprecate in the future
pass
## Instruction:
Add a property to an exception class
## Code After:
from __future__ import absolute_import, unicode_literals
class CommandError(Exception):
@property
def cmd(self):
return self.__cmd
@property
def errno(self):
return self.__errno
def __init__(self, *args, **kwargs):
self.__cmd = kwargs.pop("cmd", None)
self.__errno = kwargs.pop("errno", None)
super(CommandError, self).__init__(*args, **kwargs)
class InvalidCommandError(CommandError):
# Deprecate in the future
pass
class CommandNotFoundError(CommandError):
# Deprecate in the future
pass
|
from __future__ import absolute_import, unicode_literals
class CommandError(Exception):
@property
+ def cmd(self):
+ return self.__cmd
+
+ @property
def errno(self):
return self.__errno
def __init__(self, *args, **kwargs):
+ self.__cmd = kwargs.pop("cmd", None)
self.__errno = kwargs.pop("errno", None)
super(CommandError, self).__init__(*args, **kwargs)
class InvalidCommandError(CommandError):
# Deprecate in the future
pass
class CommandNotFoundError(CommandError):
# Deprecate in the future
pass
|
839d884d3dca3e799a235b1d2d69acf998f520f9
|
barsystem_base/management/commands/import_people.py
|
barsystem_base/management/commands/import_people.py
|
from django.core.management.base import BaseCommand, CommandError
from barsystem_base.models import Person
class Command(BaseCommand):
args = '<filename>'
help = 'Import list of people'
csv_columns = 'id,first_name,last_name,nick_name,amount,type,token'.split(',')
def handle(self, *args, **kwargs):
if len(args) == 0:
raise CommandError('Please supply filename')
with open(args[0], 'r') as f:
columns = None
for line in [line.strip().split(',') for line in f.readlines() if line[0] != '#']:
# print(line)
# take header
if columns is None:
columns = line
continue
values = dict(zip(columns, line))
values['active'] = values['type'] != 'hidden'
try:
p = Person.objects.get(id=values['id'])
except Person.DoesNotExist:
p = Person()
for key, val in values.items():
if hasattr(p, key):
setattr(p, key, val)
print(p)
p.save()
print('Done')
|
from django.core.management.base import BaseCommand, CommandError
from barsystem_base.models import Person, Token
class Command(BaseCommand):
args = '<filename>'
help = 'Import list of people'
csv_columns = 'id,first_name,last_name,nick_name,amount,type,token'.split(',')
def handle(self, *args, **kwargs):
if len(args) == 0:
raise CommandError('Please supply filename')
with open(args[0], 'r') as f:
columns = None
for line in [line.strip().split(',') for line in f.readlines() if line[0] != '#']:
# print(line)
# take header
if columns is None:
columns = line
continue
values = dict(zip(columns, line))
values['active'] = values['type'] != 'hidden'
try:
p = Person.objects.get(id=values['id'])
except Person.DoesNotExist:
p = Person()
for key, val in values.items():
if hasattr(p, key):
setattr(p, key, val)
print(p)
p.save()
t = Token()
t.type = 'ibutton'
t.value = values['token']
t.person = p
t.save()
print('Done')
|
Add ibutton when importing old people
|
Add ibutton when importing old people
|
Python
|
mit
|
TkkrLab/barsystem,TkkrLab/barsystem,TkkrLab/barsystem
|
from django.core.management.base import BaseCommand, CommandError
- from barsystem_base.models import Person
+ from barsystem_base.models import Person, Token
class Command(BaseCommand):
args = '<filename>'
help = 'Import list of people'
csv_columns = 'id,first_name,last_name,nick_name,amount,type,token'.split(',')
def handle(self, *args, **kwargs):
if len(args) == 0:
raise CommandError('Please supply filename')
with open(args[0], 'r') as f:
columns = None
for line in [line.strip().split(',') for line in f.readlines() if line[0] != '#']:
# print(line)
# take header
if columns is None:
columns = line
continue
values = dict(zip(columns, line))
values['active'] = values['type'] != 'hidden'
try:
p = Person.objects.get(id=values['id'])
except Person.DoesNotExist:
p = Person()
for key, val in values.items():
if hasattr(p, key):
setattr(p, key, val)
print(p)
p.save()
+ t = Token()
+ t.type = 'ibutton'
+ t.value = values['token']
+ t.person = p
+ t.save()
print('Done')
|
Add ibutton when importing old people
|
## Code Before:
from django.core.management.base import BaseCommand, CommandError
from barsystem_base.models import Person
class Command(BaseCommand):
args = '<filename>'
help = 'Import list of people'
csv_columns = 'id,first_name,last_name,nick_name,amount,type,token'.split(',')
def handle(self, *args, **kwargs):
if len(args) == 0:
raise CommandError('Please supply filename')
with open(args[0], 'r') as f:
columns = None
for line in [line.strip().split(',') for line in f.readlines() if line[0] != '#']:
# print(line)
# take header
if columns is None:
columns = line
continue
values = dict(zip(columns, line))
values['active'] = values['type'] != 'hidden'
try:
p = Person.objects.get(id=values['id'])
except Person.DoesNotExist:
p = Person()
for key, val in values.items():
if hasattr(p, key):
setattr(p, key, val)
print(p)
p.save()
print('Done')
## Instruction:
Add ibutton when importing old people
## Code After:
from django.core.management.base import BaseCommand, CommandError
from barsystem_base.models import Person, Token
class Command(BaseCommand):
args = '<filename>'
help = 'Import list of people'
csv_columns = 'id,first_name,last_name,nick_name,amount,type,token'.split(',')
def handle(self, *args, **kwargs):
if len(args) == 0:
raise CommandError('Please supply filename')
with open(args[0], 'r') as f:
columns = None
for line in [line.strip().split(',') for line in f.readlines() if line[0] != '#']:
# print(line)
# take header
if columns is None:
columns = line
continue
values = dict(zip(columns, line))
values['active'] = values['type'] != 'hidden'
try:
p = Person.objects.get(id=values['id'])
except Person.DoesNotExist:
p = Person()
for key, val in values.items():
if hasattr(p, key):
setattr(p, key, val)
print(p)
p.save()
t = Token()
t.type = 'ibutton'
t.value = values['token']
t.person = p
t.save()
print('Done')
|
from django.core.management.base import BaseCommand, CommandError
- from barsystem_base.models import Person
+ from barsystem_base.models import Person, Token
? +++++++
class Command(BaseCommand):
args = '<filename>'
help = 'Import list of people'
csv_columns = 'id,first_name,last_name,nick_name,amount,type,token'.split(',')
def handle(self, *args, **kwargs):
if len(args) == 0:
raise CommandError('Please supply filename')
with open(args[0], 'r') as f:
columns = None
for line in [line.strip().split(',') for line in f.readlines() if line[0] != '#']:
# print(line)
# take header
if columns is None:
columns = line
continue
values = dict(zip(columns, line))
values['active'] = values['type'] != 'hidden'
try:
p = Person.objects.get(id=values['id'])
except Person.DoesNotExist:
p = Person()
for key, val in values.items():
if hasattr(p, key):
setattr(p, key, val)
print(p)
p.save()
+ t = Token()
+ t.type = 'ibutton'
+ t.value = values['token']
+ t.person = p
+ t.save()
print('Done')
|
01a71f10f94d9e7b7c90d19540df8015455ae2ad
|
commands/say.py
|
commands/say.py
|
from CommandTemplate import CommandTemplate
class Command(CommandTemplate):
triggers = ['say', 'do']
helptext = "Makes the bot say the provided text in the provided channel (format 'say [channel/user] text')"
adminOnly = True
showInCommandList = False
def execute(self, bot, user, target, triggerInMsg, msg, msgWithoutFirstWord, msgParts, msgPartsLength):
if msgPartsLength < 3:
bot.say(target, "Please provide both a channel or user name to say something to, and the text to say")
#Check if we're in the channel we have to say something to
elif msgParts[1].startswith('#') and msgParts[1] not in bot.channelsUserList:
bot.say(target, "I'm not in that channel, so I can't say anything in there, sorry.")
#Nothing's stopping us now! Say it!
else:
messageToSay = " ".join(msgParts[2:])
if triggerInMsg == 'say':
bot.say(msgParts[1], messageToSay)
elif triggerInMsg == 'do':
bot.doAction(msgParts[1], messageToSay)
|
from CommandTemplate import CommandTemplate
class Command(CommandTemplate):
triggers = ['say', 'do', 'notice']
helptext = "Makes the bot say the provided text in the provided channel (format 'say [channel/user] text')"
adminOnly = True
showInCommandList = False
def execute(self, bot, user, target, triggerInMsg, msg, msgWithoutFirstWord, msgParts, msgPartsLength):
if msgPartsLength < 3:
bot.say(target, "Please provide both a channel or user name to say something to, and the text to say")
#Check if we're in the channel we have to say something to
elif msgParts[1].startswith('#') and msgParts[1] not in bot.channelsUserList:
bot.say(target, "I'm not in that channel, so I can't say anything in there, sorry.")
#Nothing's stopping us now! Say it!
else:
messageToSay = " ".join(msgParts[2:])
messageType = 'say'
if triggerInMsg == 'do':
messageType = 'action'
elif triggerInMsg == 'notice':
messageType = 'notice'
bot.sendMessage(msgParts[1], messageToSay, messageType)
|
Move to sendMessage command for message sending. Also add 'notice' trigger, because why not
|
Move to sendMessage command for message sending. Also add 'notice' trigger, because why not
|
Python
|
mit
|
Didero/DideRobot
|
from CommandTemplate import CommandTemplate
class Command(CommandTemplate):
- triggers = ['say', 'do']
+ triggers = ['say', 'do', 'notice']
helptext = "Makes the bot say the provided text in the provided channel (format 'say [channel/user] text')"
adminOnly = True
showInCommandList = False
def execute(self, bot, user, target, triggerInMsg, msg, msgWithoutFirstWord, msgParts, msgPartsLength):
if msgPartsLength < 3:
bot.say(target, "Please provide both a channel or user name to say something to, and the text to say")
#Check if we're in the channel we have to say something to
elif msgParts[1].startswith('#') and msgParts[1] not in bot.channelsUserList:
bot.say(target, "I'm not in that channel, so I can't say anything in there, sorry.")
#Nothing's stopping us now! Say it!
else:
messageToSay = " ".join(msgParts[2:])
+ messageType = 'say'
- if triggerInMsg == 'say':
- bot.say(msgParts[1], messageToSay)
- elif triggerInMsg == 'do':
+ if triggerInMsg == 'do':
- bot.doAction(msgParts[1], messageToSay)
+ messageType = 'action'
+ elif triggerInMsg == 'notice':
+ messageType = 'notice'
+ bot.sendMessage(msgParts[1], messageToSay, messageType)
+
|
Move to sendMessage command for message sending. Also add 'notice' trigger, because why not
|
## Code Before:
from CommandTemplate import CommandTemplate
class Command(CommandTemplate):
triggers = ['say', 'do']
helptext = "Makes the bot say the provided text in the provided channel (format 'say [channel/user] text')"
adminOnly = True
showInCommandList = False
def execute(self, bot, user, target, triggerInMsg, msg, msgWithoutFirstWord, msgParts, msgPartsLength):
if msgPartsLength < 3:
bot.say(target, "Please provide both a channel or user name to say something to, and the text to say")
#Check if we're in the channel we have to say something to
elif msgParts[1].startswith('#') and msgParts[1] not in bot.channelsUserList:
bot.say(target, "I'm not in that channel, so I can't say anything in there, sorry.")
#Nothing's stopping us now! Say it!
else:
messageToSay = " ".join(msgParts[2:])
if triggerInMsg == 'say':
bot.say(msgParts[1], messageToSay)
elif triggerInMsg == 'do':
bot.doAction(msgParts[1], messageToSay)
## Instruction:
Move to sendMessage command for message sending. Also add 'notice' trigger, because why not
## Code After:
from CommandTemplate import CommandTemplate
class Command(CommandTemplate):
triggers = ['say', 'do', 'notice']
helptext = "Makes the bot say the provided text in the provided channel (format 'say [channel/user] text')"
adminOnly = True
showInCommandList = False
def execute(self, bot, user, target, triggerInMsg, msg, msgWithoutFirstWord, msgParts, msgPartsLength):
if msgPartsLength < 3:
bot.say(target, "Please provide both a channel or user name to say something to, and the text to say")
#Check if we're in the channel we have to say something to
elif msgParts[1].startswith('#') and msgParts[1] not in bot.channelsUserList:
bot.say(target, "I'm not in that channel, so I can't say anything in there, sorry.")
#Nothing's stopping us now! Say it!
else:
messageToSay = " ".join(msgParts[2:])
messageType = 'say'
if triggerInMsg == 'do':
messageType = 'action'
elif triggerInMsg == 'notice':
messageType = 'notice'
bot.sendMessage(msgParts[1], messageToSay, messageType)
|
from CommandTemplate import CommandTemplate
class Command(CommandTemplate):
- triggers = ['say', 'do']
+ triggers = ['say', 'do', 'notice']
? ++++++++++
helptext = "Makes the bot say the provided text in the provided channel (format 'say [channel/user] text')"
adminOnly = True
showInCommandList = False
def execute(self, bot, user, target, triggerInMsg, msg, msgWithoutFirstWord, msgParts, msgPartsLength):
if msgPartsLength < 3:
bot.say(target, "Please provide both a channel or user name to say something to, and the text to say")
#Check if we're in the channel we have to say something to
elif msgParts[1].startswith('#') and msgParts[1] not in bot.channelsUserList:
bot.say(target, "I'm not in that channel, so I can't say anything in there, sorry.")
#Nothing's stopping us now! Say it!
else:
messageToSay = " ".join(msgParts[2:])
+ messageType = 'say'
- if triggerInMsg == 'say':
- bot.say(msgParts[1], messageToSay)
- elif triggerInMsg == 'do':
? --
+ if triggerInMsg == 'do':
- bot.doAction(msgParts[1], messageToSay)
+ messageType = 'action'
+ elif triggerInMsg == 'notice':
+ messageType = 'notice'
+
+ bot.sendMessage(msgParts[1], messageToSay, messageType)
|
1e5a956eb289b8333ecf3c3cc00f51295f37870a
|
api_tests/institutions/views/test_institution_users_list.py
|
api_tests/institutions/views/test_institution_users_list.py
|
from nose.tools import * # flake8: noqa
from tests.base import ApiTestCase
from osf_tests.factories import InstitutionFactory, UserFactory
from api.base.settings.defaults import API_BASE
class TestInstitutionUsersList(ApiTestCase):
def setUp(self):
super(TestInstitutionUsersList, self).setUp()
self.institution = InstitutionFactory()
self.user1 = UserFactory()
self.user1.affiliated_institutions.add(self.institution)
self.user1.save()
self.user2 = UserFactory()
self.user2.affiliated_institutions.add(self.institution)
self.user2.save()
self.institution_user_url = '/{0}institutions/{1}/users/'.format(API_BASE, self.institution._id)
def test_return_all_users(self):
res = self.app.get(self.institution_user_url)
assert_equal(res.status_code, 200)
ids = [each['id'] for each in res.json['data']]
assert_equal(len(res.json['data']), 2)
assert_in(self.user1._id, ids)
assert_in(self.user2._id, ids)
|
import pytest
from api.base.settings.defaults import API_BASE
from osf_tests.factories import (
InstitutionFactory,
UserFactory,
)
@pytest.mark.django_db
class TestInstitutionUsersList:
@pytest.fixture()
def institution(self):
return InstitutionFactory()
@pytest.fixture()
def user_one(self, institution):
user_one = UserFactory()
user_one.affiliated_institutions.add(institution)
user_one.save()
return user_one
@pytest.fixture()
def user_two(self, institution):
user_two = UserFactory()
user_two.affiliated_institutions.add(institution)
user_two.save()
return user_two
@pytest.fixture()
def url_institution_user(self, institution):
return '/{0}institutions/{1}/users/'.format(API_BASE, institution._id)
def test_return_all_users(self, app, institution, user_one, user_two, url_institution_user):
res = app.get(url_institution_user)
assert res.status_code == 200
ids = [each['id'] for each in res.json['data']]
assert len(res.json['data']) == 2
assert user_one._id in ids
assert user_two._id in ids
|
Convert institutions users to pytest
|
Convert institutions users to pytest
|
Python
|
apache-2.0
|
cslzchen/osf.io,chennan47/osf.io,crcresearch/osf.io,caneruguz/osf.io,HalcyonChimera/osf.io,leb2dg/osf.io,icereval/osf.io,crcresearch/osf.io,cslzchen/osf.io,sloria/osf.io,felliott/osf.io,binoculars/osf.io,laurenrevere/osf.io,mfraezz/osf.io,felliott/osf.io,Johnetordoff/osf.io,aaxelb/osf.io,crcresearch/osf.io,adlius/osf.io,brianjgeiger/osf.io,caneruguz/osf.io,erinspace/osf.io,baylee-d/osf.io,erinspace/osf.io,laurenrevere/osf.io,Johnetordoff/osf.io,CenterForOpenScience/osf.io,chrisseto/osf.io,sloria/osf.io,mfraezz/osf.io,chrisseto/osf.io,cslzchen/osf.io,brianjgeiger/osf.io,saradbowman/osf.io,leb2dg/osf.io,mattclark/osf.io,TomBaxter/osf.io,TomBaxter/osf.io,caseyrollins/osf.io,caseyrollins/osf.io,leb2dg/osf.io,felliott/osf.io,Johnetordoff/osf.io,chennan47/osf.io,erinspace/osf.io,HalcyonChimera/osf.io,baylee-d/osf.io,adlius/osf.io,leb2dg/osf.io,Johnetordoff/osf.io,cslzchen/osf.io,laurenrevere/osf.io,icereval/osf.io,aaxelb/osf.io,adlius/osf.io,aaxelb/osf.io,pattisdr/osf.io,CenterForOpenScience/osf.io,caseyrollins/osf.io,brianjgeiger/osf.io,CenterForOpenScience/osf.io,mattclark/osf.io,chrisseto/osf.io,caneruguz/osf.io,pattisdr/osf.io,adlius/osf.io,HalcyonChimera/osf.io,binoculars/osf.io,baylee-d/osf.io,aaxelb/osf.io,mfraezz/osf.io,saradbowman/osf.io,sloria/osf.io,brianjgeiger/osf.io,pattisdr/osf.io,CenterForOpenScience/osf.io,icereval/osf.io,chrisseto/osf.io,HalcyonChimera/osf.io,caneruguz/osf.io,chennan47/osf.io,TomBaxter/osf.io,binoculars/osf.io,mfraezz/osf.io,felliott/osf.io,mattclark/osf.io
|
+ import pytest
- from nose.tools import * # flake8: noqa
-
- from tests.base import ApiTestCase
- from osf_tests.factories import InstitutionFactory, UserFactory
from api.base.settings.defaults import API_BASE
+ from osf_tests.factories import (
+ InstitutionFactory,
+ UserFactory,
+ )
+ @pytest.mark.django_db
- class TestInstitutionUsersList(ApiTestCase):
+ class TestInstitutionUsersList:
- def setUp(self):
- super(TestInstitutionUsersList, self).setUp()
- self.institution = InstitutionFactory()
- self.user1 = UserFactory()
- self.user1.affiliated_institutions.add(self.institution)
- self.user1.save()
- self.user2 = UserFactory()
- self.user2.affiliated_institutions.add(self.institution)
- self.user2.save()
- self.institution_user_url = '/{0}institutions/{1}/users/'.format(API_BASE, self.institution._id)
+ @pytest.fixture()
+ def institution(self):
+ return InstitutionFactory()
- def test_return_all_users(self):
- res = self.app.get(self.institution_user_url)
+ @pytest.fixture()
+ def user_one(self, institution):
+ user_one = UserFactory()
+ user_one.affiliated_institutions.add(institution)
+ user_one.save()
+ return user_one
+ @pytest.fixture()
+ def user_two(self, institution):
+ user_two = UserFactory()
+ user_two.affiliated_institutions.add(institution)
+ user_two.save()
+ return user_two
+
+ @pytest.fixture()
+ def url_institution_user(self, institution):
+ return '/{0}institutions/{1}/users/'.format(API_BASE, institution._id)
+
+ def test_return_all_users(self, app, institution, user_one, user_two, url_institution_user):
+ res = app.get(url_institution_user)
+
- assert_equal(res.status_code, 200)
+ assert res.status_code == 200
ids = [each['id'] for each in res.json['data']]
- assert_equal(len(res.json['data']), 2)
+ assert len(res.json['data']) == 2
- assert_in(self.user1._id, ids)
- assert_in(self.user2._id, ids)
+ assert user_one._id in ids
+ assert user_two._id in ids
|
Convert institutions users to pytest
|
## Code Before:
from nose.tools import * # flake8: noqa
from tests.base import ApiTestCase
from osf_tests.factories import InstitutionFactory, UserFactory
from api.base.settings.defaults import API_BASE
class TestInstitutionUsersList(ApiTestCase):
def setUp(self):
super(TestInstitutionUsersList, self).setUp()
self.institution = InstitutionFactory()
self.user1 = UserFactory()
self.user1.affiliated_institutions.add(self.institution)
self.user1.save()
self.user2 = UserFactory()
self.user2.affiliated_institutions.add(self.institution)
self.user2.save()
self.institution_user_url = '/{0}institutions/{1}/users/'.format(API_BASE, self.institution._id)
def test_return_all_users(self):
res = self.app.get(self.institution_user_url)
assert_equal(res.status_code, 200)
ids = [each['id'] for each in res.json['data']]
assert_equal(len(res.json['data']), 2)
assert_in(self.user1._id, ids)
assert_in(self.user2._id, ids)
## Instruction:
Convert institutions users to pytest
## Code After:
import pytest
from api.base.settings.defaults import API_BASE
from osf_tests.factories import (
InstitutionFactory,
UserFactory,
)
@pytest.mark.django_db
class TestInstitutionUsersList:
@pytest.fixture()
def institution(self):
return InstitutionFactory()
@pytest.fixture()
def user_one(self, institution):
user_one = UserFactory()
user_one.affiliated_institutions.add(institution)
user_one.save()
return user_one
@pytest.fixture()
def user_two(self, institution):
user_two = UserFactory()
user_two.affiliated_institutions.add(institution)
user_two.save()
return user_two
@pytest.fixture()
def url_institution_user(self, institution):
return '/{0}institutions/{1}/users/'.format(API_BASE, institution._id)
def test_return_all_users(self, app, institution, user_one, user_two, url_institution_user):
res = app.get(url_institution_user)
assert res.status_code == 200
ids = [each['id'] for each in res.json['data']]
assert len(res.json['data']) == 2
assert user_one._id in ids
assert user_two._id in ids
|
+ import pytest
- from nose.tools import * # flake8: noqa
-
- from tests.base import ApiTestCase
- from osf_tests.factories import InstitutionFactory, UserFactory
from api.base.settings.defaults import API_BASE
+ from osf_tests.factories import (
+ InstitutionFactory,
+ UserFactory,
+ )
+ @pytest.mark.django_db
- class TestInstitutionUsersList(ApiTestCase):
? -------------
+ class TestInstitutionUsersList:
- def setUp(self):
- super(TestInstitutionUsersList, self).setUp()
- self.institution = InstitutionFactory()
- self.user1 = UserFactory()
- self.user1.affiliated_institutions.add(self.institution)
- self.user1.save()
- self.user2 = UserFactory()
- self.user2.affiliated_institutions.add(self.institution)
- self.user2.save()
- self.institution_user_url = '/{0}institutions/{1}/users/'.format(API_BASE, self.institution._id)
+ @pytest.fixture()
+ def institution(self):
+ return InstitutionFactory()
- def test_return_all_users(self):
- res = self.app.get(self.institution_user_url)
+ @pytest.fixture()
+ def user_one(self, institution):
+ user_one = UserFactory()
+ user_one.affiliated_institutions.add(institution)
+ user_one.save()
+ return user_one
+ @pytest.fixture()
+ def user_two(self, institution):
+ user_two = UserFactory()
+ user_two.affiliated_institutions.add(institution)
+ user_two.save()
+ return user_two
+
+ @pytest.fixture()
+ def url_institution_user(self, institution):
+ return '/{0}institutions/{1}/users/'.format(API_BASE, institution._id)
+
+ def test_return_all_users(self, app, institution, user_one, user_two, url_institution_user):
+ res = app.get(url_institution_user)
+
- assert_equal(res.status_code, 200)
? ^^^^^^^ ^ -
+ assert res.status_code == 200
? ^ ^^^
ids = [each['id'] for each in res.json['data']]
- assert_equal(len(res.json['data']), 2)
? ^^^^^^^ ^ -
+ assert len(res.json['data']) == 2
? ^ ^^^
- assert_in(self.user1._id, ids)
- assert_in(self.user2._id, ids)
+ assert user_one._id in ids
+ assert user_two._id in ids
|
c30ef8dba42dcc16eb7bf9e126d4551db30c06eb
|
exporters/filters/__init__.py
|
exporters/filters/__init__.py
|
from .key_value_filter import KeyValueFilter
from .key_value_regex_filter import KeyValueRegexFilter
from .no_filter import NoFilter
from .pythonexp_filter import PythonexpFilter
__all__ = ['KeyValueFilter', 'KeyValueRegexFilter', 'NoFilter', 'PythonexpFilter']
|
from .key_value_filters import KeyValueFilter, KeyValueRegexFilter
from .no_filter import NoFilter
from .pythonexp_filter import PythonexpFilter
__all__ = ['KeyValueFilter', 'KeyValueRegexFilter', 'NoFilter', 'PythonexpFilter']
|
Use proper filters avoiding deprecated warnings.
|
Use proper filters avoiding deprecated warnings.
|
Python
|
bsd-3-clause
|
scrapinghub/exporters
|
- from .key_value_filter import KeyValueFilter
- from .key_value_regex_filter import KeyValueRegexFilter
+ from .key_value_filters import KeyValueFilter, KeyValueRegexFilter
from .no_filter import NoFilter
from .pythonexp_filter import PythonexpFilter
__all__ = ['KeyValueFilter', 'KeyValueRegexFilter', 'NoFilter', 'PythonexpFilter']
|
Use proper filters avoiding deprecated warnings.
|
## Code Before:
from .key_value_filter import KeyValueFilter
from .key_value_regex_filter import KeyValueRegexFilter
from .no_filter import NoFilter
from .pythonexp_filter import PythonexpFilter
__all__ = ['KeyValueFilter', 'KeyValueRegexFilter', 'NoFilter', 'PythonexpFilter']
## Instruction:
Use proper filters avoiding deprecated warnings.
## Code After:
from .key_value_filters import KeyValueFilter, KeyValueRegexFilter
from .no_filter import NoFilter
from .pythonexp_filter import PythonexpFilter
__all__ = ['KeyValueFilter', 'KeyValueRegexFilter', 'NoFilter', 'PythonexpFilter']
|
- from .key_value_filter import KeyValueFilter
- from .key_value_regex_filter import KeyValueRegexFilter
? ------
+ from .key_value_filters import KeyValueFilter, KeyValueRegexFilter
? + ++++++++++++++++
from .no_filter import NoFilter
from .pythonexp_filter import PythonexpFilter
__all__ = ['KeyValueFilter', 'KeyValueRegexFilter', 'NoFilter', 'PythonexpFilter']
|
2145ee5961cc35d36013e2333c636c7390b6c039
|
gooey/gui/util/taskkill.py
|
gooey/gui/util/taskkill.py
|
import sys
import os
import signal
if sys.platform.startswith("win"):
def taskkill(pid):
os.system('taskkill /F /PID {:d} /T >NUL 2>NUL'.format(pid))
else: # POSIX
def taskkill(pid):
os.kill(pid, signal.SIGTERM)
|
import sys
import os
import signal
if sys.platform.startswith("win"):
def taskkill(pid):
os.system('taskkill /F /PID {:d} /T >NUL 2>NUL'.format(pid))
else: # POSIX
import psutil
def taskkill(pid):
parent = psutil.Process(pid)
for child in parent.children(recursive=True):
child.kill()
parent.kill()
|
Kill child processes as well as shell process
|
Kill child processes as well as shell process
|
Python
|
mit
|
jschultz/Gooey,partrita/Gooey,chriskiehl/Gooey,codingsnippets/Gooey
|
import sys
import os
import signal
if sys.platform.startswith("win"):
def taskkill(pid):
os.system('taskkill /F /PID {:d} /T >NUL 2>NUL'.format(pid))
else: # POSIX
+ import psutil
def taskkill(pid):
- os.kill(pid, signal.SIGTERM)
+ parent = psutil.Process(pid)
+ for child in parent.children(recursive=True):
+ child.kill()
+ parent.kill()
|
Kill child processes as well as shell process
|
## Code Before:
import sys
import os
import signal
if sys.platform.startswith("win"):
def taskkill(pid):
os.system('taskkill /F /PID {:d} /T >NUL 2>NUL'.format(pid))
else: # POSIX
def taskkill(pid):
os.kill(pid, signal.SIGTERM)
## Instruction:
Kill child processes as well as shell process
## Code After:
import sys
import os
import signal
if sys.platform.startswith("win"):
def taskkill(pid):
os.system('taskkill /F /PID {:d} /T >NUL 2>NUL'.format(pid))
else: # POSIX
import psutil
def taskkill(pid):
parent = psutil.Process(pid)
for child in parent.children(recursive=True):
child.kill()
parent.kill()
|
import sys
import os
import signal
if sys.platform.startswith("win"):
def taskkill(pid):
os.system('taskkill /F /PID {:d} /T >NUL 2>NUL'.format(pid))
else: # POSIX
+ import psutil
def taskkill(pid):
- os.kill(pid, signal.SIGTERM)
+ parent = psutil.Process(pid)
+ for child in parent.children(recursive=True):
+ child.kill()
+ parent.kill()
|
9548247251399a4fbe7a140c5d8db64e8dd71b46
|
cobe/instatrace.py
|
cobe/instatrace.py
|
import datetime
import math
import os
import time
def singleton(cls):
instances = {}
def getinstance():
if cls not in instances:
instances[cls] = cls()
return instances[cls]
return getinstance
@singleton
class Instatrace:
def __init__(self):
self._fd = None
def init(self, filename):
if self._fd is not None:
self._fd.close()
if filename is None:
self._fd = None
else:
# rotate logs
if os.path.exists(filename):
now = datetime.datetime.now()
stamp = now.strftime("%Y-%m-%d.%H%M%S")
os.rename(filename, "%s.%s" % (filename, stamp))
self._fd = open(filename, "w")
def is_enabled(self):
return self._fd is not None
def now(self):
"""Microsecond resolution, integer now"""
if not self.is_enabled():
return 0
return int(time.time()*100000)
def now_ms(self):
"""Millisecond resolution, integer now"""
if not self.is_enabled():
return 0
return int(time.time()*1000)
def trace(self, statName, statValue, userData=None):
if not self.is_enabled():
return
extra = ""
if userData is not None:
extra = " " + repr(userData)
self._fd.write("%s %d%s\n" % (statName, statValue, extra))
self._fd.flush()
|
import datetime
import math
import os
import time
def singleton(cls):
instances = {}
def getinstance():
if cls not in instances:
instances[cls] = cls()
return instances[cls]
return getinstance
@singleton
class Instatrace:
def __init__(self):
self._fd = None
def init(self, filename):
if self._fd is not None:
self._fd.close()
if filename is None:
self._fd = None
else:
# rotate logs
if os.path.exists(filename):
now = datetime.datetime.now()
stamp = now.strftime("%Y-%m-%d.%H%M%S")
os.rename(filename, "%s.%s" % (filename, stamp))
self._fd = open(filename, "w")
def is_enabled(self):
return self._fd is not None
def now(self):
"""Microsecond resolution, integer now"""
if not self.is_enabled():
return 0
return int(time.time()*100000)
def now_ms(self):
"""Millisecond resolution, integer now"""
if not self.is_enabled():
return 0
return int(time.time()*1000)
def trace(self, statName, statValue, userData=None):
if not self.is_enabled():
return
extra = ""
if userData is not None:
extra = " " + repr(userData)
self._fd.write("%s %d%s\n" % (statName, statValue, extra))
|
Remove a debugging flush() after every trace
|
Remove a debugging flush() after every trace
|
Python
|
mit
|
wodim/cobe-ng,wodim/cobe-ng,tiagochiavericosta/cobe,LeMagnesium/cobe,LeMagnesium/cobe,DarkMio/cobe,pteichman/cobe,meska/cobe,meska/cobe,pteichman/cobe,DarkMio/cobe,tiagochiavericosta/cobe
|
import datetime
import math
import os
import time
def singleton(cls):
instances = {}
def getinstance():
if cls not in instances:
instances[cls] = cls()
return instances[cls]
return getinstance
@singleton
class Instatrace:
def __init__(self):
self._fd = None
def init(self, filename):
if self._fd is not None:
self._fd.close()
if filename is None:
self._fd = None
else:
# rotate logs
if os.path.exists(filename):
now = datetime.datetime.now()
stamp = now.strftime("%Y-%m-%d.%H%M%S")
os.rename(filename, "%s.%s" % (filename, stamp))
self._fd = open(filename, "w")
def is_enabled(self):
return self._fd is not None
def now(self):
"""Microsecond resolution, integer now"""
if not self.is_enabled():
return 0
return int(time.time()*100000)
def now_ms(self):
"""Millisecond resolution, integer now"""
if not self.is_enabled():
return 0
return int(time.time()*1000)
def trace(self, statName, statValue, userData=None):
if not self.is_enabled():
return
extra = ""
if userData is not None:
extra = " " + repr(userData)
self._fd.write("%s %d%s\n" % (statName, statValue, extra))
- self._fd.flush()
|
Remove a debugging flush() after every trace
|
## Code Before:
import datetime
import math
import os
import time
def singleton(cls):
instances = {}
def getinstance():
if cls not in instances:
instances[cls] = cls()
return instances[cls]
return getinstance
@singleton
class Instatrace:
def __init__(self):
self._fd = None
def init(self, filename):
if self._fd is not None:
self._fd.close()
if filename is None:
self._fd = None
else:
# rotate logs
if os.path.exists(filename):
now = datetime.datetime.now()
stamp = now.strftime("%Y-%m-%d.%H%M%S")
os.rename(filename, "%s.%s" % (filename, stamp))
self._fd = open(filename, "w")
def is_enabled(self):
return self._fd is not None
def now(self):
"""Microsecond resolution, integer now"""
if not self.is_enabled():
return 0
return int(time.time()*100000)
def now_ms(self):
"""Millisecond resolution, integer now"""
if not self.is_enabled():
return 0
return int(time.time()*1000)
def trace(self, statName, statValue, userData=None):
if not self.is_enabled():
return
extra = ""
if userData is not None:
extra = " " + repr(userData)
self._fd.write("%s %d%s\n" % (statName, statValue, extra))
self._fd.flush()
## Instruction:
Remove a debugging flush() after every trace
## Code After:
import datetime
import math
import os
import time
def singleton(cls):
instances = {}
def getinstance():
if cls not in instances:
instances[cls] = cls()
return instances[cls]
return getinstance
@singleton
class Instatrace:
def __init__(self):
self._fd = None
def init(self, filename):
if self._fd is not None:
self._fd.close()
if filename is None:
self._fd = None
else:
# rotate logs
if os.path.exists(filename):
now = datetime.datetime.now()
stamp = now.strftime("%Y-%m-%d.%H%M%S")
os.rename(filename, "%s.%s" % (filename, stamp))
self._fd = open(filename, "w")
def is_enabled(self):
return self._fd is not None
def now(self):
"""Microsecond resolution, integer now"""
if not self.is_enabled():
return 0
return int(time.time()*100000)
def now_ms(self):
"""Millisecond resolution, integer now"""
if not self.is_enabled():
return 0
return int(time.time()*1000)
def trace(self, statName, statValue, userData=None):
if not self.is_enabled():
return
extra = ""
if userData is not None:
extra = " " + repr(userData)
self._fd.write("%s %d%s\n" % (statName, statValue, extra))
|
import datetime
import math
import os
import time
def singleton(cls):
instances = {}
def getinstance():
if cls not in instances:
instances[cls] = cls()
return instances[cls]
return getinstance
@singleton
class Instatrace:
def __init__(self):
self._fd = None
def init(self, filename):
if self._fd is not None:
self._fd.close()
if filename is None:
self._fd = None
else:
# rotate logs
if os.path.exists(filename):
now = datetime.datetime.now()
stamp = now.strftime("%Y-%m-%d.%H%M%S")
os.rename(filename, "%s.%s" % (filename, stamp))
self._fd = open(filename, "w")
def is_enabled(self):
return self._fd is not None
def now(self):
"""Microsecond resolution, integer now"""
if not self.is_enabled():
return 0
return int(time.time()*100000)
def now_ms(self):
"""Millisecond resolution, integer now"""
if not self.is_enabled():
return 0
return int(time.time()*1000)
def trace(self, statName, statValue, userData=None):
if not self.is_enabled():
return
extra = ""
if userData is not None:
extra = " " + repr(userData)
self._fd.write("%s %d%s\n" % (statName, statValue, extra))
- self._fd.flush()
|
0a4d3f5b837cfa0d41a927c193a831a1c00b51f5
|
setup.py
|
setup.py
|
from distutils.core import setup
from hydra_agent import __version__
setup(
name = 'hydra-agent',
version = __version__,
author = "Whamcloud, Inc.",
author_email = "[email protected]",
packages = ['hydra_agent', 'hydra_agent/cmds'],
scripts = ['bin/hydra-agent.py', 'bin/hydra-rmmod.py'],
data_files=[('/usr/lib/ocf/resource.d/hydra', ['Target'])],
url = 'http://www.whamcloud.com/',
license = 'Proprietary',
description = 'The Whamcloud Lustre Monitoring and Adminisration Interface Agent',
long_description = open('README.txt').read(),
)
|
from distutils.core import setup
from hydra_agent import __version__
setup(
name = 'hydra-agent',
version = __version__,
author = "Whamcloud, Inc.",
author_email = "[email protected]",
packages = ['hydra_agent', 'hydra_agent/cmds', 'hydra_agent/audit', 'hydra_agent/audit/lustre'],
scripts = ['bin/hydra-agent.py', 'bin/hydra-rmmod.py'],
data_files=[('/usr/lib/ocf/resource.d/hydra', ['Target'])],
url = 'http://www.whamcloud.com/',
license = 'Proprietary',
description = 'The Whamcloud Lustre Monitoring and Adminisration Interface Agent',
long_description = open('README.txt').read(),
)
|
Add new paths for audit/
|
Add new paths for audit/
|
Python
|
mit
|
intel-hpdd/intel-manager-for-lustre,intel-hpdd/intel-manager-for-lustre,intel-hpdd/intel-manager-for-lustre
|
from distutils.core import setup
from hydra_agent import __version__
setup(
name = 'hydra-agent',
version = __version__,
author = "Whamcloud, Inc.",
author_email = "[email protected]",
- packages = ['hydra_agent', 'hydra_agent/cmds'],
+ packages = ['hydra_agent', 'hydra_agent/cmds', 'hydra_agent/audit', 'hydra_agent/audit/lustre'],
scripts = ['bin/hydra-agent.py', 'bin/hydra-rmmod.py'],
data_files=[('/usr/lib/ocf/resource.d/hydra', ['Target'])],
url = 'http://www.whamcloud.com/',
license = 'Proprietary',
description = 'The Whamcloud Lustre Monitoring and Adminisration Interface Agent',
long_description = open('README.txt').read(),
)
|
Add new paths for audit/
|
## Code Before:
from distutils.core import setup
from hydra_agent import __version__
setup(
name = 'hydra-agent',
version = __version__,
author = "Whamcloud, Inc.",
author_email = "[email protected]",
packages = ['hydra_agent', 'hydra_agent/cmds'],
scripts = ['bin/hydra-agent.py', 'bin/hydra-rmmod.py'],
data_files=[('/usr/lib/ocf/resource.d/hydra', ['Target'])],
url = 'http://www.whamcloud.com/',
license = 'Proprietary',
description = 'The Whamcloud Lustre Monitoring and Adminisration Interface Agent',
long_description = open('README.txt').read(),
)
## Instruction:
Add new paths for audit/
## Code After:
from distutils.core import setup
from hydra_agent import __version__
setup(
name = 'hydra-agent',
version = __version__,
author = "Whamcloud, Inc.",
author_email = "[email protected]",
packages = ['hydra_agent', 'hydra_agent/cmds', 'hydra_agent/audit', 'hydra_agent/audit/lustre'],
scripts = ['bin/hydra-agent.py', 'bin/hydra-rmmod.py'],
data_files=[('/usr/lib/ocf/resource.d/hydra', ['Target'])],
url = 'http://www.whamcloud.com/',
license = 'Proprietary',
description = 'The Whamcloud Lustre Monitoring and Adminisration Interface Agent',
long_description = open('README.txt').read(),
)
|
from distutils.core import setup
from hydra_agent import __version__
setup(
name = 'hydra-agent',
version = __version__,
author = "Whamcloud, Inc.",
author_email = "[email protected]",
- packages = ['hydra_agent', 'hydra_agent/cmds'],
+ packages = ['hydra_agent', 'hydra_agent/cmds', 'hydra_agent/audit', 'hydra_agent/audit/lustre'],
scripts = ['bin/hydra-agent.py', 'bin/hydra-rmmod.py'],
data_files=[('/usr/lib/ocf/resource.d/hydra', ['Target'])],
url = 'http://www.whamcloud.com/',
license = 'Proprietary',
description = 'The Whamcloud Lustre Monitoring and Adminisration Interface Agent',
long_description = open('README.txt').read(),
)
|
724b80b44229b531d7a11cb7cc9f6ad88d9aedb0
|
bnw_handlers/command_userinfo.py
|
bnw_handlers/command_userinfo.py
|
from twisted.internet import defer
import bnw_core.bnw_objects as objs
@defer.inlineCallbacks
def cmd_userinfo(request, user=''):
if not user:
defer.returnValue(dict(ok=False, desc='Username required.'))
user_obj = yield objs.User.find_one({'name': user})
subscribers = yield objs.Subscription.find(dict(
target=user, type='sub_user'))
subscribers = set([x['user'] for x in subscribers])
subscriptions = yield objs.Subscription.find(dict(
user=user, type='sub_user'))
subscriptions = set([x['target'] for x in subscriptions])
friends = list(subscribers & subscriptions)
friends.sort()
subscribers_only = list(subscribers - subscriptions)
subscribers_only.sort()
subscriptions_only = list(subscriptions - subscribers)
subscriptions_only.sort()
messages_count = int((yield objs.Message.count({'user': user})))
comments_count = int((yield objs.Comment.count({'user': user})))
vcard = user_obj.get('vcard', {})
about = user_obj.get('settings', {}).get('about', '')
if not about:
about = vcard.get('desc', '')
defer.returnValue({
'user': user,
'regdate': user_obj.get('regdate', 0),
'messages_count': messages_count,
'comments_count': comments_count,
'subscribers': subscribers_only,
'subscriptions': subscriptions_only,
'friends': friends,
'vcard': vcard,
'about': about,
})
|
from twisted.internet import defer
import bnw_core.bnw_objects as objs
@defer.inlineCallbacks
def cmd_userinfo(request, user=''):
if not user:
defer.returnValue(dict(ok=False, desc='Username required.'))
user_obj = yield objs.User.find_one({'name': user})
subscribers = yield objs.Subscription.find(dict(
target=user, type='sub_user'))
subscribers = set([x['user'] for x in subscribers])
subscriptions = yield objs.Subscription.find(dict(
user=user, type='sub_user'))
subscriptions = set([x['target'] for x in subscriptions])
friends = list(subscribers & subscriptions)
friends.sort()
subscribers_only = list(subscribers - subscriptions)
subscribers_only.sort()
subscriptions_only = list(subscriptions - subscribers)
subscriptions_only.sort()
messages_count = int((yield objs.Message.count({'user': user})))
comments_count = int((yield objs.Comment.count({'user': user})))
vcard = user_obj.get('vcard', {})
about = user_obj.get('settings', {}).get('about', '')
if not about:
about = vcard.get('desc', '')
defer.returnValue({
'ok': True,
'user': user,
'regdate': user_obj.get('regdate', 0),
'messages_count': messages_count,
'comments_count': comments_count,
'subscribers': subscribers_only,
'subscriptions': subscriptions_only,
'friends': friends,
'vcard': vcard,
'about': about,
})
|
Fix userinfo api command (send ok=True)
|
Fix userinfo api command (send ok=True)
|
Python
|
bsd-2-clause
|
ojab/bnw,stiletto/bnw,stiletto/bnw,ojab/bnw,un-def/bnw,ojab/bnw,stiletto/bnw,ojab/bnw,un-def/bnw,un-def/bnw,un-def/bnw,stiletto/bnw
|
from twisted.internet import defer
import bnw_core.bnw_objects as objs
@defer.inlineCallbacks
def cmd_userinfo(request, user=''):
if not user:
defer.returnValue(dict(ok=False, desc='Username required.'))
user_obj = yield objs.User.find_one({'name': user})
subscribers = yield objs.Subscription.find(dict(
target=user, type='sub_user'))
subscribers = set([x['user'] for x in subscribers])
subscriptions = yield objs.Subscription.find(dict(
user=user, type='sub_user'))
subscriptions = set([x['target'] for x in subscriptions])
friends = list(subscribers & subscriptions)
friends.sort()
subscribers_only = list(subscribers - subscriptions)
subscribers_only.sort()
subscriptions_only = list(subscriptions - subscribers)
subscriptions_only.sort()
messages_count = int((yield objs.Message.count({'user': user})))
comments_count = int((yield objs.Comment.count({'user': user})))
vcard = user_obj.get('vcard', {})
about = user_obj.get('settings', {}).get('about', '')
if not about:
about = vcard.get('desc', '')
defer.returnValue({
+ 'ok': True,
'user': user,
'regdate': user_obj.get('regdate', 0),
'messages_count': messages_count,
'comments_count': comments_count,
'subscribers': subscribers_only,
'subscriptions': subscriptions_only,
'friends': friends,
'vcard': vcard,
'about': about,
})
|
Fix userinfo api command (send ok=True)
|
## Code Before:
from twisted.internet import defer
import bnw_core.bnw_objects as objs
@defer.inlineCallbacks
def cmd_userinfo(request, user=''):
if not user:
defer.returnValue(dict(ok=False, desc='Username required.'))
user_obj = yield objs.User.find_one({'name': user})
subscribers = yield objs.Subscription.find(dict(
target=user, type='sub_user'))
subscribers = set([x['user'] for x in subscribers])
subscriptions = yield objs.Subscription.find(dict(
user=user, type='sub_user'))
subscriptions = set([x['target'] for x in subscriptions])
friends = list(subscribers & subscriptions)
friends.sort()
subscribers_only = list(subscribers - subscriptions)
subscribers_only.sort()
subscriptions_only = list(subscriptions - subscribers)
subscriptions_only.sort()
messages_count = int((yield objs.Message.count({'user': user})))
comments_count = int((yield objs.Comment.count({'user': user})))
vcard = user_obj.get('vcard', {})
about = user_obj.get('settings', {}).get('about', '')
if not about:
about = vcard.get('desc', '')
defer.returnValue({
'user': user,
'regdate': user_obj.get('regdate', 0),
'messages_count': messages_count,
'comments_count': comments_count,
'subscribers': subscribers_only,
'subscriptions': subscriptions_only,
'friends': friends,
'vcard': vcard,
'about': about,
})
## Instruction:
Fix userinfo api command (send ok=True)
## Code After:
from twisted.internet import defer
import bnw_core.bnw_objects as objs
@defer.inlineCallbacks
def cmd_userinfo(request, user=''):
if not user:
defer.returnValue(dict(ok=False, desc='Username required.'))
user_obj = yield objs.User.find_one({'name': user})
subscribers = yield objs.Subscription.find(dict(
target=user, type='sub_user'))
subscribers = set([x['user'] for x in subscribers])
subscriptions = yield objs.Subscription.find(dict(
user=user, type='sub_user'))
subscriptions = set([x['target'] for x in subscriptions])
friends = list(subscribers & subscriptions)
friends.sort()
subscribers_only = list(subscribers - subscriptions)
subscribers_only.sort()
subscriptions_only = list(subscriptions - subscribers)
subscriptions_only.sort()
messages_count = int((yield objs.Message.count({'user': user})))
comments_count = int((yield objs.Comment.count({'user': user})))
vcard = user_obj.get('vcard', {})
about = user_obj.get('settings', {}).get('about', '')
if not about:
about = vcard.get('desc', '')
defer.returnValue({
'ok': True,
'user': user,
'regdate': user_obj.get('regdate', 0),
'messages_count': messages_count,
'comments_count': comments_count,
'subscribers': subscribers_only,
'subscriptions': subscriptions_only,
'friends': friends,
'vcard': vcard,
'about': about,
})
|
from twisted.internet import defer
import bnw_core.bnw_objects as objs
@defer.inlineCallbacks
def cmd_userinfo(request, user=''):
if not user:
defer.returnValue(dict(ok=False, desc='Username required.'))
user_obj = yield objs.User.find_one({'name': user})
subscribers = yield objs.Subscription.find(dict(
target=user, type='sub_user'))
subscribers = set([x['user'] for x in subscribers])
subscriptions = yield objs.Subscription.find(dict(
user=user, type='sub_user'))
subscriptions = set([x['target'] for x in subscriptions])
friends = list(subscribers & subscriptions)
friends.sort()
subscribers_only = list(subscribers - subscriptions)
subscribers_only.sort()
subscriptions_only = list(subscriptions - subscribers)
subscriptions_only.sort()
messages_count = int((yield objs.Message.count({'user': user})))
comments_count = int((yield objs.Comment.count({'user': user})))
vcard = user_obj.get('vcard', {})
about = user_obj.get('settings', {}).get('about', '')
if not about:
about = vcard.get('desc', '')
defer.returnValue({
+ 'ok': True,
'user': user,
'regdate': user_obj.get('regdate', 0),
'messages_count': messages_count,
'comments_count': comments_count,
'subscribers': subscribers_only,
'subscriptions': subscriptions_only,
'friends': friends,
'vcard': vcard,
'about': about,
})
|
f5408c02202a07a1b45019eefb505eb8a0d21852
|
swagger2markdown.py
|
swagger2markdown.py
|
import argparse, json, os.path
import jinja2, requests
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
"-i", "--input",
default="swagger.json",
help="path to or URL of the Swagger JSON file (default: swagger.json)",
metavar="SWAGGER_LOCATION"
)
parser.add_argument(
"-o", "--output",
default="swagger.md",
help="path to the output Markdown file (default: swagger.md)",
metavar="OUTPUT"
)
parser.add_argument(
"-t", "--template",
default=os.path.join(os.path.dirname(__file__), "swagger.md.j2"),
help="Jinja2 template used for conversion",
metavar="TEMPLATE"
)
args = parser.parse_args()
try:
swagger_data = json.load(open(args.input, encoding="utf8"))
except FileNotFoundError:
swagger_data = requests.get(args.input).json()
template = jinja2.Template(open(args.template, encoding="utf8").read())
with open(args.output, "w", encoding="utf8") as output:
output.write(template.render(swagger_data=swagger_data))
|
import argparse, json, os.path
import jinja2, requests
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
"-i", "--input",
default="swagger.json",
help="path to or URL of the Swagger JSON file (default: swagger.json)",
metavar="SWAGGER_LOCATION"
)
parser.add_argument(
"-o", "--output",
default="swagger.md",
help="path to the output Markdown file (default: swagger.md)",
metavar="OUTPUT"
)
parser.add_argument(
"-t", "--template",
default=os.path.join(os.path.dirname(__file__), "swagger.md.j2"),
help="Jinja2 template used for conversion",
metavar="TEMPLATE"
)
args = parser.parse_args()
try:
swagger_data = json.load(open(args.input, encoding="utf8"))
except (FileNotFoundError, OSError):
swagger_data = requests.get(args.input).json()
template = jinja2.Template(open(args.template, encoding="utf8").read())
with open(args.output, "w", encoding="utf8") as output:
output.write(template.render(swagger_data=swagger_data))
|
Fix crash when URL is provided.
|
Fix crash when URL is provided.
|
Python
|
mit
|
moigagoo/swagger2markdown
|
import argparse, json, os.path
import jinja2, requests
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
"-i", "--input",
default="swagger.json",
help="path to or URL of the Swagger JSON file (default: swagger.json)",
metavar="SWAGGER_LOCATION"
)
parser.add_argument(
"-o", "--output",
default="swagger.md",
help="path to the output Markdown file (default: swagger.md)",
metavar="OUTPUT"
)
parser.add_argument(
"-t", "--template",
default=os.path.join(os.path.dirname(__file__), "swagger.md.j2"),
help="Jinja2 template used for conversion",
metavar="TEMPLATE"
)
args = parser.parse_args()
try:
swagger_data = json.load(open(args.input, encoding="utf8"))
- except FileNotFoundError:
+ except (FileNotFoundError, OSError):
swagger_data = requests.get(args.input).json()
template = jinja2.Template(open(args.template, encoding="utf8").read())
with open(args.output, "w", encoding="utf8") as output:
output.write(template.render(swagger_data=swagger_data))
|
Fix crash when URL is provided.
|
## Code Before:
import argparse, json, os.path
import jinja2, requests
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
"-i", "--input",
default="swagger.json",
help="path to or URL of the Swagger JSON file (default: swagger.json)",
metavar="SWAGGER_LOCATION"
)
parser.add_argument(
"-o", "--output",
default="swagger.md",
help="path to the output Markdown file (default: swagger.md)",
metavar="OUTPUT"
)
parser.add_argument(
"-t", "--template",
default=os.path.join(os.path.dirname(__file__), "swagger.md.j2"),
help="Jinja2 template used for conversion",
metavar="TEMPLATE"
)
args = parser.parse_args()
try:
swagger_data = json.load(open(args.input, encoding="utf8"))
except FileNotFoundError:
swagger_data = requests.get(args.input).json()
template = jinja2.Template(open(args.template, encoding="utf8").read())
with open(args.output, "w", encoding="utf8") as output:
output.write(template.render(swagger_data=swagger_data))
## Instruction:
Fix crash when URL is provided.
## Code After:
import argparse, json, os.path
import jinja2, requests
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
"-i", "--input",
default="swagger.json",
help="path to or URL of the Swagger JSON file (default: swagger.json)",
metavar="SWAGGER_LOCATION"
)
parser.add_argument(
"-o", "--output",
default="swagger.md",
help="path to the output Markdown file (default: swagger.md)",
metavar="OUTPUT"
)
parser.add_argument(
"-t", "--template",
default=os.path.join(os.path.dirname(__file__), "swagger.md.j2"),
help="Jinja2 template used for conversion",
metavar="TEMPLATE"
)
args = parser.parse_args()
try:
swagger_data = json.load(open(args.input, encoding="utf8"))
except (FileNotFoundError, OSError):
swagger_data = requests.get(args.input).json()
template = jinja2.Template(open(args.template, encoding="utf8").read())
with open(args.output, "w", encoding="utf8") as output:
output.write(template.render(swagger_data=swagger_data))
|
import argparse, json, os.path
import jinja2, requests
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
"-i", "--input",
default="swagger.json",
help="path to or URL of the Swagger JSON file (default: swagger.json)",
metavar="SWAGGER_LOCATION"
)
parser.add_argument(
"-o", "--output",
default="swagger.md",
help="path to the output Markdown file (default: swagger.md)",
metavar="OUTPUT"
)
parser.add_argument(
"-t", "--template",
default=os.path.join(os.path.dirname(__file__), "swagger.md.j2"),
help="Jinja2 template used for conversion",
metavar="TEMPLATE"
)
args = parser.parse_args()
try:
swagger_data = json.load(open(args.input, encoding="utf8"))
- except FileNotFoundError:
+ except (FileNotFoundError, OSError):
? + ++++++++++
swagger_data = requests.get(args.input).json()
template = jinja2.Template(open(args.template, encoding="utf8").read())
with open(args.output, "w", encoding="utf8") as output:
output.write(template.render(swagger_data=swagger_data))
|
e2234d41831d513b4da17d1031e2856785d23089
|
ui/__init__.py
|
ui/__init__.py
|
import multiprocessing as mp
class UI:
def __init__(self, game):
parent_conn, child_conn = mp.Pipe(duplex=False)
self.ui_event_pipe = parent_conn
self.game = game
def get_move(self):
raise Exception("Method 'get_move' not implemented.")
def update(self):
raise Exception("Method 'update' not implemented.")
def run(self, mainloop):
return mainloop()
|
class UI:
def __init__(self, game):
self.game = game
def get_move(self):
raise Exception("Method 'get_move' not implemented.")
def update(self):
raise Exception("Method 'update' not implemented.")
def run(self, mainloop):
return mainloop()
|
Remove unused UI event pipe
|
Remove unused UI event pipe
|
Python
|
mit
|
ethanal/othello,ethanal/othello
|
- import multiprocessing as mp
-
-
class UI:
def __init__(self, game):
- parent_conn, child_conn = mp.Pipe(duplex=False)
- self.ui_event_pipe = parent_conn
self.game = game
def get_move(self):
raise Exception("Method 'get_move' not implemented.")
def update(self):
raise Exception("Method 'update' not implemented.")
def run(self, mainloop):
return mainloop()
|
Remove unused UI event pipe
|
## Code Before:
import multiprocessing as mp
class UI:
def __init__(self, game):
parent_conn, child_conn = mp.Pipe(duplex=False)
self.ui_event_pipe = parent_conn
self.game = game
def get_move(self):
raise Exception("Method 'get_move' not implemented.")
def update(self):
raise Exception("Method 'update' not implemented.")
def run(self, mainloop):
return mainloop()
## Instruction:
Remove unused UI event pipe
## Code After:
class UI:
def __init__(self, game):
self.game = game
def get_move(self):
raise Exception("Method 'get_move' not implemented.")
def update(self):
raise Exception("Method 'update' not implemented.")
def run(self, mainloop):
return mainloop()
|
- import multiprocessing as mp
-
-
class UI:
def __init__(self, game):
- parent_conn, child_conn = mp.Pipe(duplex=False)
- self.ui_event_pipe = parent_conn
self.game = game
def get_move(self):
raise Exception("Method 'get_move' not implemented.")
def update(self):
raise Exception("Method 'update' not implemented.")
def run(self, mainloop):
return mainloop()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.