commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 10
2.94k
| new_contents
stringlengths 21
3.18k
| subject
stringlengths 16
444
| message
stringlengths 17
2.63k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43k
| ndiff
stringlengths 52
3.32k
| instruction
stringlengths 16
444
| content
stringlengths 133
4.32k
| fuzzy_diff
stringlengths 16
3.18k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
c7efd5976f511200162610612fcd5b6f9b013a54
|
dciclient/v1/utils.py
|
dciclient/v1/utils.py
|
import click
import json
import six
def flatten(d, prefix=''):
ret = []
for k, v in d.items():
p = k if not prefix else prefix + '.' + k
if isinstance(v, dict):
ret += flatten(v, prefix=p)
else:
ret.append("%s=%s" % (p, v))
return ret
def print_json(result_json):
formatted_result = json.dumps(result_json, indent=4)
click.echo(formatted_result)
def sanitize_kwargs(**kwargs):
kwargs = dict((k, v) for k, v in six.iteritems(kwargs) if v)
try:
kwargs['data'] = json.loads(kwargs['data'])
except KeyError:
pass
return kwargs
|
import click
import json
import six
def flatten(d, prefix=''):
ret = []
for k, v in d.items():
p = k if not prefix else prefix + '.' + k
if isinstance(v, dict):
ret += flatten(v, prefix=p)
else:
ret.append("%s=%s" % (p, v))
return ret
def print_json(result_json):
formatted_result = json.dumps(result_json, indent=4)
click.echo(formatted_result)
def sanitize_kwargs(**kwargs):
kwargs = dict((k, v) for k, v in six.iteritems(kwargs) if v)
try:
kwargs['data'] = json.loads(kwargs['data'])
except KeyError:
pass
except TypeError:
pass
return kwargs
|
Fix TypeError exception when parsing json
|
Fix TypeError exception when parsing json
This change fixes the TypeError exception that is raised when it should
not while parsing json
File "/usr/lib64/python2.7/json/__init__.py", line 338, in loads
return _default_decoder.decode(s)
File "/usr/lib64/python2.7/json/decoder.py", line 366, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
TypeError: expected string or buffer
Change-Id: I1b9670adcc505084fecb54a45ce11029dc8a4d93
|
Python
|
apache-2.0
|
redhat-cip/python-dciclient,redhat-cip/python-dciclient
|
import click
import json
import six
def flatten(d, prefix=''):
ret = []
for k, v in d.items():
p = k if not prefix else prefix + '.' + k
if isinstance(v, dict):
ret += flatten(v, prefix=p)
else:
ret.append("%s=%s" % (p, v))
return ret
def print_json(result_json):
formatted_result = json.dumps(result_json, indent=4)
click.echo(formatted_result)
def sanitize_kwargs(**kwargs):
kwargs = dict((k, v) for k, v in six.iteritems(kwargs) if v)
try:
kwargs['data'] = json.loads(kwargs['data'])
except KeyError:
pass
+ except TypeError:
+ pass
return kwargs
|
Fix TypeError exception when parsing json
|
## Code Before:
import click
import json
import six
def flatten(d, prefix=''):
ret = []
for k, v in d.items():
p = k if not prefix else prefix + '.' + k
if isinstance(v, dict):
ret += flatten(v, prefix=p)
else:
ret.append("%s=%s" % (p, v))
return ret
def print_json(result_json):
formatted_result = json.dumps(result_json, indent=4)
click.echo(formatted_result)
def sanitize_kwargs(**kwargs):
kwargs = dict((k, v) for k, v in six.iteritems(kwargs) if v)
try:
kwargs['data'] = json.loads(kwargs['data'])
except KeyError:
pass
return kwargs
## Instruction:
Fix TypeError exception when parsing json
## Code After:
import click
import json
import six
def flatten(d, prefix=''):
ret = []
for k, v in d.items():
p = k if not prefix else prefix + '.' + k
if isinstance(v, dict):
ret += flatten(v, prefix=p)
else:
ret.append("%s=%s" % (p, v))
return ret
def print_json(result_json):
formatted_result = json.dumps(result_json, indent=4)
click.echo(formatted_result)
def sanitize_kwargs(**kwargs):
kwargs = dict((k, v) for k, v in six.iteritems(kwargs) if v)
try:
kwargs['data'] = json.loads(kwargs['data'])
except KeyError:
pass
except TypeError:
pass
return kwargs
|
// ... existing code ...
pass
except TypeError:
pass
// ... rest of the code ...
|
7ec92591324717cfdefc8531549654f146e8b15c
|
test/unit/test_id_iterators.py
|
test/unit/test_id_iterators.py
|
from unittest import TestCase, main
import re
from uuid import UUID
from jsonrpcclient.id_iterators import hex_iterator, uuid_iterator, \
random_iterator
class TestHexIterator(TestCase):
def test(self):
i = hex_iterator()
self.assertEqual('1', next(i))
i = hex_iterator(9)
self.assertEqual('9', next(i))
self.assertEqual('a', next(i))
class TestUUIDIterator(TestCase):
def test(self):
i = uuid_iterator()
# Raise ValueError if badly formed hexadecimal UUID string
UUID(next(i), version=4)
class TestRandomIterator(TestCase):
def test(self):
i = random_iterator()
self.assertTrue(re.match('^[0-9,a-z]{8}$', next(i)))
|
from unittest import TestCase, main
import re
from uuid import UUID
from jsonrpcclient.id_iterators import hex_iterator, uuid_iterator, \
random_iterator
class TestHexIterator(TestCase):
def test(self):
i = hex_iterator()
self.assertEqual('1', next(i))
i = hex_iterator(9)
self.assertEqual('9', next(i))
self.assertEqual('a', next(i))
class TestUUIDIterator(TestCase):
def test(self):
i = uuid_iterator()
# Raise ValueError if badly formed hexadecimal UUID string
UUID(next(i), version=4)
class TestRandomIterator(TestCase):
def test(self):
i = random_iterator()
self.assertTrue(re.match('^[0-9,a-z]{8}$', next(i)))
if __name__ == '__main__':
main()
|
Add statements to run id_iterators tests
|
Add statements to run id_iterators tests
Closes #8
|
Python
|
mit
|
bcb/jsonrpcclient
|
from unittest import TestCase, main
import re
from uuid import UUID
from jsonrpcclient.id_iterators import hex_iterator, uuid_iterator, \
random_iterator
class TestHexIterator(TestCase):
def test(self):
i = hex_iterator()
self.assertEqual('1', next(i))
i = hex_iterator(9)
self.assertEqual('9', next(i))
self.assertEqual('a', next(i))
class TestUUIDIterator(TestCase):
def test(self):
i = uuid_iterator()
# Raise ValueError if badly formed hexadecimal UUID string
UUID(next(i), version=4)
class TestRandomIterator(TestCase):
def test(self):
i = random_iterator()
self.assertTrue(re.match('^[0-9,a-z]{8}$', next(i)))
+
+ if __name__ == '__main__':
+ main()
+
|
Add statements to run id_iterators tests
|
## Code Before:
from unittest import TestCase, main
import re
from uuid import UUID
from jsonrpcclient.id_iterators import hex_iterator, uuid_iterator, \
random_iterator
class TestHexIterator(TestCase):
def test(self):
i = hex_iterator()
self.assertEqual('1', next(i))
i = hex_iterator(9)
self.assertEqual('9', next(i))
self.assertEqual('a', next(i))
class TestUUIDIterator(TestCase):
def test(self):
i = uuid_iterator()
# Raise ValueError if badly formed hexadecimal UUID string
UUID(next(i), version=4)
class TestRandomIterator(TestCase):
def test(self):
i = random_iterator()
self.assertTrue(re.match('^[0-9,a-z]{8}$', next(i)))
## Instruction:
Add statements to run id_iterators tests
## Code After:
from unittest import TestCase, main
import re
from uuid import UUID
from jsonrpcclient.id_iterators import hex_iterator, uuid_iterator, \
random_iterator
class TestHexIterator(TestCase):
def test(self):
i = hex_iterator()
self.assertEqual('1', next(i))
i = hex_iterator(9)
self.assertEqual('9', next(i))
self.assertEqual('a', next(i))
class TestUUIDIterator(TestCase):
def test(self):
i = uuid_iterator()
# Raise ValueError if badly formed hexadecimal UUID string
UUID(next(i), version=4)
class TestRandomIterator(TestCase):
def test(self):
i = random_iterator()
self.assertTrue(re.match('^[0-9,a-z]{8}$', next(i)))
if __name__ == '__main__':
main()
|
# ... existing code ...
self.assertTrue(re.match('^[0-9,a-z]{8}$', next(i)))
if __name__ == '__main__':
main()
# ... rest of the code ...
|
2f34d442157f86af4fd75c48ea2cf568fbef34f6
|
migrations/versions/223041bb858b_message_contact_association.py
|
migrations/versions/223041bb858b_message_contact_association.py
|
# revision identifiers, used by Alembic.
revision = '223041bb858b'
down_revision = '2c9f3a06de09'
# Yes, this is a terrible hack. But tools/rerank_contacts.py already contains a
# script to process contacts from messages, so it's very expedient.
import sys
sys.path.append('./tools')
from rerank_contacts import rerank_contacts
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_table(
'messagecontactassociation',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('contact_id', sa.Integer(), nullable=False),
sa.Column('message_id', sa.Integer(), nullable=False),
sa.Column('field',
sa.Enum('from_addr', 'to_addr', 'cc_addr', 'bcc_addr'),
nullable=True),
sa.ForeignKeyConstraint(['contact_id'], ['contact.id'], ),
sa.ForeignKeyConstraint(['message_id'], ['message.id'], ),
sa.PrimaryKeyConstraint('id', 'contact_id', 'message_id')
)
rerank_contacts()
def downgrade():
op.drop_table('messagecontactassociation')
|
# revision identifiers, used by Alembic.
revision = '223041bb858b'
down_revision = '2c9f3a06de09'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_table(
'messagecontactassociation',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('contact_id', sa.Integer(), nullable=False),
sa.Column('message_id', sa.Integer(), nullable=False),
sa.Column('field',
sa.Enum('from_addr', 'to_addr', 'cc_addr', 'bcc_addr'),
nullable=True),
sa.ForeignKeyConstraint(['contact_id'], ['contact.id'], ),
sa.ForeignKeyConstraint(['message_id'], ['message.id'], ),
sa.PrimaryKeyConstraint('id', 'contact_id', 'message_id')
)
# Yes, this is a terrible hack. But tools/rerank_contacts.py already
# contains a script to process contacts from messages, so it's very
# expedient.
import sys
sys.path.append('./tools')
from rerank_contacts import rerank_contacts
rerank_contacts()
def downgrade():
op.drop_table('messagecontactassociation')
|
Rearrange imports in previous migration.
|
Rearrange imports in previous migration.
According to mg bad things can happen if you try to do stuff outside of a
migration's upgrade() function.
|
Python
|
agpl-3.0
|
wakermahmud/sync-engine,nylas/sync-engine,PriviPK/privipk-sync-engine,PriviPK/privipk-sync-engine,EthanBlackburn/sync-engine,rmasters/inbox,jobscore/sync-engine,closeio/nylas,Eagles2F/sync-engine,ErinCall/sync-engine,Eagles2F/sync-engine,wakermahmud/sync-engine,closeio/nylas,Eagles2F/sync-engine,wakermahmud/sync-engine,ErinCall/sync-engine,closeio/nylas,jobscore/sync-engine,rmasters/inbox,PriviPK/privipk-sync-engine,gale320/sync-engine,ErinCall/sync-engine,PriviPK/privipk-sync-engine,EthanBlackburn/sync-engine,wakermahmud/sync-engine,ErinCall/sync-engine,EthanBlackburn/sync-engine,EthanBlackburn/sync-engine,Eagles2F/sync-engine,rmasters/inbox,nylas/sync-engine,closeio/nylas,PriviPK/privipk-sync-engine,rmasters/inbox,Eagles2F/sync-engine,ErinCall/sync-engine,gale320/sync-engine,EthanBlackburn/sync-engine,nylas/sync-engine,wakermahmud/sync-engine,jobscore/sync-engine,gale320/sync-engine,gale320/sync-engine,nylas/sync-engine,jobscore/sync-engine,gale320/sync-engine
|
# revision identifiers, used by Alembic.
revision = '223041bb858b'
down_revision = '2c9f3a06de09'
- # Yes, this is a terrible hack. But tools/rerank_contacts.py already contains a
- # script to process contacts from messages, so it's very expedient.
- import sys
- sys.path.append('./tools')
- from rerank_contacts import rerank_contacts
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_table(
'messagecontactassociation',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('contact_id', sa.Integer(), nullable=False),
sa.Column('message_id', sa.Integer(), nullable=False),
sa.Column('field',
sa.Enum('from_addr', 'to_addr', 'cc_addr', 'bcc_addr'),
nullable=True),
sa.ForeignKeyConstraint(['contact_id'], ['contact.id'], ),
sa.ForeignKeyConstraint(['message_id'], ['message.id'], ),
sa.PrimaryKeyConstraint('id', 'contact_id', 'message_id')
)
+
+ # Yes, this is a terrible hack. But tools/rerank_contacts.py already
+ # contains a script to process contacts from messages, so it's very
+ # expedient.
+ import sys
+ sys.path.append('./tools')
+ from rerank_contacts import rerank_contacts
rerank_contacts()
def downgrade():
op.drop_table('messagecontactassociation')
|
Rearrange imports in previous migration.
|
## Code Before:
# revision identifiers, used by Alembic.
revision = '223041bb858b'
down_revision = '2c9f3a06de09'
# Yes, this is a terrible hack. But tools/rerank_contacts.py already contains a
# script to process contacts from messages, so it's very expedient.
import sys
sys.path.append('./tools')
from rerank_contacts import rerank_contacts
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_table(
'messagecontactassociation',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('contact_id', sa.Integer(), nullable=False),
sa.Column('message_id', sa.Integer(), nullable=False),
sa.Column('field',
sa.Enum('from_addr', 'to_addr', 'cc_addr', 'bcc_addr'),
nullable=True),
sa.ForeignKeyConstraint(['contact_id'], ['contact.id'], ),
sa.ForeignKeyConstraint(['message_id'], ['message.id'], ),
sa.PrimaryKeyConstraint('id', 'contact_id', 'message_id')
)
rerank_contacts()
def downgrade():
op.drop_table('messagecontactassociation')
## Instruction:
Rearrange imports in previous migration.
## Code After:
# revision identifiers, used by Alembic.
revision = '223041bb858b'
down_revision = '2c9f3a06de09'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_table(
'messagecontactassociation',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('contact_id', sa.Integer(), nullable=False),
sa.Column('message_id', sa.Integer(), nullable=False),
sa.Column('field',
sa.Enum('from_addr', 'to_addr', 'cc_addr', 'bcc_addr'),
nullable=True),
sa.ForeignKeyConstraint(['contact_id'], ['contact.id'], ),
sa.ForeignKeyConstraint(['message_id'], ['message.id'], ),
sa.PrimaryKeyConstraint('id', 'contact_id', 'message_id')
)
# Yes, this is a terrible hack. But tools/rerank_contacts.py already
# contains a script to process contacts from messages, so it's very
# expedient.
import sys
sys.path.append('./tools')
from rerank_contacts import rerank_contacts
rerank_contacts()
def downgrade():
op.drop_table('messagecontactassociation')
|
# ... existing code ...
# ... modified code ...
)
# Yes, this is a terrible hack. But tools/rerank_contacts.py already
# contains a script to process contacts from messages, so it's very
# expedient.
import sys
sys.path.append('./tools')
from rerank_contacts import rerank_contacts
rerank_contacts()
# ... rest of the code ...
|
d58576bc658f1433351c0cf9ac0225537e17f472
|
cobe/brain.py
|
cobe/brain.py
|
import itertools
import logging
from cobe.analysis import (
AccentNormalizer, StemNormalizer, TokenNormalizer, WhitespaceAnalyzer)
from cobe.kvstore import SqliteStore
from cobe.model import Model
from cobe.search import RandomWalkSearcher
log = logging.getLogger(__name__)
class StandardAnalyzer(WhitespaceAnalyzer):
"""A basic analyzer for test purposes.
This combines a whitespace tokenizer with AccentNormalizer.
"""
def __init__(self):
super(StandardAnalyzer, self).__init__()
self.add_token_normalizer(AccentNormalizer())
self.add_token_normalizer(StemNormalizer("english"))
class Brain(object):
"""The all-in-one interface to a cobe stack."""
def __init__(self, filename):
self.analyzer = StandardAnalyzer()
store = SqliteStore(filename)
self.model = Model(self.analyzer, store)
self.searcher = RandomWalkSearcher(self.model)
def reply(self, text):
# Create a search query from the input
query = self.analyzer.query(text, self.model)
result = itertools.islice(self.searcher.search(query), 1).next()
return self.analyzer.join(result)
def train(self, text):
pass
|
import itertools
import logging
from cobe.analysis import (
AccentNormalizer, StemNormalizer, WhitespaceAnalyzer)
from cobe.kvstore import SqliteStore
from cobe.model import Model
from cobe.search import RandomWalkSearcher
log = logging.getLogger(__name__)
class StandardAnalyzer(WhitespaceAnalyzer):
"""A basic analyzer for test purposes.
This combines a whitespace tokenizer with AccentNormalizer.
"""
def __init__(self):
super(StandardAnalyzer, self).__init__()
self.add_token_normalizer(AccentNormalizer())
self.add_token_normalizer(StemNormalizer("english"))
class Brain(object):
"""The all-in-one interface to a cobe stack."""
def __init__(self, filename):
self.analyzer = StandardAnalyzer()
store = SqliteStore(filename)
self.model = Model(self.analyzer, store)
self.searcher = RandomWalkSearcher(self.model)
def reply(self, text):
# Create a search query from the input
query = self.analyzer.query(text, self.model)
result = itertools.islice(self.searcher.search(query), 1).next()
return self.analyzer.join(result)
def train(self, text):
pass
|
Remove unused import of TokenNormalizer
|
Remove unused import of TokenNormalizer
Fixes the build
|
Python
|
mit
|
wodim/cobe-ng,LeMagnesium/cobe,pteichman/cobe,tiagochiavericosta/cobe,meska/cobe,pteichman/cobe,LeMagnesium/cobe,wodim/cobe-ng,tiagochiavericosta/cobe,DarkMio/cobe,meska/cobe,DarkMio/cobe
|
import itertools
import logging
from cobe.analysis import (
- AccentNormalizer, StemNormalizer, TokenNormalizer, WhitespaceAnalyzer)
+ AccentNormalizer, StemNormalizer, WhitespaceAnalyzer)
from cobe.kvstore import SqliteStore
from cobe.model import Model
from cobe.search import RandomWalkSearcher
log = logging.getLogger(__name__)
class StandardAnalyzer(WhitespaceAnalyzer):
"""A basic analyzer for test purposes.
This combines a whitespace tokenizer with AccentNormalizer.
"""
def __init__(self):
super(StandardAnalyzer, self).__init__()
self.add_token_normalizer(AccentNormalizer())
self.add_token_normalizer(StemNormalizer("english"))
class Brain(object):
"""The all-in-one interface to a cobe stack."""
def __init__(self, filename):
self.analyzer = StandardAnalyzer()
store = SqliteStore(filename)
self.model = Model(self.analyzer, store)
self.searcher = RandomWalkSearcher(self.model)
def reply(self, text):
# Create a search query from the input
query = self.analyzer.query(text, self.model)
result = itertools.islice(self.searcher.search(query), 1).next()
return self.analyzer.join(result)
def train(self, text):
pass
|
Remove unused import of TokenNormalizer
|
## Code Before:
import itertools
import logging
from cobe.analysis import (
AccentNormalizer, StemNormalizer, TokenNormalizer, WhitespaceAnalyzer)
from cobe.kvstore import SqliteStore
from cobe.model import Model
from cobe.search import RandomWalkSearcher
log = logging.getLogger(__name__)
class StandardAnalyzer(WhitespaceAnalyzer):
"""A basic analyzer for test purposes.
This combines a whitespace tokenizer with AccentNormalizer.
"""
def __init__(self):
super(StandardAnalyzer, self).__init__()
self.add_token_normalizer(AccentNormalizer())
self.add_token_normalizer(StemNormalizer("english"))
class Brain(object):
"""The all-in-one interface to a cobe stack."""
def __init__(self, filename):
self.analyzer = StandardAnalyzer()
store = SqliteStore(filename)
self.model = Model(self.analyzer, store)
self.searcher = RandomWalkSearcher(self.model)
def reply(self, text):
# Create a search query from the input
query = self.analyzer.query(text, self.model)
result = itertools.islice(self.searcher.search(query), 1).next()
return self.analyzer.join(result)
def train(self, text):
pass
## Instruction:
Remove unused import of TokenNormalizer
## Code After:
import itertools
import logging
from cobe.analysis import (
AccentNormalizer, StemNormalizer, WhitespaceAnalyzer)
from cobe.kvstore import SqliteStore
from cobe.model import Model
from cobe.search import RandomWalkSearcher
log = logging.getLogger(__name__)
class StandardAnalyzer(WhitespaceAnalyzer):
"""A basic analyzer for test purposes.
This combines a whitespace tokenizer with AccentNormalizer.
"""
def __init__(self):
super(StandardAnalyzer, self).__init__()
self.add_token_normalizer(AccentNormalizer())
self.add_token_normalizer(StemNormalizer("english"))
class Brain(object):
"""The all-in-one interface to a cobe stack."""
def __init__(self, filename):
self.analyzer = StandardAnalyzer()
store = SqliteStore(filename)
self.model = Model(self.analyzer, store)
self.searcher = RandomWalkSearcher(self.model)
def reply(self, text):
# Create a search query from the input
query = self.analyzer.query(text, self.model)
result = itertools.islice(self.searcher.search(query), 1).next()
return self.analyzer.join(result)
def train(self, text):
pass
|
...
from cobe.analysis import (
AccentNormalizer, StemNormalizer, WhitespaceAnalyzer)
from cobe.kvstore import SqliteStore
...
|
83938c9bf7aafc1f7a2a6b9594279600012ee7ef
|
setup.py
|
setup.py
|
import distutils.core
import os.path
from pypel import __version__
def read(filename):
"""Small tool function to read file content."""
return open(os.path.join(os.path.dirname(__file__), filename)).read()
classifiers = '''
Development Status :: 3 - Alpha
Environment :: Console
Intended Audience :: End Users/Desktop
License :: OSI Approved :: BSD License
Operating System :: POSIX
Programming Language :: Python
Programming Language :: Python :: 2
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3
Programming Language :: Python :: 3.2
Programming Language :: Python :: 3.3
Topic :: Office/Business :: Financial
'''.strip().splitlines()
distutils.core.setup(
name = 'pypel',
version = __version__,
license = 'BSD',
description = 'simple tool to manage receipts',
long_description = read('README.rst'),
classifiers = classifiers,
url = 'http://mornie.org/projects/pypel/',
author = 'Daniele Tricoli',
author_email = '[email protected]',
packages = ['pypel'],
package_dir = dict(pypel='pypel'),
scripts = ['bin/pypel']
)
|
import distutils.core
import os.path
from pypel import __version__
def read(filename):
"""Small tool function to read file content."""
return open(os.path.join(os.path.dirname(__file__), filename)).read()
classifiers = '''
Development Status :: 3 - Alpha
Environment :: Console
Intended Audience :: End Users/Desktop
License :: OSI Approved :: BSD License
Operating System :: POSIX
Programming Language :: Python
Programming Language :: Python :: 2
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3
Programming Language :: Python :: 3.4
Topic :: Office/Business :: Financial
'''.strip().splitlines()
distutils.core.setup(
name = 'pypel',
version = __version__,
license = 'BSD',
description = 'simple tool to manage receipts',
long_description = read('README.rst'),
classifiers = classifiers,
url = 'http://mornie.org/projects/pypel/',
author = 'Daniele Tricoli',
author_email = '[email protected]',
packages = ['pypel'],
package_dir = dict(pypel='pypel'),
scripts = ['bin/pypel']
)
|
Update Python supported versions classifiers
|
Update Python supported versions classifiers
|
Python
|
bsd-3-clause
|
eriol/pypel
|
import distutils.core
import os.path
from pypel import __version__
def read(filename):
"""Small tool function to read file content."""
return open(os.path.join(os.path.dirname(__file__), filename)).read()
classifiers = '''
Development Status :: 3 - Alpha
Environment :: Console
Intended Audience :: End Users/Desktop
License :: OSI Approved :: BSD License
Operating System :: POSIX
Programming Language :: Python
Programming Language :: Python :: 2
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3
- Programming Language :: Python :: 3.2
+ Programming Language :: Python :: 3.4
- Programming Language :: Python :: 3.3
Topic :: Office/Business :: Financial
'''.strip().splitlines()
distutils.core.setup(
name = 'pypel',
version = __version__,
license = 'BSD',
description = 'simple tool to manage receipts',
long_description = read('README.rst'),
classifiers = classifiers,
url = 'http://mornie.org/projects/pypel/',
author = 'Daniele Tricoli',
author_email = '[email protected]',
packages = ['pypel'],
package_dir = dict(pypel='pypel'),
scripts = ['bin/pypel']
)
|
Update Python supported versions classifiers
|
## Code Before:
import distutils.core
import os.path
from pypel import __version__
def read(filename):
"""Small tool function to read file content."""
return open(os.path.join(os.path.dirname(__file__), filename)).read()
classifiers = '''
Development Status :: 3 - Alpha
Environment :: Console
Intended Audience :: End Users/Desktop
License :: OSI Approved :: BSD License
Operating System :: POSIX
Programming Language :: Python
Programming Language :: Python :: 2
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3
Programming Language :: Python :: 3.2
Programming Language :: Python :: 3.3
Topic :: Office/Business :: Financial
'''.strip().splitlines()
distutils.core.setup(
name = 'pypel',
version = __version__,
license = 'BSD',
description = 'simple tool to manage receipts',
long_description = read('README.rst'),
classifiers = classifiers,
url = 'http://mornie.org/projects/pypel/',
author = 'Daniele Tricoli',
author_email = '[email protected]',
packages = ['pypel'],
package_dir = dict(pypel='pypel'),
scripts = ['bin/pypel']
)
## Instruction:
Update Python supported versions classifiers
## Code After:
import distutils.core
import os.path
from pypel import __version__
def read(filename):
"""Small tool function to read file content."""
return open(os.path.join(os.path.dirname(__file__), filename)).read()
classifiers = '''
Development Status :: 3 - Alpha
Environment :: Console
Intended Audience :: End Users/Desktop
License :: OSI Approved :: BSD License
Operating System :: POSIX
Programming Language :: Python
Programming Language :: Python :: 2
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3
Programming Language :: Python :: 3.4
Topic :: Office/Business :: Financial
'''.strip().splitlines()
distutils.core.setup(
name = 'pypel',
version = __version__,
license = 'BSD',
description = 'simple tool to manage receipts',
long_description = read('README.rst'),
classifiers = classifiers,
url = 'http://mornie.org/projects/pypel/',
author = 'Daniele Tricoli',
author_email = '[email protected]',
packages = ['pypel'],
package_dir = dict(pypel='pypel'),
scripts = ['bin/pypel']
)
|
...
Programming Language :: Python :: 3
Programming Language :: Python :: 3.4
Topic :: Office/Business :: Financial
...
|
46a568690a9a284ddc350519a15e092e1211d073
|
reviewboard/site/urlresolvers.py
|
reviewboard/site/urlresolvers.py
|
from __future__ import unicode_literals
from django.core.urlresolvers import NoReverseMatch, reverse
def local_site_reverse(viewname, request=None, local_site_name=None,
args=None, kwargs=None, *func_args, **func_kwargs):
"""Reverses a URL name, returning a working URL.
This works much like Django's reverse(), but handles returning a
localsite version of a URL when invoked with a request within a localsite.
"""
if request or local_site_name:
if request and not local_site_name:
local_site_name = getattr(request, '_local_site_name', None)
if local_site_name:
if args:
new_args = [local_site_name] + args
new_kwargs = kwargs
else:
new_args = args
new_kwargs = {
'local_site_name': local_site_name,
}
if kwargs:
new_kwargs.update(kwargs)
try:
return reverse(viewname, args=new_args, kwargs=new_kwargs,
*func_args, **func_kwargs)
except NoReverseMatch:
# We'll try it again without those arguments.
pass
return reverse(viewname, args=args, kwargs=kwargs,
*func_args, **func_kwargs)
|
from __future__ import unicode_literals
from django.core.urlresolvers import NoReverseMatch, reverse
def local_site_reverse(viewname, request=None, local_site_name=None,
local_site=None, args=None, kwargs=None,
*func_args, **func_kwargs):
"""Reverses a URL name, returning a working URL.
This works much like Django's reverse(), but handles returning a
localsite version of a URL when invoked with a request within a localsite.
"""
assert not (local_site_name and local_site)
if request or local_site_name or local_site:
if local_site:
local_site_name = local_site.name
elif request and not local_site_name:
local_site_name = getattr(request, '_local_site_name', None)
if local_site_name:
if args:
new_args = [local_site_name] + args
new_kwargs = kwargs
else:
new_args = args
new_kwargs = {
'local_site_name': local_site_name,
}
if kwargs:
new_kwargs.update(kwargs)
try:
return reverse(viewname, args=new_args, kwargs=new_kwargs,
*func_args, **func_kwargs)
except NoReverseMatch:
# We'll try it again without those arguments.
pass
return reverse(viewname, args=args, kwargs=kwargs,
*func_args, **func_kwargs)
|
Allow local_site_reverse to take an actual LocalSite.
|
Allow local_site_reverse to take an actual LocalSite.
local_site_reverse was able to take a LocalSite's name, or a request
object, but if you actually had a LocalSite (or None), you'd have to
write your own conditional to extract the name and pass it.
Now, local_site_reverse can take a LocalSite. This saves a database
query, if one is already available, and simplifies calling code.
Reviewed at https://reviews.reviewboard.org/r/6302/
|
Python
|
mit
|
custode/reviewboard,custode/reviewboard,bkochendorfer/reviewboard,custode/reviewboard,brennie/reviewboard,reviewboard/reviewboard,KnowNo/reviewboard,sgallagher/reviewboard,custode/reviewboard,sgallagher/reviewboard,brennie/reviewboard,davidt/reviewboard,chipx86/reviewboard,KnowNo/reviewboard,chipx86/reviewboard,KnowNo/reviewboard,beol/reviewboard,bkochendorfer/reviewboard,davidt/reviewboard,bkochendorfer/reviewboard,beol/reviewboard,chipx86/reviewboard,davidt/reviewboard,bkochendorfer/reviewboard,reviewboard/reviewboard,chipx86/reviewboard,davidt/reviewboard,brennie/reviewboard,reviewboard/reviewboard,brennie/reviewboard,beol/reviewboard,KnowNo/reviewboard,sgallagher/reviewboard,reviewboard/reviewboard,beol/reviewboard,sgallagher/reviewboard
|
from __future__ import unicode_literals
from django.core.urlresolvers import NoReverseMatch, reverse
def local_site_reverse(viewname, request=None, local_site_name=None,
+ local_site=None, args=None, kwargs=None,
- args=None, kwargs=None, *func_args, **func_kwargs):
+ *func_args, **func_kwargs):
"""Reverses a URL name, returning a working URL.
This works much like Django's reverse(), but handles returning a
localsite version of a URL when invoked with a request within a localsite.
"""
+ assert not (local_site_name and local_site)
+
- if request or local_site_name:
+ if request or local_site_name or local_site:
+ if local_site:
+ local_site_name = local_site.name
- if request and not local_site_name:
+ elif request and not local_site_name:
local_site_name = getattr(request, '_local_site_name', None)
if local_site_name:
if args:
new_args = [local_site_name] + args
new_kwargs = kwargs
else:
new_args = args
new_kwargs = {
'local_site_name': local_site_name,
}
if kwargs:
new_kwargs.update(kwargs)
try:
return reverse(viewname, args=new_args, kwargs=new_kwargs,
*func_args, **func_kwargs)
except NoReverseMatch:
# We'll try it again without those arguments.
pass
return reverse(viewname, args=args, kwargs=kwargs,
*func_args, **func_kwargs)
|
Allow local_site_reverse to take an actual LocalSite.
|
## Code Before:
from __future__ import unicode_literals
from django.core.urlresolvers import NoReverseMatch, reverse
def local_site_reverse(viewname, request=None, local_site_name=None,
args=None, kwargs=None, *func_args, **func_kwargs):
"""Reverses a URL name, returning a working URL.
This works much like Django's reverse(), but handles returning a
localsite version of a URL when invoked with a request within a localsite.
"""
if request or local_site_name:
if request and not local_site_name:
local_site_name = getattr(request, '_local_site_name', None)
if local_site_name:
if args:
new_args = [local_site_name] + args
new_kwargs = kwargs
else:
new_args = args
new_kwargs = {
'local_site_name': local_site_name,
}
if kwargs:
new_kwargs.update(kwargs)
try:
return reverse(viewname, args=new_args, kwargs=new_kwargs,
*func_args, **func_kwargs)
except NoReverseMatch:
# We'll try it again without those arguments.
pass
return reverse(viewname, args=args, kwargs=kwargs,
*func_args, **func_kwargs)
## Instruction:
Allow local_site_reverse to take an actual LocalSite.
## Code After:
from __future__ import unicode_literals
from django.core.urlresolvers import NoReverseMatch, reverse
def local_site_reverse(viewname, request=None, local_site_name=None,
local_site=None, args=None, kwargs=None,
*func_args, **func_kwargs):
"""Reverses a URL name, returning a working URL.
This works much like Django's reverse(), but handles returning a
localsite version of a URL when invoked with a request within a localsite.
"""
assert not (local_site_name and local_site)
if request or local_site_name or local_site:
if local_site:
local_site_name = local_site.name
elif request and not local_site_name:
local_site_name = getattr(request, '_local_site_name', None)
if local_site_name:
if args:
new_args = [local_site_name] + args
new_kwargs = kwargs
else:
new_args = args
new_kwargs = {
'local_site_name': local_site_name,
}
if kwargs:
new_kwargs.update(kwargs)
try:
return reverse(viewname, args=new_args, kwargs=new_kwargs,
*func_args, **func_kwargs)
except NoReverseMatch:
# We'll try it again without those arguments.
pass
return reverse(viewname, args=args, kwargs=kwargs,
*func_args, **func_kwargs)
|
...
def local_site_reverse(viewname, request=None, local_site_name=None,
local_site=None, args=None, kwargs=None,
*func_args, **func_kwargs):
"""Reverses a URL name, returning a working URL.
...
"""
assert not (local_site_name and local_site)
if request or local_site_name or local_site:
if local_site:
local_site_name = local_site.name
elif request and not local_site_name:
local_site_name = getattr(request, '_local_site_name', None)
...
|
d2e9289167b538fe5ef83edcbfce3d5f023de088
|
lib/core/countpage.py
|
lib/core/countpage.py
|
'''
Copyright (c) 2016 anti-XSS developers
'''
class CountPage(object):
__number = 0
def __init__(self, number=0):
self.__number = number
def setNumber(self, number):
self.__number = number
def getNumber(self):
return self.__number
def incNumber(self):
self.__number += 1
|
'''
Copyright (c) 2016 anti-XSS developers
'''
class CountPage(object):
number = 0
def __init__(self, number=0):
self.number = number
def setNumber(self, number):
self.number = number
def getNumber(self):
return self.number
def incNumber(self):
self.number += 1
|
Modify CountPage to a public class
|
Modify CountPage to a public class
|
Python
|
mit
|
lewangbtcc/anti-XSS,lewangbtcc/anti-XSS
|
'''
Copyright (c) 2016 anti-XSS developers
'''
class CountPage(object):
- __number = 0
+ number = 0
def __init__(self, number=0):
- self.__number = number
+ self.number = number
def setNumber(self, number):
- self.__number = number
+ self.number = number
def getNumber(self):
- return self.__number
+ return self.number
def incNumber(self):
- self.__number += 1
+ self.number += 1
|
Modify CountPage to a public class
|
## Code Before:
'''
Copyright (c) 2016 anti-XSS developers
'''
class CountPage(object):
__number = 0
def __init__(self, number=0):
self.__number = number
def setNumber(self, number):
self.__number = number
def getNumber(self):
return self.__number
def incNumber(self):
self.__number += 1
## Instruction:
Modify CountPage to a public class
## Code After:
'''
Copyright (c) 2016 anti-XSS developers
'''
class CountPage(object):
number = 0
def __init__(self, number=0):
self.number = number
def setNumber(self, number):
self.number = number
def getNumber(self):
return self.number
def incNumber(self):
self.number += 1
|
# ... existing code ...
number = 0
# ... modified code ...
def __init__(self, number=0):
self.number = number
...
def setNumber(self, number):
self.number = number
...
def getNumber(self):
return self.number
...
def incNumber(self):
self.number += 1
# ... rest of the code ...
|
b1deec08fe23eb89dd51471c6f11e2e3da69a563
|
aospy/__init__.py
|
aospy/__init__.py
|
"""aospy: management, analysis, and plotting of gridded climate data."""
from .__config__ import (user_path, LAT_STR, LON_STR, PFULL_STR, PHALF_STR,
PLEVEL_STR, TIME_STR)
from . import constants
from .constants import Constant
from . import numerics
from .numerics import FiniteDiff
from . import utils
from . import io
from . import timedate
from .timedate import TimeManager
from . import units
from .units import Units
from . import operator
from .operator import Operator
#from . import spharm_interface # On hold in python3
#from .spharm_interface import SpharmInterface
from . import var
from .var import Var
from . import region
from .region import Region
from . import run
from .run import Run
from . import model
from .model import Model
from . import proj
from .proj import Proj
from . import calc
from .calc import CalcInterface, Calc
from . import plotting
from .plotting import Fig, Ax, Plot
__all__ = ['Proj', 'Model', 'Run', 'Var', 'Units', 'Constant', 'Region',
'Fig', 'Ax', 'Plot', 'units', 'calc', 'constants', 'utils', 'io',
'plotting']
|
"""aospy: management, analysis, and plotting of gridded climate data."""
from .__config__ import (user_path, LAT_STR, LON_STR, PFULL_STR, PHALF_STR,
PLEVEL_STR, TIME_STR, TIME_STR_IDEALIZED)
from . import constants
from .constants import Constant
from . import numerics
from .numerics import FiniteDiff
from . import utils
from . import io
from . import timedate
from .timedate import TimeManager
from . import units
from .units import Units
from . import operator
from .operator import Operator
#from . import spharm_interface # On hold in python3
#from .spharm_interface import SpharmInterface
from . import var
from .var import Var
from . import region
from .region import Region
from . import run
from .run import Run
from . import model
from .model import Model
from . import proj
from .proj import Proj
from . import calc
from .calc import CalcInterface, Calc
from . import plotting
from .plotting import Fig, Ax, Plot
__all__ = ['Proj', 'Model', 'Run', 'Var', 'Units', 'Constant', 'Region',
'Fig', 'Ax', 'Plot', 'units', 'calc', 'constants', 'utils', 'io',
'plotting']
|
Add TIME_STR_IDEALIZED to string labels
|
Add TIME_STR_IDEALIZED to string labels
|
Python
|
apache-2.0
|
spencerkclark/aospy,spencerahill/aospy
|
"""aospy: management, analysis, and plotting of gridded climate data."""
from .__config__ import (user_path, LAT_STR, LON_STR, PFULL_STR, PHALF_STR,
- PLEVEL_STR, TIME_STR)
+ PLEVEL_STR, TIME_STR, TIME_STR_IDEALIZED)
from . import constants
from .constants import Constant
from . import numerics
from .numerics import FiniteDiff
from . import utils
from . import io
from . import timedate
from .timedate import TimeManager
from . import units
from .units import Units
from . import operator
from .operator import Operator
#from . import spharm_interface # On hold in python3
#from .spharm_interface import SpharmInterface
from . import var
from .var import Var
from . import region
from .region import Region
from . import run
from .run import Run
from . import model
from .model import Model
from . import proj
from .proj import Proj
from . import calc
from .calc import CalcInterface, Calc
from . import plotting
from .plotting import Fig, Ax, Plot
__all__ = ['Proj', 'Model', 'Run', 'Var', 'Units', 'Constant', 'Region',
'Fig', 'Ax', 'Plot', 'units', 'calc', 'constants', 'utils', 'io',
'plotting']
|
Add TIME_STR_IDEALIZED to string labels
|
## Code Before:
"""aospy: management, analysis, and plotting of gridded climate data."""
from .__config__ import (user_path, LAT_STR, LON_STR, PFULL_STR, PHALF_STR,
PLEVEL_STR, TIME_STR)
from . import constants
from .constants import Constant
from . import numerics
from .numerics import FiniteDiff
from . import utils
from . import io
from . import timedate
from .timedate import TimeManager
from . import units
from .units import Units
from . import operator
from .operator import Operator
#from . import spharm_interface # On hold in python3
#from .spharm_interface import SpharmInterface
from . import var
from .var import Var
from . import region
from .region import Region
from . import run
from .run import Run
from . import model
from .model import Model
from . import proj
from .proj import Proj
from . import calc
from .calc import CalcInterface, Calc
from . import plotting
from .plotting import Fig, Ax, Plot
__all__ = ['Proj', 'Model', 'Run', 'Var', 'Units', 'Constant', 'Region',
'Fig', 'Ax', 'Plot', 'units', 'calc', 'constants', 'utils', 'io',
'plotting']
## Instruction:
Add TIME_STR_IDEALIZED to string labels
## Code After:
"""aospy: management, analysis, and plotting of gridded climate data."""
from .__config__ import (user_path, LAT_STR, LON_STR, PFULL_STR, PHALF_STR,
PLEVEL_STR, TIME_STR, TIME_STR_IDEALIZED)
from . import constants
from .constants import Constant
from . import numerics
from .numerics import FiniteDiff
from . import utils
from . import io
from . import timedate
from .timedate import TimeManager
from . import units
from .units import Units
from . import operator
from .operator import Operator
#from . import spharm_interface # On hold in python3
#from .spharm_interface import SpharmInterface
from . import var
from .var import Var
from . import region
from .region import Region
from . import run
from .run import Run
from . import model
from .model import Model
from . import proj
from .proj import Proj
from . import calc
from .calc import CalcInterface, Calc
from . import plotting
from .plotting import Fig, Ax, Plot
__all__ = ['Proj', 'Model', 'Run', 'Var', 'Units', 'Constant', 'Region',
'Fig', 'Ax', 'Plot', 'units', 'calc', 'constants', 'utils', 'io',
'plotting']
|
...
from .__config__ import (user_path, LAT_STR, LON_STR, PFULL_STR, PHALF_STR,
PLEVEL_STR, TIME_STR, TIME_STR_IDEALIZED)
from . import constants
...
|
e8092ec82ff8ee9c0104b507751e45555c08685b
|
tests/tests.py
|
tests/tests.py
|
from __future__ import unicode_literals, absolute_import
from django.test import TestCase
from tags.models import Tag
from .models import Food
class TestFoodModel(TestCase):
def test_create_food(self):
food = Food.objects.create(
name="nacho",
tags="tortilla chips")
self.assertTrue(food)
self.assertEqual(Tag.objects.all()[0].name, "tortilla chips")
self.assertEqual(Tag.objects.all()[0].slug, "tortilla-chips")
def test_create_two_tags(self):
food = Food.objects.create(
name="nacho",
tags="tortilla chips, salsa")
tags = Tag.objects.all()
self.assertTrue(food)
self.assertEqual(len(tags), 2)
self.assertEqual(tags[1].name, "tortilla chips")
self.assertEqual(tags[1].slug, "tortilla-chips")
self.assertEqual(tags[0].name, " salsa")
self.assertEqual(tags[0].slug, "salsa")
|
from __future__ import unicode_literals, absolute_import
from django.test import TestCase
from tags.models import Tag
from .models import Food
class TestFoodModel(TestCase):
def test_create_food(self):
food = Food.objects.create(
name="nacho",
tags="tortilla chips")
self.assertTrue(food)
self.assertEqual(Tag.objects.all()[0].name, "tortilla chips")
self.assertEqual(Tag.objects.all()[0].slug, "tortilla-chips")
def test_create_two_tags(self):
food = Food.objects.create(
name="nacho",
tags="tortilla chips, salsa")
tags = Tag.objects.all()
self.assertTrue(food)
self.assertEqual(len(tags), 2)
self.assertEqual(tags[1].slug, "tortilla-chips")
self.assertEqual(tags[0].slug, "salsa")
|
Fix test on python 3.3
|
Fix test on python 3.3
|
Python
|
mit
|
avelino/django-tags
|
from __future__ import unicode_literals, absolute_import
from django.test import TestCase
from tags.models import Tag
from .models import Food
class TestFoodModel(TestCase):
def test_create_food(self):
food = Food.objects.create(
name="nacho",
tags="tortilla chips")
self.assertTrue(food)
self.assertEqual(Tag.objects.all()[0].name, "tortilla chips")
self.assertEqual(Tag.objects.all()[0].slug, "tortilla-chips")
def test_create_two_tags(self):
food = Food.objects.create(
name="nacho",
tags="tortilla chips, salsa")
tags = Tag.objects.all()
self.assertTrue(food)
self.assertEqual(len(tags), 2)
- self.assertEqual(tags[1].name, "tortilla chips")
self.assertEqual(tags[1].slug, "tortilla-chips")
- self.assertEqual(tags[0].name, " salsa")
self.assertEqual(tags[0].slug, "salsa")
|
Fix test on python 3.3
|
## Code Before:
from __future__ import unicode_literals, absolute_import
from django.test import TestCase
from tags.models import Tag
from .models import Food
class TestFoodModel(TestCase):
def test_create_food(self):
food = Food.objects.create(
name="nacho",
tags="tortilla chips")
self.assertTrue(food)
self.assertEqual(Tag.objects.all()[0].name, "tortilla chips")
self.assertEqual(Tag.objects.all()[0].slug, "tortilla-chips")
def test_create_two_tags(self):
food = Food.objects.create(
name="nacho",
tags="tortilla chips, salsa")
tags = Tag.objects.all()
self.assertTrue(food)
self.assertEqual(len(tags), 2)
self.assertEqual(tags[1].name, "tortilla chips")
self.assertEqual(tags[1].slug, "tortilla-chips")
self.assertEqual(tags[0].name, " salsa")
self.assertEqual(tags[0].slug, "salsa")
## Instruction:
Fix test on python 3.3
## Code After:
from __future__ import unicode_literals, absolute_import
from django.test import TestCase
from tags.models import Tag
from .models import Food
class TestFoodModel(TestCase):
def test_create_food(self):
food = Food.objects.create(
name="nacho",
tags="tortilla chips")
self.assertTrue(food)
self.assertEqual(Tag.objects.all()[0].name, "tortilla chips")
self.assertEqual(Tag.objects.all()[0].slug, "tortilla-chips")
def test_create_two_tags(self):
food = Food.objects.create(
name="nacho",
tags="tortilla chips, salsa")
tags = Tag.objects.all()
self.assertTrue(food)
self.assertEqual(len(tags), 2)
self.assertEqual(tags[1].slug, "tortilla-chips")
self.assertEqual(tags[0].slug, "salsa")
|
// ... existing code ...
self.assertEqual(len(tags), 2)
self.assertEqual(tags[1].slug, "tortilla-chips")
self.assertEqual(tags[0].slug, "salsa")
// ... rest of the code ...
|
74eb842870424a22334fee35881f1b6c877da8e6
|
scot/backend_mne.py
|
scot/backend_mne.py
|
"""Use mne-python routines as backend."""
from __future__ import absolute_import
import scipy as sp
from . import datatools
from . import backend
from . import backend_builtin as builtin
def generate():
from mne.preprocessing.infomax_ import infomax
def wrapper_infomax(data, random_state=None):
"""Call Infomax for ICA calculation."""
u = infomax(datatools.cat_trials(data).T, extended=True,
random_state=random_state).T
m = sp.linalg.pinv(u)
return m, u
def wrapper_csp(x, cl, reducedim):
"""Call MNE CSP algorithm."""
from mne.decoding import CSP
csp = CSP(n_components=reducedim, cov_est="epoch")
csp.fit(x, cl)
c, d = csp.filters_.T[:, :reducedim], csp.patterns_[:reducedim, :]
y = datatools.dot_special(c.T, x)
return c, d, y
backend = builtin.generate()
backend.update({'ica': wrapper_infomax, 'csp': wrapper_csp})
return backend
backend.register('mne', generate)
|
"""Use mne-python routines as backend."""
from __future__ import absolute_import
import scipy as sp
from . import datatools
from . import backend
from . import backend_builtin as builtin
def generate():
from mne.preprocessing.infomax_ import infomax
def wrapper_infomax(data, random_state=None):
"""Call Infomax for ICA calculation."""
u = infomax(datatools.cat_trials(data).T, extended=True,
random_state=random_state).T
m = sp.linalg.pinv(u)
return m, u
def wrapper_csp(x, cl, reducedim):
"""Call MNE CSP algorithm."""
from mne.decoding import CSP
csp = CSP(n_components=reducedim, cov_est="epoch", reg="ledoit_wolf")
csp.fit(x, cl)
c, d = csp.filters_.T[:, :reducedim], csp.patterns_[:reducedim, :]
y = datatools.dot_special(c.T, x)
return c, d, y
backend = builtin.generate()
backend.update({'ica': wrapper_infomax, 'csp': wrapper_csp})
return backend
backend.register('mne', generate)
|
Use regularized covariance in CSP by default
|
Use regularized covariance in CSP by default
|
Python
|
mit
|
scot-dev/scot,cbrnr/scot,mbillingr/SCoT,cbrnr/scot,scot-dev/scot,cle1109/scot,cle1109/scot,mbillingr/SCoT
|
"""Use mne-python routines as backend."""
from __future__ import absolute_import
import scipy as sp
from . import datatools
from . import backend
from . import backend_builtin as builtin
def generate():
from mne.preprocessing.infomax_ import infomax
def wrapper_infomax(data, random_state=None):
"""Call Infomax for ICA calculation."""
u = infomax(datatools.cat_trials(data).T, extended=True,
random_state=random_state).T
m = sp.linalg.pinv(u)
return m, u
def wrapper_csp(x, cl, reducedim):
"""Call MNE CSP algorithm."""
from mne.decoding import CSP
- csp = CSP(n_components=reducedim, cov_est="epoch")
+ csp = CSP(n_components=reducedim, cov_est="epoch", reg="ledoit_wolf")
csp.fit(x, cl)
c, d = csp.filters_.T[:, :reducedim], csp.patterns_[:reducedim, :]
y = datatools.dot_special(c.T, x)
return c, d, y
backend = builtin.generate()
backend.update({'ica': wrapper_infomax, 'csp': wrapper_csp})
return backend
backend.register('mne', generate)
|
Use regularized covariance in CSP by default
|
## Code Before:
"""Use mne-python routines as backend."""
from __future__ import absolute_import
import scipy as sp
from . import datatools
from . import backend
from . import backend_builtin as builtin
def generate():
from mne.preprocessing.infomax_ import infomax
def wrapper_infomax(data, random_state=None):
"""Call Infomax for ICA calculation."""
u = infomax(datatools.cat_trials(data).T, extended=True,
random_state=random_state).T
m = sp.linalg.pinv(u)
return m, u
def wrapper_csp(x, cl, reducedim):
"""Call MNE CSP algorithm."""
from mne.decoding import CSP
csp = CSP(n_components=reducedim, cov_est="epoch")
csp.fit(x, cl)
c, d = csp.filters_.T[:, :reducedim], csp.patterns_[:reducedim, :]
y = datatools.dot_special(c.T, x)
return c, d, y
backend = builtin.generate()
backend.update({'ica': wrapper_infomax, 'csp': wrapper_csp})
return backend
backend.register('mne', generate)
## Instruction:
Use regularized covariance in CSP by default
## Code After:
"""Use mne-python routines as backend."""
from __future__ import absolute_import
import scipy as sp
from . import datatools
from . import backend
from . import backend_builtin as builtin
def generate():
from mne.preprocessing.infomax_ import infomax
def wrapper_infomax(data, random_state=None):
"""Call Infomax for ICA calculation."""
u = infomax(datatools.cat_trials(data).T, extended=True,
random_state=random_state).T
m = sp.linalg.pinv(u)
return m, u
def wrapper_csp(x, cl, reducedim):
"""Call MNE CSP algorithm."""
from mne.decoding import CSP
csp = CSP(n_components=reducedim, cov_est="epoch", reg="ledoit_wolf")
csp.fit(x, cl)
c, d = csp.filters_.T[:, :reducedim], csp.patterns_[:reducedim, :]
y = datatools.dot_special(c.T, x)
return c, d, y
backend = builtin.generate()
backend.update({'ica': wrapper_infomax, 'csp': wrapper_csp})
return backend
backend.register('mne', generate)
|
# ... existing code ...
from mne.decoding import CSP
csp = CSP(n_components=reducedim, cov_est="epoch", reg="ledoit_wolf")
csp.fit(x, cl)
# ... rest of the code ...
|
0fa33bb58d6b042e79c52a6f33454140a7150f64
|
lithium/blog/views.py
|
lithium/blog/views.py
|
from lithium.blog.models import Post
def decorator(request, view, author=None, tag=None, *args, **kwargs):
"""
A view decotator to change the queryset depending on whether
a user may read private posts.
"""
if request.user.has_perm('blog.can_read_private'):
kwargs['queryset'] = Post.on_site.all(allow_private=True)
if author:
kwargs['queryset'] = kwargs['queryset'].filter(author__username=author)
if tag:
kwargs['queryset'] = kwargs['queryset'].filter(category__slug=tag)
return view(request, *args, **kwargs)
|
from lithium.blog.models import Post
def decorator(request, view, author=None, tag=None, *args, **kwargs):
"""
A view decotator to change the queryset depending on whether
a user may read private posts.
"""
if request.user.has_perm('blog.can_read_private'):
kwargs['queryset'] = Post.on_site.all(allow_private=True)
kwargs['allow_future'] = True
if author:
kwargs['queryset'] = kwargs['queryset'].filter(author__username=author)
if tag:
kwargs['queryset'] = kwargs['queryset'].filter(category__slug=tag)
return view(request, *args, **kwargs)
|
Allow users with the permission 'blog.can_read_private' to see posts from the future.
|
Allow users with the permission 'blog.can_read_private' to see posts from the future.
|
Python
|
bsd-2-clause
|
kylef/lithium
|
from lithium.blog.models import Post
def decorator(request, view, author=None, tag=None, *args, **kwargs):
"""
A view decotator to change the queryset depending on whether
a user may read private posts.
"""
if request.user.has_perm('blog.can_read_private'):
kwargs['queryset'] = Post.on_site.all(allow_private=True)
+ kwargs['allow_future'] = True
if author:
kwargs['queryset'] = kwargs['queryset'].filter(author__username=author)
if tag:
kwargs['queryset'] = kwargs['queryset'].filter(category__slug=tag)
return view(request, *args, **kwargs)
|
Allow users with the permission 'blog.can_read_private' to see posts from the future.
|
## Code Before:
from lithium.blog.models import Post
def decorator(request, view, author=None, tag=None, *args, **kwargs):
"""
A view decotator to change the queryset depending on whether
a user may read private posts.
"""
if request.user.has_perm('blog.can_read_private'):
kwargs['queryset'] = Post.on_site.all(allow_private=True)
if author:
kwargs['queryset'] = kwargs['queryset'].filter(author__username=author)
if tag:
kwargs['queryset'] = kwargs['queryset'].filter(category__slug=tag)
return view(request, *args, **kwargs)
## Instruction:
Allow users with the permission 'blog.can_read_private' to see posts from the future.
## Code After:
from lithium.blog.models import Post
def decorator(request, view, author=None, tag=None, *args, **kwargs):
"""
A view decotator to change the queryset depending on whether
a user may read private posts.
"""
if request.user.has_perm('blog.can_read_private'):
kwargs['queryset'] = Post.on_site.all(allow_private=True)
kwargs['allow_future'] = True
if author:
kwargs['queryset'] = kwargs['queryset'].filter(author__username=author)
if tag:
kwargs['queryset'] = kwargs['queryset'].filter(category__slug=tag)
return view(request, *args, **kwargs)
|
...
kwargs['queryset'] = Post.on_site.all(allow_private=True)
kwargs['allow_future'] = True
...
|
d5d359c5ec0f1735e97355839f1a12c6ea45c460
|
polygamy/pygit2_git.py
|
polygamy/pygit2_git.py
|
from __future__ import absolute_import
import pygit2
from .base_git import NoSuchRemote
from .plain_git import PlainGit
class Pygit2Git(PlainGit):
@staticmethod
def is_on_branch(path):
repo = pygit2.Repository(path)
return not (repo.head_is_detached or repo.head_is_unborn)
@staticmethod
def get_remote_url(path, remote_name):
repo = pygit2.Repository(path)
for remote in repo.remotes:
if remote.name == remote_name:
break
else:
raise NoSuchRemote()
return remote.url
|
from __future__ import absolute_import
import pygit2
from .base_git import NoSuchRemote
from .plain_git import PlainGit
class Pygit2Git(PlainGit):
@staticmethod
def is_on_branch(path):
repo = pygit2.Repository(path)
return not (repo.head_is_detached or repo.head_is_unborn)
@staticmethod
def get_remote_url(path, remote_name):
repo = pygit2.Repository(path)
for remote in repo.remotes:
if remote.name == remote_name:
break
else:
raise NoSuchRemote()
return remote.url
@staticmethod
def add_remote(path, remote_name, remote_url):
repo = pygit2.Repository(path)
repo.create_remote(remote_name, remote_url)
|
Add add_remote to pygit2 implementation
|
Add add_remote to pygit2 implementation
|
Python
|
bsd-3-clause
|
solarnz/polygamy,solarnz/polygamy
|
from __future__ import absolute_import
import pygit2
from .base_git import NoSuchRemote
from .plain_git import PlainGit
class Pygit2Git(PlainGit):
@staticmethod
def is_on_branch(path):
repo = pygit2.Repository(path)
return not (repo.head_is_detached or repo.head_is_unborn)
@staticmethod
def get_remote_url(path, remote_name):
repo = pygit2.Repository(path)
for remote in repo.remotes:
if remote.name == remote_name:
break
else:
raise NoSuchRemote()
return remote.url
+ @staticmethod
+ def add_remote(path, remote_name, remote_url):
+ repo = pygit2.Repository(path)
+ repo.create_remote(remote_name, remote_url)
+
|
Add add_remote to pygit2 implementation
|
## Code Before:
from __future__ import absolute_import
import pygit2
from .base_git import NoSuchRemote
from .plain_git import PlainGit
class Pygit2Git(PlainGit):
@staticmethod
def is_on_branch(path):
repo = pygit2.Repository(path)
return not (repo.head_is_detached or repo.head_is_unborn)
@staticmethod
def get_remote_url(path, remote_name):
repo = pygit2.Repository(path)
for remote in repo.remotes:
if remote.name == remote_name:
break
else:
raise NoSuchRemote()
return remote.url
## Instruction:
Add add_remote to pygit2 implementation
## Code After:
from __future__ import absolute_import
import pygit2
from .base_git import NoSuchRemote
from .plain_git import PlainGit
class Pygit2Git(PlainGit):
@staticmethod
def is_on_branch(path):
repo = pygit2.Repository(path)
return not (repo.head_is_detached or repo.head_is_unborn)
@staticmethod
def get_remote_url(path, remote_name):
repo = pygit2.Repository(path)
for remote in repo.remotes:
if remote.name == remote_name:
break
else:
raise NoSuchRemote()
return remote.url
@staticmethod
def add_remote(path, remote_name, remote_url):
repo = pygit2.Repository(path)
repo.create_remote(remote_name, remote_url)
|
...
return remote.url
@staticmethod
def add_remote(path, remote_name, remote_url):
repo = pygit2.Repository(path)
repo.create_remote(remote_name, remote_url)
...
|
0933e4c671ca1297378b2ad388933e11265321d0
|
traptor/dd_monitoring.py
|
traptor/dd_monitoring.py
|
import os
from datadog import initialize
traptor_type = os.environ['TRAPTOR_TYPE']
traptor_id = os.environ['TRAPTOR_ID']
DEFAULT_TAGS = [
'traptor_type:{}'.format(traptor_type),
'traptor_id:{}'.format(traptor_id),
]
options = {
'statsd_host': os.environ['STATSD_HOST_IP'],
}
initialize(**options)
from datadog import statsd
DATADOG_METRICS = {
'tweet_process_success': 'traptor.src.tweet_process.success',
'tweet_process_failure': 'traptor.src.tweet_process.failure',
'tweet_to_kafka_success': 'traptor.src.tweet_to_kafka.success',
'tweet_to_kafka_failure': 'traptor.src.tweet_to_kafka.failure',
}
def increment(metric_name):
return statsd.increment(DATADOG_METRICS[metric_name], tags=DEFAULT_TAGS)
def gauge(metric_name, value):
return statsd.gauge(DATADOG_METRICS[metric_name], value, tags=DEFAULT_TAGS)
|
import os
from datadog import initialize
traptor_type = os.getenv('TRAPTOR_TYPE', 'track')
traptor_id = os.getenv('TRAPTOR_ID', '0')
DEFAULT_TAGS = [
'traptor_type:{}'.format(traptor_type),
'traptor_id:{}'.format(traptor_id),
]
options = {
'statsd_host': os.getenv('STATSD_HOST_IP', '127.0.0.1')
}
initialize(**options)
from datadog import statsd
DATADOG_METRICS = {
'tweet_process_success': 'traptor.src.tweet_process.success',
'tweet_process_failure': 'traptor.src.tweet_process.failure',
'tweet_to_kafka_success': 'traptor.src.tweet_to_kafka.success',
'tweet_to_kafka_failure': 'traptor.src.tweet_to_kafka.failure',
}
def increment(metric_name):
return statsd.increment(DATADOG_METRICS[metric_name], tags=DEFAULT_TAGS)
def gauge(metric_name, value):
return statsd.gauge(DATADOG_METRICS[metric_name], value, tags=DEFAULT_TAGS)
|
Use getenv instead of environment dict
|
Use getenv instead of environment dict
|
Python
|
mit
|
istresearch/traptor,istresearch/traptor
|
import os
from datadog import initialize
- traptor_type = os.environ['TRAPTOR_TYPE']
+ traptor_type = os.getenv('TRAPTOR_TYPE', 'track')
- traptor_id = os.environ['TRAPTOR_ID']
+ traptor_id = os.getenv('TRAPTOR_ID', '0')
DEFAULT_TAGS = [
'traptor_type:{}'.format(traptor_type),
'traptor_id:{}'.format(traptor_id),
]
options = {
- 'statsd_host': os.environ['STATSD_HOST_IP'],
+ 'statsd_host': os.getenv('STATSD_HOST_IP', '127.0.0.1')
}
initialize(**options)
from datadog import statsd
DATADOG_METRICS = {
'tweet_process_success': 'traptor.src.tweet_process.success',
'tweet_process_failure': 'traptor.src.tweet_process.failure',
'tweet_to_kafka_success': 'traptor.src.tweet_to_kafka.success',
'tweet_to_kafka_failure': 'traptor.src.tweet_to_kafka.failure',
}
def increment(metric_name):
return statsd.increment(DATADOG_METRICS[metric_name], tags=DEFAULT_TAGS)
def gauge(metric_name, value):
return statsd.gauge(DATADOG_METRICS[metric_name], value, tags=DEFAULT_TAGS)
|
Use getenv instead of environment dict
|
## Code Before:
import os
from datadog import initialize
traptor_type = os.environ['TRAPTOR_TYPE']
traptor_id = os.environ['TRAPTOR_ID']
DEFAULT_TAGS = [
'traptor_type:{}'.format(traptor_type),
'traptor_id:{}'.format(traptor_id),
]
options = {
'statsd_host': os.environ['STATSD_HOST_IP'],
}
initialize(**options)
from datadog import statsd
DATADOG_METRICS = {
'tweet_process_success': 'traptor.src.tweet_process.success',
'tweet_process_failure': 'traptor.src.tweet_process.failure',
'tweet_to_kafka_success': 'traptor.src.tweet_to_kafka.success',
'tweet_to_kafka_failure': 'traptor.src.tweet_to_kafka.failure',
}
def increment(metric_name):
return statsd.increment(DATADOG_METRICS[metric_name], tags=DEFAULT_TAGS)
def gauge(metric_name, value):
return statsd.gauge(DATADOG_METRICS[metric_name], value, tags=DEFAULT_TAGS)
## Instruction:
Use getenv instead of environment dict
## Code After:
import os
from datadog import initialize
traptor_type = os.getenv('TRAPTOR_TYPE', 'track')
traptor_id = os.getenv('TRAPTOR_ID', '0')
DEFAULT_TAGS = [
'traptor_type:{}'.format(traptor_type),
'traptor_id:{}'.format(traptor_id),
]
options = {
'statsd_host': os.getenv('STATSD_HOST_IP', '127.0.0.1')
}
initialize(**options)
from datadog import statsd
DATADOG_METRICS = {
'tweet_process_success': 'traptor.src.tweet_process.success',
'tweet_process_failure': 'traptor.src.tweet_process.failure',
'tweet_to_kafka_success': 'traptor.src.tweet_to_kafka.success',
'tweet_to_kafka_failure': 'traptor.src.tweet_to_kafka.failure',
}
def increment(metric_name):
return statsd.increment(DATADOG_METRICS[metric_name], tags=DEFAULT_TAGS)
def gauge(metric_name, value):
return statsd.gauge(DATADOG_METRICS[metric_name], value, tags=DEFAULT_TAGS)
|
// ... existing code ...
traptor_type = os.getenv('TRAPTOR_TYPE', 'track')
traptor_id = os.getenv('TRAPTOR_ID', '0')
// ... modified code ...
options = {
'statsd_host': os.getenv('STATSD_HOST_IP', '127.0.0.1')
}
// ... rest of the code ...
|
18bf9dd5e1e054d0c260959a8379f331940e167f
|
online_status/__init__.py
|
online_status/__init__.py
|
VERSION = (0, 1, 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
if VERSION[3:] == ('alpha', 0):
version = '%s pre-alpha' % version
else:
if VERSION[3] != 'final':
version = "%s %s" % (version, VERSION[3])
if VERSION[4] != 0:
version = '%s %s' % (version, VERSION[4])
return version
|
VERSION = (0, 1, 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
return version
|
Fix 'index out of bound' issue
|
Fix 'index out of bound' issue
|
Python
|
unlicense
|
hovel/django-online-status,hovel/django-online-status
|
VERSION = (0, 1, 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
- if VERSION[3:] == ('alpha', 0):
- version = '%s pre-alpha' % version
- else:
- if VERSION[3] != 'final':
- version = "%s %s" % (version, VERSION[3])
- if VERSION[4] != 0:
- version = '%s %s' % (version, VERSION[4])
return version
|
Fix 'index out of bound' issue
|
## Code Before:
VERSION = (0, 1, 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
if VERSION[3:] == ('alpha', 0):
version = '%s pre-alpha' % version
else:
if VERSION[3] != 'final':
version = "%s %s" % (version, VERSION[3])
if VERSION[4] != 0:
version = '%s %s' % (version, VERSION[4])
return version
## Instruction:
Fix 'index out of bound' issue
## Code After:
VERSION = (0, 1, 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
return version
|
// ... existing code ...
version = '%s.%s' % (version, VERSION[2])
return version
// ... rest of the code ...
|
183aacf12405eec38ba8b2193f8f89904d415c4a
|
yagocd/resources/base.py
|
yagocd/resources/base.py
|
from easydict import EasyDict
class Base(object):
def __init__(self, session, data):
self._session = session
self._data = EasyDict(data)
self.base_api = self._session.base_api()
@property
def data(self):
return self._data
if __name__ == '__main__':
pass
|
from easydict import EasyDict
class Base(object):
def __init__(self, session, data):
self._session = session
self._data = EasyDict(data)
self.base_api = self._session.base_api()
@property
def data(self):
return self._data
def __str__(self):
return self.data.__str__()
def __repr__(self):
return self.data.__repr__()
if __name__ == '__main__':
pass
|
Return internal data for string representation.
|
Return internal data for string representation.
|
Python
|
isc
|
grundic/yagocd,grundic/yagocd
|
from easydict import EasyDict
class Base(object):
def __init__(self, session, data):
self._session = session
self._data = EasyDict(data)
self.base_api = self._session.base_api()
@property
def data(self):
return self._data
+ def __str__(self):
+ return self.data.__str__()
+
+ def __repr__(self):
+ return self.data.__repr__()
+
if __name__ == '__main__':
pass
|
Return internal data for string representation.
|
## Code Before:
from easydict import EasyDict
class Base(object):
def __init__(self, session, data):
self._session = session
self._data = EasyDict(data)
self.base_api = self._session.base_api()
@property
def data(self):
return self._data
if __name__ == '__main__':
pass
## Instruction:
Return internal data for string representation.
## Code After:
from easydict import EasyDict
class Base(object):
def __init__(self, session, data):
self._session = session
self._data = EasyDict(data)
self.base_api = self._session.base_api()
@property
def data(self):
return self._data
def __str__(self):
return self.data.__str__()
def __repr__(self):
return self.data.__repr__()
if __name__ == '__main__':
pass
|
// ... existing code ...
def __str__(self):
return self.data.__str__()
def __repr__(self):
return self.data.__repr__()
// ... rest of the code ...
|
88995b5e2bcd6f3e21d8810a97f3c38cc84e8189
|
pulldb/subscriptions.py
|
pulldb/subscriptions.py
|
from google.appengine.ext import ndb
class Subscription(ndb.Model):
'''Subscription object in datastore.
Holds subscription data. Parent should be User.
'''
start_date = ndb.DateProperty()
volume = ndb.KeyProperty(kind='Volume')
|
from google.appengine.ext import ndb
from pulldb.users import user_key
class Subscription(ndb.Model):
'''Subscription object in datastore.
Holds subscription data. Parent should be User.
'''
start_date = ndb.DateProperty()
volume = ndb.KeyProperty(kind='Volume')
def subscription_key(volume_key, create=False):
key = None
user = user_key()
subscription = Subscription.query(Subscription.volume==volume_key,
ancestor=user).get()
if subscription:
key = subscription.key
elif create:
subscription = Subscription(parent=user,
volume=volume_key)
subscription.put()
key = user.key
return key
|
Add basic subscription fetcher / creator
|
Add basic subscription fetcher / creator
|
Python
|
mit
|
xchewtoyx/pulldb
|
from google.appengine.ext import ndb
+
+ from pulldb.users import user_key
class Subscription(ndb.Model):
'''Subscription object in datastore.
Holds subscription data. Parent should be User.
'''
start_date = ndb.DateProperty()
volume = ndb.KeyProperty(kind='Volume')
+ def subscription_key(volume_key, create=False):
+ key = None
+ user = user_key()
+ subscription = Subscription.query(Subscription.volume==volume_key,
+ ancestor=user).get()
+ if subscription:
+ key = subscription.key
+ elif create:
+ subscription = Subscription(parent=user,
+ volume=volume_key)
+ subscription.put()
+ key = user.key
+ return key
+
+
+
|
Add basic subscription fetcher / creator
|
## Code Before:
from google.appengine.ext import ndb
class Subscription(ndb.Model):
'''Subscription object in datastore.
Holds subscription data. Parent should be User.
'''
start_date = ndb.DateProperty()
volume = ndb.KeyProperty(kind='Volume')
## Instruction:
Add basic subscription fetcher / creator
## Code After:
from google.appengine.ext import ndb
from pulldb.users import user_key
class Subscription(ndb.Model):
'''Subscription object in datastore.
Holds subscription data. Parent should be User.
'''
start_date = ndb.DateProperty()
volume = ndb.KeyProperty(kind='Volume')
def subscription_key(volume_key, create=False):
key = None
user = user_key()
subscription = Subscription.query(Subscription.volume==volume_key,
ancestor=user).get()
if subscription:
key = subscription.key
elif create:
subscription = Subscription(parent=user,
volume=volume_key)
subscription.put()
key = user.key
return key
|
// ... existing code ...
from google.appengine.ext import ndb
from pulldb.users import user_key
// ... modified code ...
volume = ndb.KeyProperty(kind='Volume')
def subscription_key(volume_key, create=False):
key = None
user = user_key()
subscription = Subscription.query(Subscription.volume==volume_key,
ancestor=user).get()
if subscription:
key = subscription.key
elif create:
subscription = Subscription(parent=user,
volume=volume_key)
subscription.put()
key = user.key
return key
// ... rest of the code ...
|
5ee77b7294af840a47e11a8a9a3da109e33f4a63
|
lib/stats_backend.py
|
lib/stats_backend.py
|
import platform
from stats_file_backend import StatsFileBackend
class StatsBackend:
"""
This is a class to manage the Stats backend.
"""
def __init__(self, options={}):
if options == {}:
if platform.system() == "Darwin": # For my local dev I need this hack
options = {"db_path":"/tmp/stats.json"}
else:
options = {"db_path":"/var/lib/omniwallet/www/stats.json"}
self.engine = StatsFileBackend(options)
def put(self, key, val):
self.engine.put(key, val)
def increment(self, key):
val = self.engine.get(key)
if val == None:
val = 0
val += 1
self.engine.put(key, val)
def get(self, val):
return self.engine.get(val)
stats = StatsBackend()
stats.increment("amount_of_transactions")
|
import platform
from stats_file_backend import StatsFileBackend
class StatsBackend:
"""
This is a class to manage the Stats backend.
"""
def __init__(self, options={}):
if options == {}:
if platform.system() == "Darwin": # For my local dev I need this hack
options = {"db_path":"/tmp/stats.json"}
else:
options = {"db_path":"/var/lib/omniwallet/www/stats.json"}
self.engine = StatsFileBackend(options)
def put(self, key, val):
self.engine.put(key, val)
def increment(self, key):
val = self.engine.get(key)
if val == None:
val = 0
val += 1
self.engine.put(key, val)
def get(self, val):
return self.engine.get(val)
|
Remove some test code that got left behind
|
Remove some test code that got left behind
|
Python
|
agpl-3.0
|
dexX7/omniwallet,habibmasuro/omniwallet,habibmasuro/omniwallet,achamely/omniwallet,habibmasuro/omniwallet,VukDukic/omniwallet,OmniLayer/omniwallet,dexX7/omniwallet,achamely/omniwallet,OmniLayer/omniwallet,arowser/omniwallet,OmniLayer/omniwallet,arowser/omniwallet,curtislacy/omniwallet,ripper234/omniwallet,FuzzyBearBTC/omniwallet,dexX7/omniwallet,arowser/omniwallet,FuzzyBearBTC/omniwallet,Nevtep/omniwallet,Nevtep/omniwallet,curtislacy/omniwallet,achamely/omniwallet,ripper234/omniwallet,curtislacy/omniwallet,VukDukic/omniwallet,ripper234/omniwallet,Nevtep/omniwallet,OmniLayer/omniwallet,FuzzyBearBTC/omniwallet,VukDukic/omniwallet,Nevtep/omniwallet,habibmasuro/omniwallet,achamely/omniwallet
|
import platform
from stats_file_backend import StatsFileBackend
class StatsBackend:
"""
This is a class to manage the Stats backend.
"""
def __init__(self, options={}):
if options == {}:
if platform.system() == "Darwin": # For my local dev I need this hack
options = {"db_path":"/tmp/stats.json"}
else:
options = {"db_path":"/var/lib/omniwallet/www/stats.json"}
self.engine = StatsFileBackend(options)
def put(self, key, val):
self.engine.put(key, val)
def increment(self, key):
val = self.engine.get(key)
if val == None:
val = 0
val += 1
self.engine.put(key, val)
def get(self, val):
return self.engine.get(val)
-
- stats = StatsBackend()
- stats.increment("amount_of_transactions")
-
|
Remove some test code that got left behind
|
## Code Before:
import platform
from stats_file_backend import StatsFileBackend
class StatsBackend:
"""
This is a class to manage the Stats backend.
"""
def __init__(self, options={}):
if options == {}:
if platform.system() == "Darwin": # For my local dev I need this hack
options = {"db_path":"/tmp/stats.json"}
else:
options = {"db_path":"/var/lib/omniwallet/www/stats.json"}
self.engine = StatsFileBackend(options)
def put(self, key, val):
self.engine.put(key, val)
def increment(self, key):
val = self.engine.get(key)
if val == None:
val = 0
val += 1
self.engine.put(key, val)
def get(self, val):
return self.engine.get(val)
stats = StatsBackend()
stats.increment("amount_of_transactions")
## Instruction:
Remove some test code that got left behind
## Code After:
import platform
from stats_file_backend import StatsFileBackend
class StatsBackend:
"""
This is a class to manage the Stats backend.
"""
def __init__(self, options={}):
if options == {}:
if platform.system() == "Darwin": # For my local dev I need this hack
options = {"db_path":"/tmp/stats.json"}
else:
options = {"db_path":"/var/lib/omniwallet/www/stats.json"}
self.engine = StatsFileBackend(options)
def put(self, key, val):
self.engine.put(key, val)
def increment(self, key):
val = self.engine.get(key)
if val == None:
val = 0
val += 1
self.engine.put(key, val)
def get(self, val):
return self.engine.get(val)
|
// ... existing code ...
return self.engine.get(val)
// ... rest of the code ...
|
191e62a2547c4d3d013cb4c68fed60f1619fe82c
|
pyheufybot/modules/say.py
|
pyheufybot/modules/say.py
|
from pyheufybot.module_interface import Module, ModulePriority, ModuleType
class ModuleSpawner(Module):
def __init__(self, bot):
self.bot = bot
self.name = "Say"
self.trigger = "say"
self.moduleType = ModuleType.COMMAND
self.modulePriotity = ModulePriority.NORMAL
self.messageTypes = ["PRIVMSG"]
self.helpText = "Usage: say <message> | Makes the bot say the given line."
def execute(self, message):
if len(message.params) == 1:
self.bot.msg(message.replyTo, "Say what?")
else:
self.bot.msg(message.replyTo, " ".join(message.params[1:]))
return True
|
from pyheufybot.module_interface import Module, ModulePriority, ModuleType
class ModuleSpawner(Module):
def __init__(self, bot):
self.bot = bot
self.name = "Say"
self.trigger = "say|sayremote"
self.moduleType = ModuleType.COMMAND
self.modulePriority = ModulePriority.NORMAL
self.messageTypes = ["PRIVMSG"]
self.helpText = "Usage: say <message>/sayremote <target> <message> | Makes the bot say the given line."
def execute(self, message):
if message.params[0].lower() == "say":
if len(message.params) == 1:
self.bot.msg(message.replyTo, "Say what?")
else:
self.bot.msg(message.replyTo, " ".join(message.params[1:]))
elif message.params[0].lower() == "sayremote":
if len(message.params) < 3:
self.bot.msg(message.replyTo, "Say what?")
else:
self.bot.msg(message.params[1], " ".join(message.params[2:]))
return True
|
Add a remote option to Say
|
Add a remote option to Say
|
Python
|
mit
|
Heufneutje/PyHeufyBot,Heufneutje/PyHeufyBot
|
from pyheufybot.module_interface import Module, ModulePriority, ModuleType
class ModuleSpawner(Module):
def __init__(self, bot):
self.bot = bot
self.name = "Say"
- self.trigger = "say"
+ self.trigger = "say|sayremote"
self.moduleType = ModuleType.COMMAND
- self.modulePriotity = ModulePriority.NORMAL
+ self.modulePriority = ModulePriority.NORMAL
self.messageTypes = ["PRIVMSG"]
- self.helpText = "Usage: say <message> | Makes the bot say the given line."
+ self.helpText = "Usage: say <message>/sayremote <target> <message> | Makes the bot say the given line."
def execute(self, message):
+ if message.params[0].lower() == "say":
- if len(message.params) == 1:
+ if len(message.params) == 1:
- self.bot.msg(message.replyTo, "Say what?")
+ self.bot.msg(message.replyTo, "Say what?")
- else:
+ else:
- self.bot.msg(message.replyTo, " ".join(message.params[1:]))
+ self.bot.msg(message.replyTo, " ".join(message.params[1:]))
+ elif message.params[0].lower() == "sayremote":
+ if len(message.params) < 3:
+ self.bot.msg(message.replyTo, "Say what?")
+ else:
+ self.bot.msg(message.params[1], " ".join(message.params[2:]))
return True
|
Add a remote option to Say
|
## Code Before:
from pyheufybot.module_interface import Module, ModulePriority, ModuleType
class ModuleSpawner(Module):
def __init__(self, bot):
self.bot = bot
self.name = "Say"
self.trigger = "say"
self.moduleType = ModuleType.COMMAND
self.modulePriotity = ModulePriority.NORMAL
self.messageTypes = ["PRIVMSG"]
self.helpText = "Usage: say <message> | Makes the bot say the given line."
def execute(self, message):
if len(message.params) == 1:
self.bot.msg(message.replyTo, "Say what?")
else:
self.bot.msg(message.replyTo, " ".join(message.params[1:]))
return True
## Instruction:
Add a remote option to Say
## Code After:
from pyheufybot.module_interface import Module, ModulePriority, ModuleType
class ModuleSpawner(Module):
def __init__(self, bot):
self.bot = bot
self.name = "Say"
self.trigger = "say|sayremote"
self.moduleType = ModuleType.COMMAND
self.modulePriority = ModulePriority.NORMAL
self.messageTypes = ["PRIVMSG"]
self.helpText = "Usage: say <message>/sayremote <target> <message> | Makes the bot say the given line."
def execute(self, message):
if message.params[0].lower() == "say":
if len(message.params) == 1:
self.bot.msg(message.replyTo, "Say what?")
else:
self.bot.msg(message.replyTo, " ".join(message.params[1:]))
elif message.params[0].lower() == "sayremote":
if len(message.params) < 3:
self.bot.msg(message.replyTo, "Say what?")
else:
self.bot.msg(message.params[1], " ".join(message.params[2:]))
return True
|
# ... existing code ...
self.name = "Say"
self.trigger = "say|sayremote"
self.moduleType = ModuleType.COMMAND
self.modulePriority = ModulePriority.NORMAL
self.messageTypes = ["PRIVMSG"]
self.helpText = "Usage: say <message>/sayremote <target> <message> | Makes the bot say the given line."
# ... modified code ...
def execute(self, message):
if message.params[0].lower() == "say":
if len(message.params) == 1:
self.bot.msg(message.replyTo, "Say what?")
else:
self.bot.msg(message.replyTo, " ".join(message.params[1:]))
elif message.params[0].lower() == "sayremote":
if len(message.params) < 3:
self.bot.msg(message.replyTo, "Say what?")
else:
self.bot.msg(message.params[1], " ".join(message.params[2:]))
return True
# ... rest of the code ...
|
5e1ffdab41c322a9bcd466b34aabaa37ef08a6e2
|
profiling_run.py
|
profiling_run.py
|
import skin_core_scanner_simple as scss
reload(scss)
import equil_solver as es
reload(es)
import newcomb_simple as new
reload(new)
(lambda_a_mesh, k_a_mesh,
stability_maps) = scss.scan_lambda_k_space([0.01, 3.0, 25.], [0.01, 1.5, 25],
epsilon=0.11, core_radius_norm=0.9,
transition_width_norm=0.033,
skin_width_norm=0.034,
method='lsoda',
max_step=1E-2, nsteps=1000)
|
import skin_core_scanner_simple as scss
reload(scss)
import equil_solver as es
reload(es)
import newcomb_simple as new
reload(new)
(lambda_a_mesh, k_a_mesh,
stability_maps) = scss.scan_lambda_k_space([0.01, 3.0, 25.], [0.01, 1.5, 25],
epsilon=0.11, core_radius_norm=0.9,
transition_width_norm=0.033,
skin_width_norm=0.034,
method='lsoda',
max_step=1E-2, nsteps=1000, use_jac=True)
|
Make jacobian use more explicit.
|
Make jacobian use more explicit.
|
Python
|
mit
|
jensv/fluxtubestability,jensv/fluxtubestability
|
import skin_core_scanner_simple as scss
reload(scss)
import equil_solver as es
reload(es)
import newcomb_simple as new
reload(new)
(lambda_a_mesh, k_a_mesh,
stability_maps) = scss.scan_lambda_k_space([0.01, 3.0, 25.], [0.01, 1.5, 25],
epsilon=0.11, core_radius_norm=0.9,
transition_width_norm=0.033,
skin_width_norm=0.034,
method='lsoda',
- max_step=1E-2, nsteps=1000)
+ max_step=1E-2, nsteps=1000, use_jac=True)
|
Make jacobian use more explicit.
|
## Code Before:
import skin_core_scanner_simple as scss
reload(scss)
import equil_solver as es
reload(es)
import newcomb_simple as new
reload(new)
(lambda_a_mesh, k_a_mesh,
stability_maps) = scss.scan_lambda_k_space([0.01, 3.0, 25.], [0.01, 1.5, 25],
epsilon=0.11, core_radius_norm=0.9,
transition_width_norm=0.033,
skin_width_norm=0.034,
method='lsoda',
max_step=1E-2, nsteps=1000)
## Instruction:
Make jacobian use more explicit.
## Code After:
import skin_core_scanner_simple as scss
reload(scss)
import equil_solver as es
reload(es)
import newcomb_simple as new
reload(new)
(lambda_a_mesh, k_a_mesh,
stability_maps) = scss.scan_lambda_k_space([0.01, 3.0, 25.], [0.01, 1.5, 25],
epsilon=0.11, core_radius_norm=0.9,
transition_width_norm=0.033,
skin_width_norm=0.034,
method='lsoda',
max_step=1E-2, nsteps=1000, use_jac=True)
|
# ... existing code ...
method='lsoda',
max_step=1E-2, nsteps=1000, use_jac=True)
# ... rest of the code ...
|
8858cf1f0b87026ce913a19c4e5df415409cfd79
|
streak-podium/read.py
|
streak-podium/read.py
|
import logging
import requests
def input_file(filename):
"""
Read a file and return list of usernames.
Assumes one username per line and ignores blank lines.
"""
with open(filename, 'r') as f:
return list(line.strip() for line in f if line.strip())
def org_members(org_name):
"""
Query Github API and return list of members from a Github organization.
"""
url = 'https://github.com/orgs/{}/members'.format(org_name)
headers = {'Accept': 'application/vnd.github.ironman-preview+json'}
try:
r = requests.get(url, headers=headers)
except requests.exceptions.ConnectionError:
logging.warn('Connection error trying to get org members: [{}]'.format(url))
return []
if r.status_code == 404:
print('Got 404')
print(r.status_code)
return []
print('response')
print(r.text)
return r.text
def svg_data(username):
"""
Returns the contribution streak SVG file contents from Github
for a specific username.
"""
url = 'https://github.com/users/{}/contributions'.format(username)
try:
r = requests.get(url)
except requests.exceptions.ConnectionError:
logging.warn('Connection error trying to get url: [{}]'.format(url))
return None
return r.text
|
import logging
import requests
def input_file(filename):
"""
Read a file and return list of usernames.
Assumes one username per line and ignores blank lines.
"""
with open(filename, 'r') as f:
return list(line.strip() for line in f if line.strip())
def org_members(org_name):
"""
Query Github API and return list of members from a Github organization.
"""
if org_name is None:
org_name = 'pulseenergy'
url = 'https://github.com/orgs/{}/members'.format(org_name)
headers = {'Accept': 'application/vnd.github.ironman-preview+json'}
try:
r = requests.get(url, headers=headers)
except requests.exceptions.ConnectionError:
logging.warn('Connection error trying to get org members: [{}]'.format(url))
return []
if r.status_code == 404:
print('Got 404')
print(r.status_code)
return []
print('response')
print(r.text)
return r.text
def svg_data(username):
"""
Returns the contribution streak SVG file contents from Github
for a specific username.
"""
url = 'https://github.com/users/{}/contributions'.format(username)
try:
r = requests.get(url)
except requests.exceptions.ConnectionError:
logging.warn('Connection error trying to get url: [{}]'.format(url))
return None
return r.text
|
Handle None for org or file with default org name
|
Handle None for org or file with default org name
|
Python
|
mit
|
supermitch/streak-podium,jollyra/hubot-commit-streak,jollyra/hubot-streak-podium,jollyra/hubot-commit-streak,supermitch/streak-podium,jollyra/hubot-streak-podium
|
import logging
import requests
def input_file(filename):
"""
Read a file and return list of usernames.
Assumes one username per line and ignores blank lines.
"""
with open(filename, 'r') as f:
return list(line.strip() for line in f if line.strip())
def org_members(org_name):
"""
Query Github API and return list of members from a Github organization.
"""
+ if org_name is None:
+ org_name = 'pulseenergy'
+
url = 'https://github.com/orgs/{}/members'.format(org_name)
headers = {'Accept': 'application/vnd.github.ironman-preview+json'}
try:
r = requests.get(url, headers=headers)
except requests.exceptions.ConnectionError:
logging.warn('Connection error trying to get org members: [{}]'.format(url))
return []
if r.status_code == 404:
print('Got 404')
print(r.status_code)
return []
print('response')
print(r.text)
return r.text
def svg_data(username):
"""
Returns the contribution streak SVG file contents from Github
for a specific username.
"""
url = 'https://github.com/users/{}/contributions'.format(username)
try:
r = requests.get(url)
except requests.exceptions.ConnectionError:
logging.warn('Connection error trying to get url: [{}]'.format(url))
return None
return r.text
|
Handle None for org or file with default org name
|
## Code Before:
import logging
import requests
def input_file(filename):
"""
Read a file and return list of usernames.
Assumes one username per line and ignores blank lines.
"""
with open(filename, 'r') as f:
return list(line.strip() for line in f if line.strip())
def org_members(org_name):
"""
Query Github API and return list of members from a Github organization.
"""
url = 'https://github.com/orgs/{}/members'.format(org_name)
headers = {'Accept': 'application/vnd.github.ironman-preview+json'}
try:
r = requests.get(url, headers=headers)
except requests.exceptions.ConnectionError:
logging.warn('Connection error trying to get org members: [{}]'.format(url))
return []
if r.status_code == 404:
print('Got 404')
print(r.status_code)
return []
print('response')
print(r.text)
return r.text
def svg_data(username):
"""
Returns the contribution streak SVG file contents from Github
for a specific username.
"""
url = 'https://github.com/users/{}/contributions'.format(username)
try:
r = requests.get(url)
except requests.exceptions.ConnectionError:
logging.warn('Connection error trying to get url: [{}]'.format(url))
return None
return r.text
## Instruction:
Handle None for org or file with default org name
## Code After:
import logging
import requests
def input_file(filename):
"""
Read a file and return list of usernames.
Assumes one username per line and ignores blank lines.
"""
with open(filename, 'r') as f:
return list(line.strip() for line in f if line.strip())
def org_members(org_name):
"""
Query Github API and return list of members from a Github organization.
"""
if org_name is None:
org_name = 'pulseenergy'
url = 'https://github.com/orgs/{}/members'.format(org_name)
headers = {'Accept': 'application/vnd.github.ironman-preview+json'}
try:
r = requests.get(url, headers=headers)
except requests.exceptions.ConnectionError:
logging.warn('Connection error trying to get org members: [{}]'.format(url))
return []
if r.status_code == 404:
print('Got 404')
print(r.status_code)
return []
print('response')
print(r.text)
return r.text
def svg_data(username):
"""
Returns the contribution streak SVG file contents from Github
for a specific username.
"""
url = 'https://github.com/users/{}/contributions'.format(username)
try:
r = requests.get(url)
except requests.exceptions.ConnectionError:
logging.warn('Connection error trying to get url: [{}]'.format(url))
return None
return r.text
|
...
"""
if org_name is None:
org_name = 'pulseenergy'
url = 'https://github.com/orgs/{}/members'.format(org_name)
...
|
630ba21f3b08dcd2685297b057cbee4b6abee6f7
|
us_ignite/sections/models.py
|
us_ignite/sections/models.py
|
from django.db import models
class Sponsor(models.Model):
name = models.CharField(max_length=255)
website = models.URLField(max_length=500)
image = models.ImageField(upload_to="sponsor")
order = models.IntegerField(default=0)
class Meta:
ordering = ('order', )
def __unicode__(self):
return self.name
|
from django.db import models
class Sponsor(models.Model):
name = models.CharField(max_length=255)
website = models.URLField(max_length=500)
image = models.ImageField(
upload_to="sponsor", help_text='This image is not post processed. '
'Please make sure it has the right design specs.')
order = models.IntegerField(default=0)
class Meta:
ordering = ('order', )
def __unicode__(self):
return self.name
|
Add help text describing the image field functionality.
|
Add help text describing the image field functionality.
|
Python
|
bsd-3-clause
|
us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite
|
from django.db import models
class Sponsor(models.Model):
name = models.CharField(max_length=255)
website = models.URLField(max_length=500)
- image = models.ImageField(upload_to="sponsor")
+ image = models.ImageField(
+ upload_to="sponsor", help_text='This image is not post processed. '
+ 'Please make sure it has the right design specs.')
order = models.IntegerField(default=0)
class Meta:
ordering = ('order', )
def __unicode__(self):
return self.name
|
Add help text describing the image field functionality.
|
## Code Before:
from django.db import models
class Sponsor(models.Model):
name = models.CharField(max_length=255)
website = models.URLField(max_length=500)
image = models.ImageField(upload_to="sponsor")
order = models.IntegerField(default=0)
class Meta:
ordering = ('order', )
def __unicode__(self):
return self.name
## Instruction:
Add help text describing the image field functionality.
## Code After:
from django.db import models
class Sponsor(models.Model):
name = models.CharField(max_length=255)
website = models.URLField(max_length=500)
image = models.ImageField(
upload_to="sponsor", help_text='This image is not post processed. '
'Please make sure it has the right design specs.')
order = models.IntegerField(default=0)
class Meta:
ordering = ('order', )
def __unicode__(self):
return self.name
|
# ... existing code ...
website = models.URLField(max_length=500)
image = models.ImageField(
upload_to="sponsor", help_text='This image is not post processed. '
'Please make sure it has the right design specs.')
order = models.IntegerField(default=0)
# ... rest of the code ...
|
671ca30892e3ebeb0a9140f95690853b4b92dc02
|
post/views.py
|
post/views.py
|
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from post.models import Post
from jmbo.generic.views import GenericObjectDetail, GenericObjectList
from jmbo.view_modifiers import DefaultViewModifier
class ObjectList(GenericObjectList):
def get_extra_context(self, *args, **kwargs):
return {'title': _('Posts')}
def get_view_modifier(self, request, *args, **kwargs):
return DefaultViewModifier(request, *args, **kwargs)
def get_paginate_by(self, *args, **kwargs):
return 12
def get_queryset(self, *args, **kwargs):
return Post.permitted.all()
object_list = ObjectList()
class ObjectDetail(GenericObjectDetail):
def get_queryset(self, *args, **kwargs):
return Post.permitted.all()
def get_extra_context(self, *args, **kwargs):
return {'title': 'Posts'}
def get_view_modifier(self, request, *args, **kwargs):
return DefaultViewModifier(
request,
base_url=reverse("post_object_list"),
ignore_defaults=True,
*args,
**kwargs
)
object_detail = ObjectDetail()
|
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from post.models import Post
from jmbo.generic.views import GenericObjectDetail, GenericObjectList
from jmbo.view_modifiers import DefaultViewModifier
class ObjectList(GenericObjectList):
def get_extra_context(self, *args, **kwargs):
return {'title': _('Posts')}
def get_view_modifier(self, request, *args, **kwargs):
return DefaultViewModifier(request, *args, **kwargs)
def get_paginate_by(self, *args, **kwargs):
return 12
def get_queryset(self, *args, **kwargs):
return Post.permitted.all()
object_list = ObjectList()
class ObjectDetail(GenericObjectDetail):
def get_queryset(self, *args, **kwargs):
return Post.permitted.all()
def get_extra_context(self, *args, **kwargs):
return {'title': 'Posts'}
def get_view_modifier(self, request, *args, **kwargs):
return DefaultViewModifier(
request,
base_url=reverse("object_list", args=['post', 'post']),
ignore_defaults=True,
*args,
**kwargs
)
object_detail = ObjectDetail()
|
Fix reverse since we deprecated post_object_list
|
Fix reverse since we deprecated post_object_list
|
Python
|
bsd-3-clause
|
praekelt/jmbo-post,praekelt/jmbo-post
|
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from post.models import Post
from jmbo.generic.views import GenericObjectDetail, GenericObjectList
from jmbo.view_modifiers import DefaultViewModifier
class ObjectList(GenericObjectList):
def get_extra_context(self, *args, **kwargs):
return {'title': _('Posts')}
def get_view_modifier(self, request, *args, **kwargs):
return DefaultViewModifier(request, *args, **kwargs)
def get_paginate_by(self, *args, **kwargs):
return 12
def get_queryset(self, *args, **kwargs):
return Post.permitted.all()
object_list = ObjectList()
class ObjectDetail(GenericObjectDetail):
def get_queryset(self, *args, **kwargs):
return Post.permitted.all()
def get_extra_context(self, *args, **kwargs):
return {'title': 'Posts'}
def get_view_modifier(self, request, *args, **kwargs):
return DefaultViewModifier(
request,
- base_url=reverse("post_object_list"),
+ base_url=reverse("object_list", args=['post', 'post']),
ignore_defaults=True,
*args,
**kwargs
)
object_detail = ObjectDetail()
|
Fix reverse since we deprecated post_object_list
|
## Code Before:
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from post.models import Post
from jmbo.generic.views import GenericObjectDetail, GenericObjectList
from jmbo.view_modifiers import DefaultViewModifier
class ObjectList(GenericObjectList):
def get_extra_context(self, *args, **kwargs):
return {'title': _('Posts')}
def get_view_modifier(self, request, *args, **kwargs):
return DefaultViewModifier(request, *args, **kwargs)
def get_paginate_by(self, *args, **kwargs):
return 12
def get_queryset(self, *args, **kwargs):
return Post.permitted.all()
object_list = ObjectList()
class ObjectDetail(GenericObjectDetail):
def get_queryset(self, *args, **kwargs):
return Post.permitted.all()
def get_extra_context(self, *args, **kwargs):
return {'title': 'Posts'}
def get_view_modifier(self, request, *args, **kwargs):
return DefaultViewModifier(
request,
base_url=reverse("post_object_list"),
ignore_defaults=True,
*args,
**kwargs
)
object_detail = ObjectDetail()
## Instruction:
Fix reverse since we deprecated post_object_list
## Code After:
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from post.models import Post
from jmbo.generic.views import GenericObjectDetail, GenericObjectList
from jmbo.view_modifiers import DefaultViewModifier
class ObjectList(GenericObjectList):
def get_extra_context(self, *args, **kwargs):
return {'title': _('Posts')}
def get_view_modifier(self, request, *args, **kwargs):
return DefaultViewModifier(request, *args, **kwargs)
def get_paginate_by(self, *args, **kwargs):
return 12
def get_queryset(self, *args, **kwargs):
return Post.permitted.all()
object_list = ObjectList()
class ObjectDetail(GenericObjectDetail):
def get_queryset(self, *args, **kwargs):
return Post.permitted.all()
def get_extra_context(self, *args, **kwargs):
return {'title': 'Posts'}
def get_view_modifier(self, request, *args, **kwargs):
return DefaultViewModifier(
request,
base_url=reverse("object_list", args=['post', 'post']),
ignore_defaults=True,
*args,
**kwargs
)
object_detail = ObjectDetail()
|
# ... existing code ...
request,
base_url=reverse("object_list", args=['post', 'post']),
ignore_defaults=True,
# ... rest of the code ...
|
3cc3c0b90714bbf7a2638b16faec69aba82a4050
|
op_robot_tests/tests_files/brokers/openprocurement_client_helper.py
|
op_robot_tests/tests_files/brokers/openprocurement_client_helper.py
|
from openprocurement_client.client import Client
import sys
def prepare_api_wrapper(key='', host_url="https://api-sandbox.openprocurement.org", api_version='0.8' ):
return Client(key, host_url, api_version )
def get_internal_id(get_tenders_function, date):
result = get_tenders_function({"offset": date, "opt_fields": 'tenderID', })
#import pdb; pdb.Pdb(stdout=sys.__stdout__).set_trace()
return result
|
from openprocurement_client.client import Client
import sys
def prepare_api_wrapper(key='', host_url="https://api-sandbox.openprocurement.org", api_version='0.8'):
return Client(key, host_url, api_version)
def get_internal_id(get_tenders_function, date):
result = get_tenders_function({"offset": date, "opt_fields": 'tenderID'})
return result
|
Improve PEP8 compliance in op_client_helper.py
|
Improve PEP8 compliance in op_client_helper.py
|
Python
|
apache-2.0
|
SlaOne/robot_tests,kosaniak/robot_tests,selurvedu/robot_tests,Leits/robot_tests,cleardevice/robot_tests,VadimShurhal/robot_tests.broker.aps,mykhaly/robot_tests,Rzaporozhets/robot_tests,bubanoid/robot_tests,openprocurement/robot_tests
|
from openprocurement_client.client import Client
import sys
+
- def prepare_api_wrapper(key='', host_url="https://api-sandbox.openprocurement.org", api_version='0.8' ):
+ def prepare_api_wrapper(key='', host_url="https://api-sandbox.openprocurement.org", api_version='0.8'):
- return Client(key, host_url, api_version )
+ return Client(key, host_url, api_version)
+
def get_internal_id(get_tenders_function, date):
- result = get_tenders_function({"offset": date, "opt_fields": 'tenderID', })
+ result = get_tenders_function({"offset": date, "opt_fields": 'tenderID'})
- #import pdb; pdb.Pdb(stdout=sys.__stdout__).set_trace()
- return result
+ return result
|
Improve PEP8 compliance in op_client_helper.py
|
## Code Before:
from openprocurement_client.client import Client
import sys
def prepare_api_wrapper(key='', host_url="https://api-sandbox.openprocurement.org", api_version='0.8' ):
return Client(key, host_url, api_version )
def get_internal_id(get_tenders_function, date):
result = get_tenders_function({"offset": date, "opt_fields": 'tenderID', })
#import pdb; pdb.Pdb(stdout=sys.__stdout__).set_trace()
return result
## Instruction:
Improve PEP8 compliance in op_client_helper.py
## Code After:
from openprocurement_client.client import Client
import sys
def prepare_api_wrapper(key='', host_url="https://api-sandbox.openprocurement.org", api_version='0.8'):
return Client(key, host_url, api_version)
def get_internal_id(get_tenders_function, date):
result = get_tenders_function({"offset": date, "opt_fields": 'tenderID'})
return result
|
...
def prepare_api_wrapper(key='', host_url="https://api-sandbox.openprocurement.org", api_version='0.8'):
return Client(key, host_url, api_version)
...
def get_internal_id(get_tenders_function, date):
result = get_tenders_function({"offset": date, "opt_fields": 'tenderID'})
return result
...
|
4f45e55e5b0e14cf6bf32b42a14cbdf9b3c08258
|
dbus_notify.py
|
dbus_notify.py
|
from cgi import escape
import dbus
from utils import is_string
ITEM = "org.freedesktop.Notifications"
PATH = "/org/freedesktop/Notifications"
INTERFACE = "org.freedesktop.Notifications"
APP_NAME = "mpd-hiss"
def dbus_raw_image(im):
"""Convert image for DBUS"""
raw = im.tobytes("raw", "RGBA")
alpha, bps, channels = 0, 8, 4
stride = channels * im.size[0]
return (im.size[0], im.size[1], stride, alpha, bps, channels,
dbus.ByteArray(raw))
def native_load_image(image):
return image
def notify(title, description, icon):
actions = ""
hint = {"suppress-sound": True, "urgency": 0}
time = 5000
if is_string(icon):
# File path
icon_file = icon
else:
icon_file = ""
# Not all notifiers support this
# Some require "icon" and an image on disk
hint["icon_data"] = dbus_raw_image(icon)
bus = dbus.SessionBus()
notif = bus.get_object(ITEM, PATH)
notify = dbus.Interface(notif, INTERFACE)
notify.Notify(APP_NAME, 1, icon_file, title, escape(description), actions,
hint, time)
|
from cgi import escape
import dbus
from utils import is_string
ITEM = "org.freedesktop.Notifications"
PATH = "/org/freedesktop/Notifications"
INTERFACE = "org.freedesktop.Notifications"
APP_NAME = "mpd-hiss"
def dbus_raw_image(im):
"""Convert image for DBUS"""
raw = im.tobytes("raw", "RGBA")
alpha, bps, channels = 0, 8, 4
stride = channels * im.size[0]
return (im.size[0], im.size[1], stride, alpha, bps, channels,
dbus.ByteArray(raw))
def native_load_image(image):
return image
def notify(title, description, icon):
actions = ""
hint = {"suppress-sound": True, "urgency": 0}
time = 5000
icon_file = ""
if is_string(icon):
# File path
icon_file = icon
elif icon:
# Not all notifiers support this
# Some require "icon" and an image on disk
hint["icon_data"] = dbus_raw_image(icon)
bus = dbus.SessionBus()
notif = bus.get_object(ITEM, PATH)
notify = dbus.Interface(notif, INTERFACE)
notify.Notify(APP_NAME, 1, icon_file, title, escape(description), actions,
hint, time)
|
Make sure we do not try to convert None
|
Make sure we do not try to convert None
|
Python
|
cc0-1.0
|
hellhovnd/mpd-hiss,ahihi/mpd-hiss
|
from cgi import escape
import dbus
from utils import is_string
ITEM = "org.freedesktop.Notifications"
PATH = "/org/freedesktop/Notifications"
INTERFACE = "org.freedesktop.Notifications"
APP_NAME = "mpd-hiss"
def dbus_raw_image(im):
"""Convert image for DBUS"""
raw = im.tobytes("raw", "RGBA")
alpha, bps, channels = 0, 8, 4
stride = channels * im.size[0]
return (im.size[0], im.size[1], stride, alpha, bps, channels,
dbus.ByteArray(raw))
def native_load_image(image):
return image
def notify(title, description, icon):
actions = ""
hint = {"suppress-sound": True, "urgency": 0}
time = 5000
+ icon_file = ""
if is_string(icon):
# File path
icon_file = icon
+ elif icon:
- else:
- icon_file = ""
# Not all notifiers support this
# Some require "icon" and an image on disk
hint["icon_data"] = dbus_raw_image(icon)
bus = dbus.SessionBus()
notif = bus.get_object(ITEM, PATH)
notify = dbus.Interface(notif, INTERFACE)
notify.Notify(APP_NAME, 1, icon_file, title, escape(description), actions,
hint, time)
|
Make sure we do not try to convert None
|
## Code Before:
from cgi import escape
import dbus
from utils import is_string
ITEM = "org.freedesktop.Notifications"
PATH = "/org/freedesktop/Notifications"
INTERFACE = "org.freedesktop.Notifications"
APP_NAME = "mpd-hiss"
def dbus_raw_image(im):
"""Convert image for DBUS"""
raw = im.tobytes("raw", "RGBA")
alpha, bps, channels = 0, 8, 4
stride = channels * im.size[0]
return (im.size[0], im.size[1], stride, alpha, bps, channels,
dbus.ByteArray(raw))
def native_load_image(image):
return image
def notify(title, description, icon):
actions = ""
hint = {"suppress-sound": True, "urgency": 0}
time = 5000
if is_string(icon):
# File path
icon_file = icon
else:
icon_file = ""
# Not all notifiers support this
# Some require "icon" and an image on disk
hint["icon_data"] = dbus_raw_image(icon)
bus = dbus.SessionBus()
notif = bus.get_object(ITEM, PATH)
notify = dbus.Interface(notif, INTERFACE)
notify.Notify(APP_NAME, 1, icon_file, title, escape(description), actions,
hint, time)
## Instruction:
Make sure we do not try to convert None
## Code After:
from cgi import escape
import dbus
from utils import is_string
ITEM = "org.freedesktop.Notifications"
PATH = "/org/freedesktop/Notifications"
INTERFACE = "org.freedesktop.Notifications"
APP_NAME = "mpd-hiss"
def dbus_raw_image(im):
"""Convert image for DBUS"""
raw = im.tobytes("raw", "RGBA")
alpha, bps, channels = 0, 8, 4
stride = channels * im.size[0]
return (im.size[0], im.size[1], stride, alpha, bps, channels,
dbus.ByteArray(raw))
def native_load_image(image):
return image
def notify(title, description, icon):
actions = ""
hint = {"suppress-sound": True, "urgency": 0}
time = 5000
icon_file = ""
if is_string(icon):
# File path
icon_file = icon
elif icon:
# Not all notifiers support this
# Some require "icon" and an image on disk
hint["icon_data"] = dbus_raw_image(icon)
bus = dbus.SessionBus()
notif = bus.get_object(ITEM, PATH)
notify = dbus.Interface(notif, INTERFACE)
notify.Notify(APP_NAME, 1, icon_file, title, escape(description), actions,
hint, time)
|
// ... existing code ...
time = 5000
icon_file = ""
// ... modified code ...
icon_file = icon
elif icon:
# Not all notifiers support this
// ... rest of the code ...
|
5954196d3c81083f7f94eca147fe1a76a6dfb301
|
vc_vidyo/indico_vc_vidyo/blueprint.py
|
vc_vidyo/indico_vc_vidyo/blueprint.py
|
from __future__ import unicode_literals
from indico.core.plugins import IndicoPluginBlueprint
from indico_vc_vidyo.controllers import RHVidyoRoomOwner
blueprint = IndicoPluginBlueprint('vc_vidyo', 'indico_vc_vidyo')
# Room management
blueprint.add_url_rule('/event/<confId>/manage/videoconference/vidyo/<int:event_vc_room_id>/room-owner/',
'set_room_owner', RHVidyoRoomOwner, methods=('POST',), defaults={'service': 'vidyo'})
|
from __future__ import unicode_literals
from indico.core.plugins import IndicoPluginBlueprint
from indico_vc_vidyo.controllers import RHVidyoRoomOwner
blueprint = IndicoPluginBlueprint('vc_vidyo', 'indico_vc_vidyo')
# Room management
# using any(vidyo) instead of defaults since the event vc room locator
# includes the service and normalization skips values provided in 'defaults'
blueprint.add_url_rule('/event/<confId>/manage/videoconference/<any(vidyo):service>/<int:event_vc_room_id>/room-owner',
'set_room_owner', RHVidyoRoomOwner, methods=('POST',))
|
Fix "make me room owner"
|
VC/Vidyo: Fix "make me room owner"
|
Python
|
mit
|
ThiefMaster/indico-plugins,ThiefMaster/indico-plugins,ThiefMaster/indico-plugins,indico/indico-plugins,ThiefMaster/indico-plugins,indico/indico-plugins,indico/indico-plugins,indico/indico-plugins
|
from __future__ import unicode_literals
from indico.core.plugins import IndicoPluginBlueprint
from indico_vc_vidyo.controllers import RHVidyoRoomOwner
blueprint = IndicoPluginBlueprint('vc_vidyo', 'indico_vc_vidyo')
# Room management
+ # using any(vidyo) instead of defaults since the event vc room locator
+ # includes the service and normalization skips values provided in 'defaults'
- blueprint.add_url_rule('/event/<confId>/manage/videoconference/vidyo/<int:event_vc_room_id>/room-owner/',
+ blueprint.add_url_rule('/event/<confId>/manage/videoconference/<any(vidyo):service>/<int:event_vc_room_id>/room-owner',
- 'set_room_owner', RHVidyoRoomOwner, methods=('POST',), defaults={'service': 'vidyo'})
+ 'set_room_owner', RHVidyoRoomOwner, methods=('POST',))
|
Fix "make me room owner"
|
## Code Before:
from __future__ import unicode_literals
from indico.core.plugins import IndicoPluginBlueprint
from indico_vc_vidyo.controllers import RHVidyoRoomOwner
blueprint = IndicoPluginBlueprint('vc_vidyo', 'indico_vc_vidyo')
# Room management
blueprint.add_url_rule('/event/<confId>/manage/videoconference/vidyo/<int:event_vc_room_id>/room-owner/',
'set_room_owner', RHVidyoRoomOwner, methods=('POST',), defaults={'service': 'vidyo'})
## Instruction:
Fix "make me room owner"
## Code After:
from __future__ import unicode_literals
from indico.core.plugins import IndicoPluginBlueprint
from indico_vc_vidyo.controllers import RHVidyoRoomOwner
blueprint = IndicoPluginBlueprint('vc_vidyo', 'indico_vc_vidyo')
# Room management
# using any(vidyo) instead of defaults since the event vc room locator
# includes the service and normalization skips values provided in 'defaults'
blueprint.add_url_rule('/event/<confId>/manage/videoconference/<any(vidyo):service>/<int:event_vc_room_id>/room-owner',
'set_room_owner', RHVidyoRoomOwner, methods=('POST',))
|
# ... existing code ...
# Room management
# using any(vidyo) instead of defaults since the event vc room locator
# includes the service and normalization skips values provided in 'defaults'
blueprint.add_url_rule('/event/<confId>/manage/videoconference/<any(vidyo):service>/<int:event_vc_room_id>/room-owner',
'set_room_owner', RHVidyoRoomOwner, methods=('POST',))
# ... rest of the code ...
|
3fb1800548ad421520bf3f2845aad4f51f6f5839
|
rapidsms_multimodem/tests/__init__.py
|
rapidsms_multimodem/tests/__init__.py
|
from test_utils import * # noqa
from test_views import * # noqa
|
from test_outgoing import * # noqa
from test_utils import * # noqa
from test_views import * # noqa
|
Add import for older versions of Django
|
Add import for older versions of Django
|
Python
|
bsd-3-clause
|
caktus/rapidsms-multimodem
|
+ from test_outgoing import * # noqa
from test_utils import * # noqa
from test_views import * # noqa
|
Add import for older versions of Django
|
## Code Before:
from test_utils import * # noqa
from test_views import * # noqa
## Instruction:
Add import for older versions of Django
## Code After:
from test_outgoing import * # noqa
from test_utils import * # noqa
from test_views import * # noqa
|
# ... existing code ...
from test_outgoing import * # noqa
from test_utils import * # noqa
# ... rest of the code ...
|
19a508aad7c42469923c6f62acef67113e280501
|
memefarm/imagesearch.py
|
memefarm/imagesearch.py
|
import json
import os
import random
import requests
from PIL import Image
# GLOBALS
endpoint = "https://www.googleapis.com/customsearch/v1"
searchid = "013060195084513904668:z7-hxk7q35k"
# Retrieve my API key from a secret file
with open(os.path.join(os.path.dirname(__file__), "API_KEY.txt"), "r") as f:
API_KEY = f.read()
# API
def get_image(search):
""" Get a ramdom image URL from the first 10 google images results for a
given search term """
r = requests.get(endpoint, params={
"key": API_KEY,
"cx": searchid,
"searchType": "image",
"q": search,
})
data = json.loads(r.text) # Load JSON responses
results = data["items"] # Find the images returned
result = random.choice(results) # Pick a random one
return result["link"] # Return its link
if __name__ == "__main__":
print(get_image("cow"))
|
import json
import os
import random
import requests
from io import BytesIO
from PIL import Image
# GLOBALS
endpoint = "https://www.googleapis.com/customsearch/v1"
searchid = "013060195084513904668:z7-hxk7q35k"
# Retrieve my API key from a secret file
with open(os.path.join(os.path.dirname(__file__), "API_KEY.txt"), "r") as f:
API_KEY = f.read()
# API
def getImageUrl(search):
""" Get a ramdom image URL from the first 10 google images results for a
given search term """
r = requests.get(endpoint, params={
"key": API_KEY,
"cx": searchid,
"searchType": "image",
"q": search,
})
data = json.loads(r.text) # Load JSON responses
results = data["items"] # Find the images returned
result = random.choice(results) # Pick a random one
return result["link"] # Return its link
def getImage(search):
""" Get a PIL image for a given search term """
url = getImageUrl(search) # Get an image URL
req = requests.get(url) # Download image
b = BytesIO(req.content) # Load into file-like object
return Image.open(b) # Open and return
if __name__ == "__main__":
getImage("cow").show()
|
Add method for getting PIL image
|
Add method for getting PIL image
Also change function names to be uniform throughout
|
Python
|
mit
|
The-Penultimate-Defenestrator/memefarm
|
import json
import os
import random
-
import requests
+ from io import BytesIO
from PIL import Image
# GLOBALS
endpoint = "https://www.googleapis.com/customsearch/v1"
searchid = "013060195084513904668:z7-hxk7q35k"
# Retrieve my API key from a secret file
with open(os.path.join(os.path.dirname(__file__), "API_KEY.txt"), "r") as f:
API_KEY = f.read()
# API
- def get_image(search):
+ def getImageUrl(search):
""" Get a ramdom image URL from the first 10 google images results for a
given search term """
r = requests.get(endpoint, params={
"key": API_KEY,
"cx": searchid,
"searchType": "image",
"q": search,
})
data = json.loads(r.text) # Load JSON responses
results = data["items"] # Find the images returned
result = random.choice(results) # Pick a random one
return result["link"] # Return its link
+
+ def getImage(search):
+ """ Get a PIL image for a given search term """
+ url = getImageUrl(search) # Get an image URL
+ req = requests.get(url) # Download image
+ b = BytesIO(req.content) # Load into file-like object
+ return Image.open(b) # Open and return
+
+
if __name__ == "__main__":
- print(get_image("cow"))
+ getImage("cow").show()
|
Add method for getting PIL image
|
## Code Before:
import json
import os
import random
import requests
from PIL import Image
# GLOBALS
endpoint = "https://www.googleapis.com/customsearch/v1"
searchid = "013060195084513904668:z7-hxk7q35k"
# Retrieve my API key from a secret file
with open(os.path.join(os.path.dirname(__file__), "API_KEY.txt"), "r") as f:
API_KEY = f.read()
# API
def get_image(search):
""" Get a ramdom image URL from the first 10 google images results for a
given search term """
r = requests.get(endpoint, params={
"key": API_KEY,
"cx": searchid,
"searchType": "image",
"q": search,
})
data = json.loads(r.text) # Load JSON responses
results = data["items"] # Find the images returned
result = random.choice(results) # Pick a random one
return result["link"] # Return its link
if __name__ == "__main__":
print(get_image("cow"))
## Instruction:
Add method for getting PIL image
## Code After:
import json
import os
import random
import requests
from io import BytesIO
from PIL import Image
# GLOBALS
endpoint = "https://www.googleapis.com/customsearch/v1"
searchid = "013060195084513904668:z7-hxk7q35k"
# Retrieve my API key from a secret file
with open(os.path.join(os.path.dirname(__file__), "API_KEY.txt"), "r") as f:
API_KEY = f.read()
# API
def getImageUrl(search):
""" Get a ramdom image URL from the first 10 google images results for a
given search term """
r = requests.get(endpoint, params={
"key": API_KEY,
"cx": searchid,
"searchType": "image",
"q": search,
})
data = json.loads(r.text) # Load JSON responses
results = data["items"] # Find the images returned
result = random.choice(results) # Pick a random one
return result["link"] # Return its link
def getImage(search):
""" Get a PIL image for a given search term """
url = getImageUrl(search) # Get an image URL
req = requests.get(url) # Download image
b = BytesIO(req.content) # Load into file-like object
return Image.open(b) # Open and return
if __name__ == "__main__":
getImage("cow").show()
|
# ... existing code ...
import random
import requests
# ... modified code ...
from io import BytesIO
from PIL import Image
...
def getImageUrl(search):
""" Get a ramdom image URL from the first 10 google images results for a
...
def getImage(search):
""" Get a PIL image for a given search term """
url = getImageUrl(search) # Get an image URL
req = requests.get(url) # Download image
b = BytesIO(req.content) # Load into file-like object
return Image.open(b) # Open and return
if __name__ == "__main__":
getImage("cow").show()
# ... rest of the code ...
|
ff12421cc6c3067bac11ece75cf4a16d11859ed0
|
tests/test_envs.py
|
tests/test_envs.py
|
import gym
import pytest
# Import for side-effect of registering environment
import imitation.examples.airl_envs # noqa: F401
import imitation.examples.model_envs # noqa: F401
ENV_NAMES = [env_spec.id for env_spec in gym.envs.registration.registry.all()
if env_spec.id.startswith('imitation/')]
@pytest.mark.parametrize("env_name", ENV_NAMES)
def test_envs(env_name): # pragma: no cover
"""Check that our custom environments don't crash on `step`, and `reset`."""
try:
env = gym.make(env_name)
except gym.error.DependencyNotInstalled as e:
if e.args[0].find('mujoco_py') != -1:
pytest.skip("Requires `mujoco_py`, which isn't installed.")
else:
raise
env.reset()
obs_space = env.observation_space
for _ in range(4):
act = env.action_space.sample()
obs, rew, done, info = env.step(act)
assert obs in obs_space
|
import gym
import pytest
# Import for side-effect of registering environment
import imitation.examples.airl_envs # noqa: F401
import imitation.examples.model_envs # noqa: F401
ENV_NAMES = [env_spec.id for env_spec in gym.envs.registration.registry.all()
if env_spec.id.startswith('imitation/')]
@pytest.mark.parametrize("env_name", ENV_NAMES)
def test_envs(env_name):
"""Check that our custom environments don't crash on `step`, and `reset`."""
try:
env = gym.make(env_name)
except gym.error.DependencyNotInstalled as e: # pragma: nocover
if e.args[0].find('mujoco_py') != -1:
pytest.skip("Requires `mujoco_py`, which isn't installed.")
else:
raise
env.reset()
obs_space = env.observation_space
for _ in range(4):
act = env.action_space.sample()
obs, rew, done, info = env.step(act)
assert obs in obs_space
|
Move the pragma: nocover to except block
|
Move the pragma: nocover to except block
|
Python
|
mit
|
HumanCompatibleAI/imitation,humancompatibleai/imitation,humancompatibleai/imitation,HumanCompatibleAI/imitation
|
import gym
import pytest
# Import for side-effect of registering environment
import imitation.examples.airl_envs # noqa: F401
import imitation.examples.model_envs # noqa: F401
ENV_NAMES = [env_spec.id for env_spec in gym.envs.registration.registry.all()
if env_spec.id.startswith('imitation/')]
@pytest.mark.parametrize("env_name", ENV_NAMES)
- def test_envs(env_name): # pragma: no cover
+ def test_envs(env_name):
"""Check that our custom environments don't crash on `step`, and `reset`."""
try:
env = gym.make(env_name)
- except gym.error.DependencyNotInstalled as e:
+ except gym.error.DependencyNotInstalled as e: # pragma: nocover
if e.args[0].find('mujoco_py') != -1:
pytest.skip("Requires `mujoco_py`, which isn't installed.")
else:
raise
env.reset()
obs_space = env.observation_space
for _ in range(4):
act = env.action_space.sample()
obs, rew, done, info = env.step(act)
assert obs in obs_space
|
Move the pragma: nocover to except block
|
## Code Before:
import gym
import pytest
# Import for side-effect of registering environment
import imitation.examples.airl_envs # noqa: F401
import imitation.examples.model_envs # noqa: F401
ENV_NAMES = [env_spec.id for env_spec in gym.envs.registration.registry.all()
if env_spec.id.startswith('imitation/')]
@pytest.mark.parametrize("env_name", ENV_NAMES)
def test_envs(env_name): # pragma: no cover
"""Check that our custom environments don't crash on `step`, and `reset`."""
try:
env = gym.make(env_name)
except gym.error.DependencyNotInstalled as e:
if e.args[0].find('mujoco_py') != -1:
pytest.skip("Requires `mujoco_py`, which isn't installed.")
else:
raise
env.reset()
obs_space = env.observation_space
for _ in range(4):
act = env.action_space.sample()
obs, rew, done, info = env.step(act)
assert obs in obs_space
## Instruction:
Move the pragma: nocover to except block
## Code After:
import gym
import pytest
# Import for side-effect of registering environment
import imitation.examples.airl_envs # noqa: F401
import imitation.examples.model_envs # noqa: F401
ENV_NAMES = [env_spec.id for env_spec in gym.envs.registration.registry.all()
if env_spec.id.startswith('imitation/')]
@pytest.mark.parametrize("env_name", ENV_NAMES)
def test_envs(env_name):
"""Check that our custom environments don't crash on `step`, and `reset`."""
try:
env = gym.make(env_name)
except gym.error.DependencyNotInstalled as e: # pragma: nocover
if e.args[0].find('mujoco_py') != -1:
pytest.skip("Requires `mujoco_py`, which isn't installed.")
else:
raise
env.reset()
obs_space = env.observation_space
for _ in range(4):
act = env.action_space.sample()
obs, rew, done, info = env.step(act)
assert obs in obs_space
|
// ... existing code ...
@pytest.mark.parametrize("env_name", ENV_NAMES)
def test_envs(env_name):
"""Check that our custom environments don't crash on `step`, and `reset`."""
// ... modified code ...
env = gym.make(env_name)
except gym.error.DependencyNotInstalled as e: # pragma: nocover
if e.args[0].find('mujoco_py') != -1:
// ... rest of the code ...
|
ab500891a44e7034e02889acc5f8ac1d44cb9aad
|
tests/test_error.py
|
tests/test_error.py
|
from __future__ import unicode_literals
import unittest
import six
import spotify
class ErrorTest(unittest.TestCase):
def test_error_has_error_code(self):
error = spotify.Error(0)
self.assertEqual(error.error_code, 0)
error = spotify.Error(1)
self.assertEqual(error.error_code, 1)
def test_error_has_error_message(self):
error = spotify.Error(0)
self.assertEqual(error.message, 'No error')
self.assertIsInstance(error.message, six.text_type)
error = spotify.Error(1)
self.assertEqual(error.message, 'Invalid library version')
def test_error_has_useful_repr(self):
error = spotify.Error(0)
self.assertEqual(repr(error), b"Error(u'No error',)")
def test_error_has_useful_str(self):
error = spotify.Error(0)
self.assertEqual(str(error), 'No error')
def test_error_has_error_constants(self):
self.assertEqual(spotify.Error.OK, 0)
self.assertEqual(spotify.Error.BAD_API_VERSION, 1)
|
from __future__ import unicode_literals
import unittest
import six
import spotify
class ErrorTest(unittest.TestCase):
def test_error_has_error_code(self):
error = spotify.Error(0)
self.assertEqual(error.error_code, 0)
error = spotify.Error(1)
self.assertEqual(error.error_code, 1)
def test_error_has_useful_repr(self):
error = spotify.Error(0)
self.assertIn('No error', repr(error))
def test_error_has_useful_string_representation(self):
error = spotify.Error(0)
self.assertEqual('%s' % error, 'No error')
self.assertIsInstance('%s' % error, six.text_type)
error = spotify.Error(1)
self.assertEqual('%s' % error, 'Invalid library version')
def test_error_has_error_constants(self):
self.assertEqual(spotify.Error.OK, 0)
self.assertEqual(spotify.Error.BAD_API_VERSION, 1)
|
Make Error behavior consistent across Pythons
|
Make Error behavior consistent across Pythons
|
Python
|
apache-2.0
|
felix1m/pyspotify,jodal/pyspotify,jodal/pyspotify,felix1m/pyspotify,kotamat/pyspotify,jodal/pyspotify,kotamat/pyspotify,mopidy/pyspotify,mopidy/pyspotify,kotamat/pyspotify,felix1m/pyspotify
|
from __future__ import unicode_literals
import unittest
import six
import spotify
class ErrorTest(unittest.TestCase):
def test_error_has_error_code(self):
error = spotify.Error(0)
self.assertEqual(error.error_code, 0)
error = spotify.Error(1)
self.assertEqual(error.error_code, 1)
- def test_error_has_error_message(self):
+ def test_error_has_useful_repr(self):
error = spotify.Error(0)
+ self.assertIn('No error', repr(error))
+
+ def test_error_has_useful_string_representation(self):
+ error = spotify.Error(0)
- self.assertEqual(error.message, 'No error')
+ self.assertEqual('%s' % error, 'No error')
- self.assertIsInstance(error.message, six.text_type)
+ self.assertIsInstance('%s' % error, six.text_type)
error = spotify.Error(1)
- self.assertEqual(error.message, 'Invalid library version')
+ self.assertEqual('%s' % error, 'Invalid library version')
-
- def test_error_has_useful_repr(self):
- error = spotify.Error(0)
- self.assertEqual(repr(error), b"Error(u'No error',)")
-
- def test_error_has_useful_str(self):
- error = spotify.Error(0)
- self.assertEqual(str(error), 'No error')
def test_error_has_error_constants(self):
self.assertEqual(spotify.Error.OK, 0)
self.assertEqual(spotify.Error.BAD_API_VERSION, 1)
|
Make Error behavior consistent across Pythons
|
## Code Before:
from __future__ import unicode_literals
import unittest
import six
import spotify
class ErrorTest(unittest.TestCase):
def test_error_has_error_code(self):
error = spotify.Error(0)
self.assertEqual(error.error_code, 0)
error = spotify.Error(1)
self.assertEqual(error.error_code, 1)
def test_error_has_error_message(self):
error = spotify.Error(0)
self.assertEqual(error.message, 'No error')
self.assertIsInstance(error.message, six.text_type)
error = spotify.Error(1)
self.assertEqual(error.message, 'Invalid library version')
def test_error_has_useful_repr(self):
error = spotify.Error(0)
self.assertEqual(repr(error), b"Error(u'No error',)")
def test_error_has_useful_str(self):
error = spotify.Error(0)
self.assertEqual(str(error), 'No error')
def test_error_has_error_constants(self):
self.assertEqual(spotify.Error.OK, 0)
self.assertEqual(spotify.Error.BAD_API_VERSION, 1)
## Instruction:
Make Error behavior consistent across Pythons
## Code After:
from __future__ import unicode_literals
import unittest
import six
import spotify
class ErrorTest(unittest.TestCase):
def test_error_has_error_code(self):
error = spotify.Error(0)
self.assertEqual(error.error_code, 0)
error = spotify.Error(1)
self.assertEqual(error.error_code, 1)
def test_error_has_useful_repr(self):
error = spotify.Error(0)
self.assertIn('No error', repr(error))
def test_error_has_useful_string_representation(self):
error = spotify.Error(0)
self.assertEqual('%s' % error, 'No error')
self.assertIsInstance('%s' % error, six.text_type)
error = spotify.Error(1)
self.assertEqual('%s' % error, 'Invalid library version')
def test_error_has_error_constants(self):
self.assertEqual(spotify.Error.OK, 0)
self.assertEqual(spotify.Error.BAD_API_VERSION, 1)
|
// ... existing code ...
def test_error_has_useful_repr(self):
error = spotify.Error(0)
self.assertIn('No error', repr(error))
def test_error_has_useful_string_representation(self):
error = spotify.Error(0)
self.assertEqual('%s' % error, 'No error')
self.assertIsInstance('%s' % error, six.text_type)
// ... modified code ...
error = spotify.Error(1)
self.assertEqual('%s' % error, 'Invalid library version')
// ... rest of the code ...
|
4622125a0f73a77ae0327deb886ac9d4b1c50791
|
events/views.py
|
events/views.py
|
from django.shortcuts import render_to_response
from django.template import RequestContext
import datetime
from django.http import Http404
from events.models import Event
from events.models import Calendar
def index(request):
calendars = Calendar.objects.all()
return render_to_response(
'events/index.html',
{'calendars': calendars},
context_instance=RequestContext(request)
)
def calendar_events(request, calendar_slug=None):
events = Event.objects.filter(calendar__slug=calendar_slug).filter(end__gte=datetime.date.today()).order_by('start')
calendars = Calendar.objects.all()
calendar = calendars.get(slug=calendar_slug)
if not calendar:
raise Http404
return render_to_response(
'events/calendar_events.html',
{'events': events, 'calendar': calendar, 'calendars': calendars},
context_instance=RequestContext(request)
)
def event_detail(request, calendar_slug=None, event_slug=None):
calendars = Calendar.objects.all()
try:
event = Event.objects.get(slug=event_slug)
except Event.DoesNotExist:
raise Http404
return render_to_response(
'events/event_detail.html',
{'event': event, 'calendars': calendars},
context_instance=RequestContext(request)
)
|
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.core.urlresolvers import resolve
import datetime
from django.http import Http404
from events.models import Event
from events.models import Calendar
def index(request):
calendars = Calendar.objects.all()
current_app = resolve(request.path).namespace
context = RequestContext(request, current_app=current_app)
return render_to_response(
'events/index.html',
{'calendars': calendars},
context_instance=context
)
def calendar_events(request, calendar_slug=None):
events = Event.objects.filter(calendar__slug=calendar_slug).filter(end__gte=datetime.date.today()).order_by('start')
calendars = Calendar.objects.all()
calendar = calendars.get(slug=calendar_slug)
if not calendar:
raise Http404
current_app = resolve(request.path).namespace
context = RequestContext(request, current_app=current_app)
return render_to_response(
'events/calendar_events.html',
{'events': events, 'calendar': calendar, 'calendars': calendars},
context_instance=context
)
def event_detail(request, calendar_slug=None, event_slug=None):
calendars = Calendar.objects.all()
try:
event = Event.objects.get(slug=event_slug)
except Event.DoesNotExist:
raise Http404
current_app = resolve(request.path).namespace
context = RequestContext(request, current_app=current_app)
return render_to_response(
'events/event_detail.html',
{'event': event, 'calendars': calendars},
context_instance=context
)
|
Add current_app to context for django-cms v3 support
|
Add current_app to context for django-cms v3 support
|
Python
|
bsd-3-clause
|
theherk/django-theherk-events
|
from django.shortcuts import render_to_response
from django.template import RequestContext
+ from django.core.urlresolvers import resolve
import datetime
from django.http import Http404
from events.models import Event
from events.models import Calendar
def index(request):
calendars = Calendar.objects.all()
+ current_app = resolve(request.path).namespace
+ context = RequestContext(request, current_app=current_app)
return render_to_response(
'events/index.html',
{'calendars': calendars},
- context_instance=RequestContext(request)
+ context_instance=context
)
def calendar_events(request, calendar_slug=None):
events = Event.objects.filter(calendar__slug=calendar_slug).filter(end__gte=datetime.date.today()).order_by('start')
calendars = Calendar.objects.all()
calendar = calendars.get(slug=calendar_slug)
if not calendar:
raise Http404
+ current_app = resolve(request.path).namespace
+ context = RequestContext(request, current_app=current_app)
return render_to_response(
'events/calendar_events.html',
{'events': events, 'calendar': calendar, 'calendars': calendars},
- context_instance=RequestContext(request)
+ context_instance=context
)
def event_detail(request, calendar_slug=None, event_slug=None):
calendars = Calendar.objects.all()
try:
event = Event.objects.get(slug=event_slug)
except Event.DoesNotExist:
raise Http404
+ current_app = resolve(request.path).namespace
+ context = RequestContext(request, current_app=current_app)
return render_to_response(
'events/event_detail.html',
{'event': event, 'calendars': calendars},
- context_instance=RequestContext(request)
+ context_instance=context
)
|
Add current_app to context for django-cms v3 support
|
## Code Before:
from django.shortcuts import render_to_response
from django.template import RequestContext
import datetime
from django.http import Http404
from events.models import Event
from events.models import Calendar
def index(request):
calendars = Calendar.objects.all()
return render_to_response(
'events/index.html',
{'calendars': calendars},
context_instance=RequestContext(request)
)
def calendar_events(request, calendar_slug=None):
events = Event.objects.filter(calendar__slug=calendar_slug).filter(end__gte=datetime.date.today()).order_by('start')
calendars = Calendar.objects.all()
calendar = calendars.get(slug=calendar_slug)
if not calendar:
raise Http404
return render_to_response(
'events/calendar_events.html',
{'events': events, 'calendar': calendar, 'calendars': calendars},
context_instance=RequestContext(request)
)
def event_detail(request, calendar_slug=None, event_slug=None):
calendars = Calendar.objects.all()
try:
event = Event.objects.get(slug=event_slug)
except Event.DoesNotExist:
raise Http404
return render_to_response(
'events/event_detail.html',
{'event': event, 'calendars': calendars},
context_instance=RequestContext(request)
)
## Instruction:
Add current_app to context for django-cms v3 support
## Code After:
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.core.urlresolvers import resolve
import datetime
from django.http import Http404
from events.models import Event
from events.models import Calendar
def index(request):
calendars = Calendar.objects.all()
current_app = resolve(request.path).namespace
context = RequestContext(request, current_app=current_app)
return render_to_response(
'events/index.html',
{'calendars': calendars},
context_instance=context
)
def calendar_events(request, calendar_slug=None):
events = Event.objects.filter(calendar__slug=calendar_slug).filter(end__gte=datetime.date.today()).order_by('start')
calendars = Calendar.objects.all()
calendar = calendars.get(slug=calendar_slug)
if not calendar:
raise Http404
current_app = resolve(request.path).namespace
context = RequestContext(request, current_app=current_app)
return render_to_response(
'events/calendar_events.html',
{'events': events, 'calendar': calendar, 'calendars': calendars},
context_instance=context
)
def event_detail(request, calendar_slug=None, event_slug=None):
calendars = Calendar.objects.all()
try:
event = Event.objects.get(slug=event_slug)
except Event.DoesNotExist:
raise Http404
current_app = resolve(request.path).namespace
context = RequestContext(request, current_app=current_app)
return render_to_response(
'events/event_detail.html',
{'event': event, 'calendars': calendars},
context_instance=context
)
|
# ... existing code ...
from django.template import RequestContext
from django.core.urlresolvers import resolve
import datetime
# ... modified code ...
calendars = Calendar.objects.all()
current_app = resolve(request.path).namespace
context = RequestContext(request, current_app=current_app)
return render_to_response(
...
{'calendars': calendars},
context_instance=context
)
...
raise Http404
current_app = resolve(request.path).namespace
context = RequestContext(request, current_app=current_app)
return render_to_response(
...
{'events': events, 'calendar': calendar, 'calendars': calendars},
context_instance=context
)
...
raise Http404
current_app = resolve(request.path).namespace
context = RequestContext(request, current_app=current_app)
return render_to_response(
...
{'event': event, 'calendars': calendars},
context_instance=context
)
# ... rest of the code ...
|
2c54a9eb78a1cb88ef03db97e21e376ae764a33e
|
errata/admin_actions.py
|
errata/admin_actions.py
|
import unicodecsv
from django.http import HttpResponse
from django.utils.encoding import smart_str
def export_as_csv_action(description="Export selected objects as CSV file",
fields=None, exclude=None, header=True):
"""
This function returns an export csv action
'fields' and 'exclude' work like in django ModelForm
'header' is whether or not to output the column names as the first row
"""
def export_as_csv(modeladmin, request, queryset):
opts = modeladmin.model._meta
if not fields:
field_names = [field.name for field in opts.fields]
else:
field_names = fields
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename=%s.csv' % str(opts).replace('.', '_')
writer = unicodecsv.writer(response, encoding='utf-8')
if header:
writer.writerow(field_names)
for obj in queryset:
row = [getattr(obj, field)() if callable(getattr(obj, field)) else getattr(obj, field) for field in field_names]
writer.writerow(row)
return response
export_as_csv.short_description = description
return export_as_csv
|
import unicodecsv
from django.http import StreamingHttpResponse
class Echo:
"""An object that implements just the write method of the file-like
interface.
"""
def write(self, value):
"""Write the value by returning it, instead of storing in a buffer."""
return value
def export_as_csv_action(description="Export selected objects as CSV file",
fields=None, exclude=None, header=True):
"""
This function returns an export csv action
'fields' and 'exclude' work like in django ModelForm
'header' is whether or not to output the column names as the first row
"""
def export_as_csv(modeladmin, request, queryset):
opts = modeladmin.model._meta
if not fields:
field_names = [field.name for field in opts.fields]
else:
field_names = fields
response = StreamingHttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename=%s.csv' % str(opts).replace('.', '_')
pseudo_buffer = Echo()
writer = unicodecsv.writer(pseudo_buffer)
if header:
writer.writerow(field_names)
for obj in queryset:
row = [getattr(obj, field)() if callable(getattr(obj, field)) else getattr(obj, field) for field in field_names]
writer.writerow(row)
return response
export_as_csv.short_description = description
return export_as_csv
|
Make use of Django's StreamingHttpResponse for large CSV exports
|
Make use of Django's StreamingHttpResponse for large CSV exports
|
Python
|
agpl-3.0
|
Connexions/openstax-cms,Connexions/openstax-cms,openstax/openstax-cms,openstax/openstax-cms,openstax/openstax-cms,openstax/openstax-cms
|
import unicodecsv
- from django.http import HttpResponse
+ from django.http import StreamingHttpResponse
- from django.utils.encoding import smart_str
+
+ class Echo:
+ """An object that implements just the write method of the file-like
+ interface.
+ """
+ def write(self, value):
+ """Write the value by returning it, instead of storing in a buffer."""
+ return value
def export_as_csv_action(description="Export selected objects as CSV file",
fields=None, exclude=None, header=True):
"""
This function returns an export csv action
'fields' and 'exclude' work like in django ModelForm
'header' is whether or not to output the column names as the first row
"""
def export_as_csv(modeladmin, request, queryset):
opts = modeladmin.model._meta
if not fields:
field_names = [field.name for field in opts.fields]
else:
field_names = fields
- response = HttpResponse(content_type='text/csv')
+ response = StreamingHttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename=%s.csv' % str(opts).replace('.', '_')
- writer = unicodecsv.writer(response, encoding='utf-8')
+ pseudo_buffer = Echo()
+ writer = unicodecsv.writer(pseudo_buffer)
if header:
writer.writerow(field_names)
for obj in queryset:
row = [getattr(obj, field)() if callable(getattr(obj, field)) else getattr(obj, field) for field in field_names]
writer.writerow(row)
return response
export_as_csv.short_description = description
return export_as_csv
+
|
Make use of Django's StreamingHttpResponse for large CSV exports
|
## Code Before:
import unicodecsv
from django.http import HttpResponse
from django.utils.encoding import smart_str
def export_as_csv_action(description="Export selected objects as CSV file",
fields=None, exclude=None, header=True):
"""
This function returns an export csv action
'fields' and 'exclude' work like in django ModelForm
'header' is whether or not to output the column names as the first row
"""
def export_as_csv(modeladmin, request, queryset):
opts = modeladmin.model._meta
if not fields:
field_names = [field.name for field in opts.fields]
else:
field_names = fields
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename=%s.csv' % str(opts).replace('.', '_')
writer = unicodecsv.writer(response, encoding='utf-8')
if header:
writer.writerow(field_names)
for obj in queryset:
row = [getattr(obj, field)() if callable(getattr(obj, field)) else getattr(obj, field) for field in field_names]
writer.writerow(row)
return response
export_as_csv.short_description = description
return export_as_csv
## Instruction:
Make use of Django's StreamingHttpResponse for large CSV exports
## Code After:
import unicodecsv
from django.http import StreamingHttpResponse
class Echo:
"""An object that implements just the write method of the file-like
interface.
"""
def write(self, value):
"""Write the value by returning it, instead of storing in a buffer."""
return value
def export_as_csv_action(description="Export selected objects as CSV file",
fields=None, exclude=None, header=True):
"""
This function returns an export csv action
'fields' and 'exclude' work like in django ModelForm
'header' is whether or not to output the column names as the first row
"""
def export_as_csv(modeladmin, request, queryset):
opts = modeladmin.model._meta
if not fields:
field_names = [field.name for field in opts.fields]
else:
field_names = fields
response = StreamingHttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename=%s.csv' % str(opts).replace('.', '_')
pseudo_buffer = Echo()
writer = unicodecsv.writer(pseudo_buffer)
if header:
writer.writerow(field_names)
for obj in queryset:
row = [getattr(obj, field)() if callable(getattr(obj, field)) else getattr(obj, field) for field in field_names]
writer.writerow(row)
return response
export_as_csv.short_description = description
return export_as_csv
|
// ... existing code ...
import unicodecsv
from django.http import StreamingHttpResponse
class Echo:
"""An object that implements just the write method of the file-like
interface.
"""
def write(self, value):
"""Write the value by returning it, instead of storing in a buffer."""
return value
// ... modified code ...
response = StreamingHttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename=%s.csv' % str(opts).replace('.', '_')
...
pseudo_buffer = Echo()
writer = unicodecsv.writer(pseudo_buffer)
if header:
// ... rest of the code ...
|
d6a03fad6c9280981ae3beee24de89bd6361bcc9
|
dumbrepl.py
|
dumbrepl.py
|
if __name__ == "__main__":
import pycket.test.testhelper as th
th.dumb_repl()
|
if __name__ == "__main__":
import pycket.values
import pycket.config
from pycket.env import w_global_config
#w_global_config.set_linklet_mode_off()
import pycket.test.testhelper as th
th.dumb_repl()
|
Make sure things are loaded right.
|
Make sure things are loaded right.
|
Python
|
mit
|
samth/pycket,pycket/pycket,pycket/pycket,samth/pycket,samth/pycket,pycket/pycket
|
if __name__ == "__main__":
+ import pycket.values
+ import pycket.config
+ from pycket.env import w_global_config
+ #w_global_config.set_linklet_mode_off()
import pycket.test.testhelper as th
th.dumb_repl()
|
Make sure things are loaded right.
|
## Code Before:
if __name__ == "__main__":
import pycket.test.testhelper as th
th.dumb_repl()
## Instruction:
Make sure things are loaded right.
## Code After:
if __name__ == "__main__":
import pycket.values
import pycket.config
from pycket.env import w_global_config
#w_global_config.set_linklet_mode_off()
import pycket.test.testhelper as th
th.dumb_repl()
|
...
if __name__ == "__main__":
import pycket.values
import pycket.config
from pycket.env import w_global_config
#w_global_config.set_linklet_mode_off()
import pycket.test.testhelper as th
...
|
ae8273f86fc3cc7fdacadf495aa148dda796f11b
|
printcli.py
|
printcli.py
|
import argparse
import os
from labelprinter import Labelprinter
if os.path.isfile('labelprinterServeConf_local.py'):
import labelprinterServeConf_local as conf
else:
import labelprinterServeConf as conf
def text(args, labelprinter):
bold = 'on' if args.bold else 'off'
labelprinter.printText(args.text,
charSize=args.char_size,
font=args.font,
align=args.align,
bold=bold,
charStyle=args.char_style,
cut=args.cut
)
parser = argparse.ArgumentParser(description="A command line interface to Labello.")
subparsers = parser.add_subparsers(help="commands")
parser_text = subparsers.add_parser("text", help="print a text")
parser_text.add_argument("text", type=str, help="the text to print")
parser_text.add_argument("--char_size", type=str, default='42')
parser_text.add_argument("--font", type=str, default='lettergothic')
parser_text.add_argument("--align", type=str, default='left')
parser_text.add_argument("--bold", action='store_true')
parser_text.add_argument("--char_style", type=str, default='normal')
parser_text.add_argument("--cut", type=str, default='full')
parser_text.set_defaults(func=text)
args = parser.parse_args()
labelprinter = Labelprinter(conf=conf)
args.func(args, labelprinter)
|
import argparse
import os
from labelprinter import Labelprinter
import labelprinterServeConf as conf
def text(args, labelprinter):
bold = 'on' if args.bold else 'off'
labelprinter.printText(args.text,
charSize=args.char_size,
font=args.font,
align=args.align,
bold=bold,
charStyle=args.char_style,
cut=args.cut
)
parser = argparse.ArgumentParser(description="A command line interface to Labello.")
subparsers = parser.add_subparsers(help="commands")
parser_text = subparsers.add_parser("text", help="print a text")
parser_text.add_argument("text", type=str, help="the text to print")
parser_text.add_argument("--char_size", type=str, default='42')
parser_text.add_argument("--font", type=str, default='lettergothic')
parser_text.add_argument("--align", type=str, default='left')
parser_text.add_argument("--bold", action='store_true')
parser_text.add_argument("--char_style", type=str, default='normal')
parser_text.add_argument("--cut", type=str, default='full')
parser_text.set_defaults(func=text)
args = parser.parse_args()
labelprinter = Labelprinter(conf=conf)
args.func(args, labelprinter)
|
Make the CLI use the new config (see e4054fb).
|
Make the CLI use the new config (see e4054fb).
|
Python
|
mit
|
chaosdorf/labello,chaosdorf/labello,chaosdorf/labello
|
import argparse
import os
from labelprinter import Labelprinter
-
- if os.path.isfile('labelprinterServeConf_local.py'):
- import labelprinterServeConf_local as conf
- else:
- import labelprinterServeConf as conf
+ import labelprinterServeConf as conf
def text(args, labelprinter):
bold = 'on' if args.bold else 'off'
labelprinter.printText(args.text,
charSize=args.char_size,
font=args.font,
align=args.align,
bold=bold,
charStyle=args.char_style,
cut=args.cut
)
parser = argparse.ArgumentParser(description="A command line interface to Labello.")
subparsers = parser.add_subparsers(help="commands")
parser_text = subparsers.add_parser("text", help="print a text")
parser_text.add_argument("text", type=str, help="the text to print")
parser_text.add_argument("--char_size", type=str, default='42')
parser_text.add_argument("--font", type=str, default='lettergothic')
parser_text.add_argument("--align", type=str, default='left')
parser_text.add_argument("--bold", action='store_true')
parser_text.add_argument("--char_style", type=str, default='normal')
parser_text.add_argument("--cut", type=str, default='full')
parser_text.set_defaults(func=text)
args = parser.parse_args()
labelprinter = Labelprinter(conf=conf)
args.func(args, labelprinter)
|
Make the CLI use the new config (see e4054fb).
|
## Code Before:
import argparse
import os
from labelprinter import Labelprinter
if os.path.isfile('labelprinterServeConf_local.py'):
import labelprinterServeConf_local as conf
else:
import labelprinterServeConf as conf
def text(args, labelprinter):
bold = 'on' if args.bold else 'off'
labelprinter.printText(args.text,
charSize=args.char_size,
font=args.font,
align=args.align,
bold=bold,
charStyle=args.char_style,
cut=args.cut
)
parser = argparse.ArgumentParser(description="A command line interface to Labello.")
subparsers = parser.add_subparsers(help="commands")
parser_text = subparsers.add_parser("text", help="print a text")
parser_text.add_argument("text", type=str, help="the text to print")
parser_text.add_argument("--char_size", type=str, default='42')
parser_text.add_argument("--font", type=str, default='lettergothic')
parser_text.add_argument("--align", type=str, default='left')
parser_text.add_argument("--bold", action='store_true')
parser_text.add_argument("--char_style", type=str, default='normal')
parser_text.add_argument("--cut", type=str, default='full')
parser_text.set_defaults(func=text)
args = parser.parse_args()
labelprinter = Labelprinter(conf=conf)
args.func(args, labelprinter)
## Instruction:
Make the CLI use the new config (see e4054fb).
## Code After:
import argparse
import os
from labelprinter import Labelprinter
import labelprinterServeConf as conf
def text(args, labelprinter):
bold = 'on' if args.bold else 'off'
labelprinter.printText(args.text,
charSize=args.char_size,
font=args.font,
align=args.align,
bold=bold,
charStyle=args.char_style,
cut=args.cut
)
parser = argparse.ArgumentParser(description="A command line interface to Labello.")
subparsers = parser.add_subparsers(help="commands")
parser_text = subparsers.add_parser("text", help="print a text")
parser_text.add_argument("text", type=str, help="the text to print")
parser_text.add_argument("--char_size", type=str, default='42')
parser_text.add_argument("--font", type=str, default='lettergothic')
parser_text.add_argument("--align", type=str, default='left')
parser_text.add_argument("--bold", action='store_true')
parser_text.add_argument("--char_style", type=str, default='normal')
parser_text.add_argument("--cut", type=str, default='full')
parser_text.set_defaults(func=text)
args = parser.parse_args()
labelprinter = Labelprinter(conf=conf)
args.func(args, labelprinter)
|
...
from labelprinter import Labelprinter
import labelprinterServeConf as conf
...
|
3e5e35aa85e656efbdddddf4c4d2accad964a42b
|
members/elections/serializers.py
|
members/elections/serializers.py
|
from rest_framework import serializers
from .models import Election, Candidate
class CandidatePublicSerializer(serializers.ModelSerializer):
organization = serializers.CharField(source='organization.display_name')
class Meta:
model = Candidate
fields = ('candidate_first_name', 'candidate_last_name', 'candidate_job_title',
'biography', 'vision', 'ideas', 'expertise', 'external_url', 'seat_type', 'organization', 'reason')
|
from rest_framework import serializers
from .models import Election, Candidate
class CandidatePublicSerializer(serializers.ModelSerializer):
organization = serializers.CharField(source='organization.display_name')
expertise = serializers.SerializerMethodField()
class Meta:
model = Candidate
fields = ('candidate_first_name', 'candidate_last_name', 'candidate_job_title',
'biography', 'vision', 'ideas', 'expertise', 'expertise_other', 'expertise_expanded',
'external_url', 'seat_type', 'organization', 'reason')
def get_expertise(self, obj):
return ', '.join(obj.get_expertise_items())
|
Update elections with new apis
|
Update elections with new apis
|
Python
|
mit
|
ocwc/ocwc-members,ocwc/ocwc-members,ocwc/ocwc-members,ocwc/ocwc-members
|
from rest_framework import serializers
from .models import Election, Candidate
+
class CandidatePublicSerializer(serializers.ModelSerializer):
- organization = serializers.CharField(source='organization.display_name')
+ organization = serializers.CharField(source='organization.display_name')
+ expertise = serializers.SerializerMethodField()
- class Meta:
- model = Candidate
- fields = ('candidate_first_name', 'candidate_last_name', 'candidate_job_title',
- 'biography', 'vision', 'ideas', 'expertise', 'external_url', 'seat_type', 'organization', 'reason')
+ class Meta:
+ model = Candidate
+ fields = ('candidate_first_name', 'candidate_last_name', 'candidate_job_title',
+ 'biography', 'vision', 'ideas', 'expertise', 'expertise_other', 'expertise_expanded',
+ 'external_url', 'seat_type', 'organization', 'reason')
+
+ def get_expertise(self, obj):
+ return ', '.join(obj.get_expertise_items())
+
|
Update elections with new apis
|
## Code Before:
from rest_framework import serializers
from .models import Election, Candidate
class CandidatePublicSerializer(serializers.ModelSerializer):
organization = serializers.CharField(source='organization.display_name')
class Meta:
model = Candidate
fields = ('candidate_first_name', 'candidate_last_name', 'candidate_job_title',
'biography', 'vision', 'ideas', 'expertise', 'external_url', 'seat_type', 'organization', 'reason')
## Instruction:
Update elections with new apis
## Code After:
from rest_framework import serializers
from .models import Election, Candidate
class CandidatePublicSerializer(serializers.ModelSerializer):
organization = serializers.CharField(source='organization.display_name')
expertise = serializers.SerializerMethodField()
class Meta:
model = Candidate
fields = ('candidate_first_name', 'candidate_last_name', 'candidate_job_title',
'biography', 'vision', 'ideas', 'expertise', 'expertise_other', 'expertise_expanded',
'external_url', 'seat_type', 'organization', 'reason')
def get_expertise(self, obj):
return ', '.join(obj.get_expertise_items())
|
# ... existing code ...
class CandidatePublicSerializer(serializers.ModelSerializer):
organization = serializers.CharField(source='organization.display_name')
expertise = serializers.SerializerMethodField()
class Meta:
model = Candidate
fields = ('candidate_first_name', 'candidate_last_name', 'candidate_job_title',
'biography', 'vision', 'ideas', 'expertise', 'expertise_other', 'expertise_expanded',
'external_url', 'seat_type', 'organization', 'reason')
def get_expertise(self, obj):
return ', '.join(obj.get_expertise_items())
# ... rest of the code ...
|
dd68fbb86100d0d3da08172505e7c564cc5bd3e7
|
monitor-notifier-slack.py
|
monitor-notifier-slack.py
|
import pika
import json
import requests
SLACK_WEBHOOK_URL = os.environ["SLACK_WEBHOOK_URL"]
RABBIT_MQ_SERVER = os.environ["RABBIT_MQ_SERVER"]
connection = pika.BlockingConnection(pika.ConnectionParameters(
RABBIT_MQ_SERVER))
channel = connection.channel()
channel.queue_declare(queue='slack')
def callback(ch, method, properties, body):
payload = {}
payload["text"] = body
r = requests.post(SLACK_WEBHOOK_URL, data = json.dumps(payload))
channel.basic_consume(callback, queue='slack', no_ack=True)
channel.start_consuming()
|
import pika
import json
import requests
import os
RABBIT_MQ_SERVER = os.environ["RABBIT_MQ_SERVER"]
RABBIT_MQ_USER = os.environ["RABBIT_MQ_USER"]
RABBIT_MQ_PWD = os.environ["RABBIT_MQ_PWD"]
credentials = pika.PlainCredentials(RABBIT_MQ_USER, RABBIT_MQ_PWD)
connection = pika.BlockingConnection(pika.ConnectionParameters(
RABBIT_MQ_SERVER, credentials = credentials))
channel = connection.channel()
# channel.queue_declare(queue='slack')
def callback(ch, method, properties, body):
payload = {}
payload["text"] = body
req = json.loads(body)
webhook_url = json.loads(req["monitor"]["notifier"]["arguments"])["webhook_url"]
r = requests.post(webhook_url, data = json.dumps(payload))
channel.basic_consume(callback, queue='slack', no_ack=True)
channel.start_consuming()
|
Add credentials + read webhook_url from notifier arguments
|
Add credentials + read webhook_url from notifier arguments
|
Python
|
mit
|
observer-hackaton/monitor-notifier-slack
|
import pika
import json
import requests
+ import os
- SLACK_WEBHOOK_URL = os.environ["SLACK_WEBHOOK_URL"]
RABBIT_MQ_SERVER = os.environ["RABBIT_MQ_SERVER"]
+ RABBIT_MQ_USER = os.environ["RABBIT_MQ_USER"]
+ RABBIT_MQ_PWD = os.environ["RABBIT_MQ_PWD"]
+
+ credentials = pika.PlainCredentials(RABBIT_MQ_USER, RABBIT_MQ_PWD)
connection = pika.BlockingConnection(pika.ConnectionParameters(
- RABBIT_MQ_SERVER))
+ RABBIT_MQ_SERVER, credentials = credentials))
channel = connection.channel()
- channel.queue_declare(queue='slack')
+ # channel.queue_declare(queue='slack')
def callback(ch, method, properties, body):
payload = {}
payload["text"] = body
+ req = json.loads(body)
+ webhook_url = json.loads(req["monitor"]["notifier"]["arguments"])["webhook_url"]
- r = requests.post(SLACK_WEBHOOK_URL, data = json.dumps(payload))
+ r = requests.post(webhook_url, data = json.dumps(payload))
channel.basic_consume(callback, queue='slack', no_ack=True)
channel.start_consuming()
|
Add credentials + read webhook_url from notifier arguments
|
## Code Before:
import pika
import json
import requests
SLACK_WEBHOOK_URL = os.environ["SLACK_WEBHOOK_URL"]
RABBIT_MQ_SERVER = os.environ["RABBIT_MQ_SERVER"]
connection = pika.BlockingConnection(pika.ConnectionParameters(
RABBIT_MQ_SERVER))
channel = connection.channel()
channel.queue_declare(queue='slack')
def callback(ch, method, properties, body):
payload = {}
payload["text"] = body
r = requests.post(SLACK_WEBHOOK_URL, data = json.dumps(payload))
channel.basic_consume(callback, queue='slack', no_ack=True)
channel.start_consuming()
## Instruction:
Add credentials + read webhook_url from notifier arguments
## Code After:
import pika
import json
import requests
import os
RABBIT_MQ_SERVER = os.environ["RABBIT_MQ_SERVER"]
RABBIT_MQ_USER = os.environ["RABBIT_MQ_USER"]
RABBIT_MQ_PWD = os.environ["RABBIT_MQ_PWD"]
credentials = pika.PlainCredentials(RABBIT_MQ_USER, RABBIT_MQ_PWD)
connection = pika.BlockingConnection(pika.ConnectionParameters(
RABBIT_MQ_SERVER, credentials = credentials))
channel = connection.channel()
# channel.queue_declare(queue='slack')
def callback(ch, method, properties, body):
payload = {}
payload["text"] = body
req = json.loads(body)
webhook_url = json.loads(req["monitor"]["notifier"]["arguments"])["webhook_url"]
r = requests.post(webhook_url, data = json.dumps(payload))
channel.basic_consume(callback, queue='slack', no_ack=True)
channel.start_consuming()
|
// ... existing code ...
import requests
import os
RABBIT_MQ_SERVER = os.environ["RABBIT_MQ_SERVER"]
RABBIT_MQ_USER = os.environ["RABBIT_MQ_USER"]
RABBIT_MQ_PWD = os.environ["RABBIT_MQ_PWD"]
credentials = pika.PlainCredentials(RABBIT_MQ_USER, RABBIT_MQ_PWD)
// ... modified code ...
connection = pika.BlockingConnection(pika.ConnectionParameters(
RABBIT_MQ_SERVER, credentials = credentials))
channel = connection.channel()
# channel.queue_declare(queue='slack')
...
payload["text"] = body
req = json.loads(body)
webhook_url = json.loads(req["monitor"]["notifier"]["arguments"])["webhook_url"]
r = requests.post(webhook_url, data = json.dumps(payload))
// ... rest of the code ...
|
8bacd0f657a931754d8c03e2de86c5e00ac5f791
|
modoboa/lib/cryptutils.py
|
modoboa/lib/cryptutils.py
|
from Crypto.Cipher import AES
import base64
import random
import string
from modoboa.lib import parameters
def random_key(l=16):
"""Generate a random key
:param integer l: the key's length
:return: a string
"""
char_set = string.digits + string.letters + string.punctuation
return ''.join(random.sample(char_set * l, l))
def encrypt(clear):
key = parameters.get_admin("SECRET_KEY", app="core")
obj = AES.new(key, AES.MODE_ECB)
if type(clear) is unicode:
clear = clear.encode("utf-8")
if len(clear) % AES.block_size:
clear += " " * (AES.block_size - len(clear) % AES.block_size)
ciph = obj.encrypt(clear)
ciph = base64.b64encode(ciph)
return ciph
def decrypt(ciph):
obj = AES.new(
parameters.get_admin("SECRET_KEY", app="core"), AES.MODE_ECB
)
ciph = base64.b64decode(ciph)
clear = obj.decrypt(ciph)
return clear.rstrip(' ')
def get_password(request):
return decrypt(request.session["password"])
|
"""Crypto related utilities."""
import base64
import random
import string
from Crypto.Cipher import AES
from modoboa.lib import parameters
def random_key(l=16):
"""Generate a random key.
:param integer l: the key's length
:return: a string
"""
population = string.digits + string.letters + string.punctuation
while True:
key = "".join(random.sample(population * l, l))
if len(key) == l:
return key
def encrypt(clear):
key = parameters.get_admin("SECRET_KEY", app="core")
obj = AES.new(key, AES.MODE_ECB)
if type(clear) is unicode:
clear = clear.encode("utf-8")
if len(clear) % AES.block_size:
clear += " " * (AES.block_size - len(clear) % AES.block_size)
ciph = obj.encrypt(clear)
ciph = base64.b64encode(ciph)
return ciph
def decrypt(ciph):
obj = AES.new(
parameters.get_admin("SECRET_KEY", app="core"), AES.MODE_ECB
)
ciph = base64.b64decode(ciph)
clear = obj.decrypt(ciph)
return clear.rstrip(' ')
def get_password(request):
return decrypt(request.session["password"])
|
Make sure key has the required size.
|
Make sure key has the required size.
see #867
|
Python
|
isc
|
tonioo/modoboa,modoboa/modoboa,bearstech/modoboa,carragom/modoboa,tonioo/modoboa,modoboa/modoboa,bearstech/modoboa,carragom/modoboa,bearstech/modoboa,bearstech/modoboa,modoboa/modoboa,carragom/modoboa,modoboa/modoboa,tonioo/modoboa
|
- from Crypto.Cipher import AES
+ """Crypto related utilities."""
+
import base64
import random
import string
+
+ from Crypto.Cipher import AES
+
from modoboa.lib import parameters
def random_key(l=16):
- """Generate a random key
+ """Generate a random key.
:param integer l: the key's length
:return: a string
"""
- char_set = string.digits + string.letters + string.punctuation
+ population = string.digits + string.letters + string.punctuation
- return ''.join(random.sample(char_set * l, l))
+ while True:
+ key = "".join(random.sample(population * l, l))
+ if len(key) == l:
+ return key
def encrypt(clear):
key = parameters.get_admin("SECRET_KEY", app="core")
obj = AES.new(key, AES.MODE_ECB)
if type(clear) is unicode:
clear = clear.encode("utf-8")
if len(clear) % AES.block_size:
clear += " " * (AES.block_size - len(clear) % AES.block_size)
ciph = obj.encrypt(clear)
ciph = base64.b64encode(ciph)
return ciph
def decrypt(ciph):
obj = AES.new(
parameters.get_admin("SECRET_KEY", app="core"), AES.MODE_ECB
)
ciph = base64.b64decode(ciph)
clear = obj.decrypt(ciph)
return clear.rstrip(' ')
def get_password(request):
return decrypt(request.session["password"])
|
Make sure key has the required size.
|
## Code Before:
from Crypto.Cipher import AES
import base64
import random
import string
from modoboa.lib import parameters
def random_key(l=16):
"""Generate a random key
:param integer l: the key's length
:return: a string
"""
char_set = string.digits + string.letters + string.punctuation
return ''.join(random.sample(char_set * l, l))
def encrypt(clear):
key = parameters.get_admin("SECRET_KEY", app="core")
obj = AES.new(key, AES.MODE_ECB)
if type(clear) is unicode:
clear = clear.encode("utf-8")
if len(clear) % AES.block_size:
clear += " " * (AES.block_size - len(clear) % AES.block_size)
ciph = obj.encrypt(clear)
ciph = base64.b64encode(ciph)
return ciph
def decrypt(ciph):
obj = AES.new(
parameters.get_admin("SECRET_KEY", app="core"), AES.MODE_ECB
)
ciph = base64.b64decode(ciph)
clear = obj.decrypt(ciph)
return clear.rstrip(' ')
def get_password(request):
return decrypt(request.session["password"])
## Instruction:
Make sure key has the required size.
## Code After:
"""Crypto related utilities."""
import base64
import random
import string
from Crypto.Cipher import AES
from modoboa.lib import parameters
def random_key(l=16):
"""Generate a random key.
:param integer l: the key's length
:return: a string
"""
population = string.digits + string.letters + string.punctuation
while True:
key = "".join(random.sample(population * l, l))
if len(key) == l:
return key
def encrypt(clear):
key = parameters.get_admin("SECRET_KEY", app="core")
obj = AES.new(key, AES.MODE_ECB)
if type(clear) is unicode:
clear = clear.encode("utf-8")
if len(clear) % AES.block_size:
clear += " " * (AES.block_size - len(clear) % AES.block_size)
ciph = obj.encrypt(clear)
ciph = base64.b64encode(ciph)
return ciph
def decrypt(ciph):
obj = AES.new(
parameters.get_admin("SECRET_KEY", app="core"), AES.MODE_ECB
)
ciph = base64.b64decode(ciph)
clear = obj.decrypt(ciph)
return clear.rstrip(' ')
def get_password(request):
return decrypt(request.session["password"])
|
# ... existing code ...
"""Crypto related utilities."""
import base64
# ... modified code ...
import string
from Crypto.Cipher import AES
from modoboa.lib import parameters
...
def random_key(l=16):
"""Generate a random key.
...
"""
population = string.digits + string.letters + string.punctuation
while True:
key = "".join(random.sample(population * l, l))
if len(key) == l:
return key
# ... rest of the code ...
|
fb5fc6e62a3c1b018d8f68cc37e4d541226a564b
|
integration-tests/features/steps/gremlin.py
|
integration-tests/features/steps/gremlin.py
|
"""Tests for Gremlin database."""
import os
import requests
from behave import given, then, when
from urllib.parse import urljoin
@when('I access Gremlin API')
def gremlin_url_access(context):
"""Access the Gremlin service API using the HTTP POST method."""
post_query(context, "")
def post_query(context, query):
"""Post the already constructed query to the Gremlin."""
data = {"gremlin": query}
context.response = requests.post(context.gremlin_url, json=data)
|
"""Tests for Gremlin database."""
import os
import requests
from behave import given, then, when
from urllib.parse import urljoin
from src.json_utils import *
@when('I access Gremlin API')
def gremlin_url_access(context):
"""Access the Gremlin service API using the HTTP POST method."""
post_query(context, "")
def post_query(context, query):
"""Post the already constructed query to the Gremlin."""
data = {"gremlin": query}
context.response = requests.post(context.gremlin_url, json=data)
@then('I should get valid Gremlin response')
def valid_gremlin_response(context):
"""Check that the Gremlin response is valid."""
check_request_id_value_in_json_response(context, "requestId")
data = context.response.json()
assert data, "Gremlin does not send a proper response"
check_gremlin_status_node(data)
check_gremlin_result_node(data)
def check_gremlin_status_node(data):
"""Check the basic structure of the 'status' node in Gremlin response."""
status = check_and_get_attribute(data, "status")
message = check_and_get_attribute(status, "message")
code = check_and_get_attribute(status, "code")
attributes = check_and_get_attribute(status, "attributes")
assert message == ""
assert code == 200
def check_gremlin_result_node(data):
"""Check the basic structure of the 'result' node in Gremlin response."""
result = check_and_get_attribute(data, "result")
data = check_and_get_attribute(result, "data")
meta = check_and_get_attribute(result, "meta")
assert type(data) is list
assert type(meta) is dict
|
Test step for check the Gremlin response structure
|
Test step for check the Gremlin response structure
|
Python
|
apache-2.0
|
tisnik/fabric8-analytics-common,jpopelka/fabric8-analytics-common,jpopelka/fabric8-analytics-common,tisnik/fabric8-analytics-common,tisnik/fabric8-analytics-common,jpopelka/fabric8-analytics-common
|
"""Tests for Gremlin database."""
import os
import requests
from behave import given, then, when
from urllib.parse import urljoin
+ from src.json_utils import *
@when('I access Gremlin API')
def gremlin_url_access(context):
"""Access the Gremlin service API using the HTTP POST method."""
post_query(context, "")
def post_query(context, query):
"""Post the already constructed query to the Gremlin."""
data = {"gremlin": query}
context.response = requests.post(context.gremlin_url, json=data)
+
+ @then('I should get valid Gremlin response')
+ def valid_gremlin_response(context):
+ """Check that the Gremlin response is valid."""
+ check_request_id_value_in_json_response(context, "requestId")
+
+ data = context.response.json()
+ assert data, "Gremlin does not send a proper response"
+
+ check_gremlin_status_node(data)
+ check_gremlin_result_node(data)
+
+
+ def check_gremlin_status_node(data):
+ """Check the basic structure of the 'status' node in Gremlin response."""
+ status = check_and_get_attribute(data, "status")
+ message = check_and_get_attribute(status, "message")
+ code = check_and_get_attribute(status, "code")
+ attributes = check_and_get_attribute(status, "attributes")
+
+ assert message == ""
+ assert code == 200
+
+
+ def check_gremlin_result_node(data):
+ """Check the basic structure of the 'result' node in Gremlin response."""
+ result = check_and_get_attribute(data, "result")
+ data = check_and_get_attribute(result, "data")
+ meta = check_and_get_attribute(result, "meta")
+
+ assert type(data) is list
+ assert type(meta) is dict
+
|
Test step for check the Gremlin response structure
|
## Code Before:
"""Tests for Gremlin database."""
import os
import requests
from behave import given, then, when
from urllib.parse import urljoin
@when('I access Gremlin API')
def gremlin_url_access(context):
"""Access the Gremlin service API using the HTTP POST method."""
post_query(context, "")
def post_query(context, query):
"""Post the already constructed query to the Gremlin."""
data = {"gremlin": query}
context.response = requests.post(context.gremlin_url, json=data)
## Instruction:
Test step for check the Gremlin response structure
## Code After:
"""Tests for Gremlin database."""
import os
import requests
from behave import given, then, when
from urllib.parse import urljoin
from src.json_utils import *
@when('I access Gremlin API')
def gremlin_url_access(context):
"""Access the Gremlin service API using the HTTP POST method."""
post_query(context, "")
def post_query(context, query):
"""Post the already constructed query to the Gremlin."""
data = {"gremlin": query}
context.response = requests.post(context.gremlin_url, json=data)
@then('I should get valid Gremlin response')
def valid_gremlin_response(context):
"""Check that the Gremlin response is valid."""
check_request_id_value_in_json_response(context, "requestId")
data = context.response.json()
assert data, "Gremlin does not send a proper response"
check_gremlin_status_node(data)
check_gremlin_result_node(data)
def check_gremlin_status_node(data):
"""Check the basic structure of the 'status' node in Gremlin response."""
status = check_and_get_attribute(data, "status")
message = check_and_get_attribute(status, "message")
code = check_and_get_attribute(status, "code")
attributes = check_and_get_attribute(status, "attributes")
assert message == ""
assert code == 200
def check_gremlin_result_node(data):
"""Check the basic structure of the 'result' node in Gremlin response."""
result = check_and_get_attribute(data, "result")
data = check_and_get_attribute(result, "data")
meta = check_and_get_attribute(result, "meta")
assert type(data) is list
assert type(meta) is dict
|
// ... existing code ...
from urllib.parse import urljoin
from src.json_utils import *
// ... modified code ...
context.response = requests.post(context.gremlin_url, json=data)
@then('I should get valid Gremlin response')
def valid_gremlin_response(context):
"""Check that the Gremlin response is valid."""
check_request_id_value_in_json_response(context, "requestId")
data = context.response.json()
assert data, "Gremlin does not send a proper response"
check_gremlin_status_node(data)
check_gremlin_result_node(data)
def check_gremlin_status_node(data):
"""Check the basic structure of the 'status' node in Gremlin response."""
status = check_and_get_attribute(data, "status")
message = check_and_get_attribute(status, "message")
code = check_and_get_attribute(status, "code")
attributes = check_and_get_attribute(status, "attributes")
assert message == ""
assert code == 200
def check_gremlin_result_node(data):
"""Check the basic structure of the 'result' node in Gremlin response."""
result = check_and_get_attribute(data, "result")
data = check_and_get_attribute(result, "data")
meta = check_and_get_attribute(result, "meta")
assert type(data) is list
assert type(meta) is dict
// ... rest of the code ...
|
8f2d6d2714aa1b60950a2fc355d39297b7f2cdfb
|
keras/activations.py
|
keras/activations.py
|
from __future__ import absolute_import
from . import backend as K
def softmax(x):
return K.softmax(x)
def softplus(x):
return K.softplus(x)
def relu(x, alpha=0., max_value=None):
return K.relu(x, alpha=alpha, max_value=max_value)
def tanh(x):
return K.tanh(x)
def sigmoid(x):
return K.sigmoid(x)
def hard_sigmoid(x):
return K.hard_sigmoid(x)
def linear(x):
'''
The function returns the variable that is passed in, so all types work
'''
return x
from .utils.generic_utils import get_from_module
def get(identifier):
return get_from_module(identifier, globals(), 'activation function')
|
from __future__ import absolute_import
from . import backend as K
def softmax(x):
ndim = K.ndim(x)
if ndim == 2:
return K.softmax(x)
elif ndim == 3:
# apply softmax to each timestep
def step(x, states):
return K.softmax(x), []
last_output, outputs, states = K.rnn(step, x, [], masking=False)
return outputs
else:
raise Exception('Cannot apply softmax to a tensor that is not 2D or 3D. ' +
'Here, ndim=' + str(ndim))
def softplus(x):
return K.softplus(x)
def relu(x, alpha=0., max_value=None):
return K.relu(x, alpha=alpha, max_value=max_value)
def tanh(x):
return K.tanh(x)
def sigmoid(x):
return K.sigmoid(x)
def hard_sigmoid(x):
return K.hard_sigmoid(x)
def linear(x):
'''
The function returns the variable that is passed in, so all types work
'''
return x
from .utils.generic_utils import get_from_module
def get(identifier):
return get_from_module(identifier, globals(), 'activation function')
|
Add support for time-distributed softmax.
|
Add support for time-distributed softmax.
|
Python
|
mit
|
daviddiazvico/keras,DeepGnosis/keras,kemaswill/keras,keras-team/keras,relh/keras,keras-team/keras,dolaameng/keras,kuza55/keras,nebw/keras
|
from __future__ import absolute_import
from . import backend as K
def softmax(x):
+ ndim = K.ndim(x)
+ if ndim == 2:
- return K.softmax(x)
+ return K.softmax(x)
+ elif ndim == 3:
+ # apply softmax to each timestep
+ def step(x, states):
+ return K.softmax(x), []
+ last_output, outputs, states = K.rnn(step, x, [], masking=False)
+ return outputs
+ else:
+ raise Exception('Cannot apply softmax to a tensor that is not 2D or 3D. ' +
+ 'Here, ndim=' + str(ndim))
def softplus(x):
return K.softplus(x)
def relu(x, alpha=0., max_value=None):
return K.relu(x, alpha=alpha, max_value=max_value)
def tanh(x):
return K.tanh(x)
def sigmoid(x):
return K.sigmoid(x)
def hard_sigmoid(x):
return K.hard_sigmoid(x)
def linear(x):
'''
The function returns the variable that is passed in, so all types work
'''
return x
from .utils.generic_utils import get_from_module
def get(identifier):
return get_from_module(identifier, globals(), 'activation function')
|
Add support for time-distributed softmax.
|
## Code Before:
from __future__ import absolute_import
from . import backend as K
def softmax(x):
return K.softmax(x)
def softplus(x):
return K.softplus(x)
def relu(x, alpha=0., max_value=None):
return K.relu(x, alpha=alpha, max_value=max_value)
def tanh(x):
return K.tanh(x)
def sigmoid(x):
return K.sigmoid(x)
def hard_sigmoid(x):
return K.hard_sigmoid(x)
def linear(x):
'''
The function returns the variable that is passed in, so all types work
'''
return x
from .utils.generic_utils import get_from_module
def get(identifier):
return get_from_module(identifier, globals(), 'activation function')
## Instruction:
Add support for time-distributed softmax.
## Code After:
from __future__ import absolute_import
from . import backend as K
def softmax(x):
ndim = K.ndim(x)
if ndim == 2:
return K.softmax(x)
elif ndim == 3:
# apply softmax to each timestep
def step(x, states):
return K.softmax(x), []
last_output, outputs, states = K.rnn(step, x, [], masking=False)
return outputs
else:
raise Exception('Cannot apply softmax to a tensor that is not 2D or 3D. ' +
'Here, ndim=' + str(ndim))
def softplus(x):
return K.softplus(x)
def relu(x, alpha=0., max_value=None):
return K.relu(x, alpha=alpha, max_value=max_value)
def tanh(x):
return K.tanh(x)
def sigmoid(x):
return K.sigmoid(x)
def hard_sigmoid(x):
return K.hard_sigmoid(x)
def linear(x):
'''
The function returns the variable that is passed in, so all types work
'''
return x
from .utils.generic_utils import get_from_module
def get(identifier):
return get_from_module(identifier, globals(), 'activation function')
|
# ... existing code ...
def softmax(x):
ndim = K.ndim(x)
if ndim == 2:
return K.softmax(x)
elif ndim == 3:
# apply softmax to each timestep
def step(x, states):
return K.softmax(x), []
last_output, outputs, states = K.rnn(step, x, [], masking=False)
return outputs
else:
raise Exception('Cannot apply softmax to a tensor that is not 2D or 3D. ' +
'Here, ndim=' + str(ndim))
# ... rest of the code ...
|
69d0cf6cc0d19f1669f56a361447935e375ac05c
|
indico/modules/events/logs/views.py
|
indico/modules/events/logs/views.py
|
from __future__ import unicode_literals
from indico.modules.events.management.views import WPEventManagement
class WPEventLogs(WPEventManagement):
bundles = ('react.js', 'module_events.logs.js', 'module_events.logs.css')
template_prefix = 'events/logs/'
sidemenu_option = 'logs'
|
from __future__ import unicode_literals
from indico.modules.events.management.views import WPEventManagement
class WPEventLogs(WPEventManagement):
bundles = ('react.js', 'semantic-ui.js', 'module_events.logs.js', 'module_events.logs.css')
template_prefix = 'events/logs/'
sidemenu_option = 'logs'
|
Include SUIR JS on logs page
|
Include SUIR JS on logs page
This is not pretty, as we don't even use SUIR there, but
indico/utils/redux imports a module that imports SUIR and thus breaks
the logs page if SUIR is not included.
|
Python
|
mit
|
DirkHoffmann/indico,indico/indico,OmeGak/indico,pferreir/indico,ThiefMaster/indico,ThiefMaster/indico,DirkHoffmann/indico,mvidalgarcia/indico,mvidalgarcia/indico,pferreir/indico,indico/indico,OmeGak/indico,mvidalgarcia/indico,indico/indico,pferreir/indico,ThiefMaster/indico,mic4ael/indico,indico/indico,DirkHoffmann/indico,mic4ael/indico,pferreir/indico,OmeGak/indico,ThiefMaster/indico,DirkHoffmann/indico,OmeGak/indico,mvidalgarcia/indico,mic4ael/indico,mic4ael/indico
|
from __future__ import unicode_literals
from indico.modules.events.management.views import WPEventManagement
class WPEventLogs(WPEventManagement):
- bundles = ('react.js', 'module_events.logs.js', 'module_events.logs.css')
+ bundles = ('react.js', 'semantic-ui.js', 'module_events.logs.js', 'module_events.logs.css')
template_prefix = 'events/logs/'
sidemenu_option = 'logs'
|
Include SUIR JS on logs page
|
## Code Before:
from __future__ import unicode_literals
from indico.modules.events.management.views import WPEventManagement
class WPEventLogs(WPEventManagement):
bundles = ('react.js', 'module_events.logs.js', 'module_events.logs.css')
template_prefix = 'events/logs/'
sidemenu_option = 'logs'
## Instruction:
Include SUIR JS on logs page
## Code After:
from __future__ import unicode_literals
from indico.modules.events.management.views import WPEventManagement
class WPEventLogs(WPEventManagement):
bundles = ('react.js', 'semantic-ui.js', 'module_events.logs.js', 'module_events.logs.css')
template_prefix = 'events/logs/'
sidemenu_option = 'logs'
|
// ... existing code ...
class WPEventLogs(WPEventManagement):
bundles = ('react.js', 'semantic-ui.js', 'module_events.logs.js', 'module_events.logs.css')
template_prefix = 'events/logs/'
// ... rest of the code ...
|
2207dd266887e812cae9da67ca00bef80c9985fd
|
thefuck/shells/__init__.py
|
thefuck/shells/__init__.py
|
import os
from psutil import Process
from .bash import Bash
from .fish import Fish
from .generic import Generic
from .tcsh import Tcsh
from .zsh import Zsh
shells = {'bash': Bash,
'fish': Fish,
'zsh': Zsh,
'csh': Tcsh,
'tcsh': Tcsh}
def _get_shell():
try:
shell_name = Process(os.getpid()).parent().name()
except TypeError:
shell_name = Process(os.getpid()).parent.name
return shells.get(shell_name, Generic)()
shell = _get_shell()
|
import os
from psutil import Process
from .bash import Bash
from .fish import Fish
from .generic import Generic
from .tcsh import Tcsh
from .zsh import Zsh
shells = {'bash': Bash,
'fish': Fish,
'zsh': Zsh,
'csh': Tcsh,
'tcsh': Tcsh}
def _get_shell():
proc = Process(os.getpid())
while (proc is not None):
name = None
try:
name = proc.name()
except TypeError:
name = proc.name
name = os.path.splitext(name)[0]
if name in shells:
return shells[name]()
try:
proc = proc.parent()
except TypeError:
proc = proc.parent
return Generic()
shell = _get_shell()
|
Update _get_shell to work with Windows
|
Update _get_shell to work with Windows
- _get_shell assumed the parent process would always be the shell process, in Powershell the
parent process is Python, with the grandparent being the shell
- Switched to walking the process tree so the same code path can be used in both places
|
Python
|
mit
|
mlk/thefuck,SimenB/thefuck,SimenB/thefuck,mlk/thefuck,nvbn/thefuck,Clpsplug/thefuck,scorphus/thefuck,Clpsplug/thefuck,scorphus/thefuck,nvbn/thefuck
|
import os
from psutil import Process
from .bash import Bash
from .fish import Fish
from .generic import Generic
from .tcsh import Tcsh
from .zsh import Zsh
shells = {'bash': Bash,
'fish': Fish,
'zsh': Zsh,
'csh': Tcsh,
'tcsh': Tcsh}
def _get_shell():
+ proc = Process(os.getpid())
+
+ while (proc is not None):
+ name = None
- try:
+ try:
- shell_name = Process(os.getpid()).parent().name()
+ name = proc.name()
- except TypeError:
+ except TypeError:
- shell_name = Process(os.getpid()).parent.name
- return shells.get(shell_name, Generic)()
+ name = proc.name
+
+ name = os.path.splitext(name)[0]
+
+ if name in shells:
+ return shells[name]()
+
+ try:
+ proc = proc.parent()
+ except TypeError:
+ proc = proc.parent
+
+ return Generic()
shell = _get_shell()
|
Update _get_shell to work with Windows
|
## Code Before:
import os
from psutil import Process
from .bash import Bash
from .fish import Fish
from .generic import Generic
from .tcsh import Tcsh
from .zsh import Zsh
shells = {'bash': Bash,
'fish': Fish,
'zsh': Zsh,
'csh': Tcsh,
'tcsh': Tcsh}
def _get_shell():
try:
shell_name = Process(os.getpid()).parent().name()
except TypeError:
shell_name = Process(os.getpid()).parent.name
return shells.get(shell_name, Generic)()
shell = _get_shell()
## Instruction:
Update _get_shell to work with Windows
## Code After:
import os
from psutil import Process
from .bash import Bash
from .fish import Fish
from .generic import Generic
from .tcsh import Tcsh
from .zsh import Zsh
shells = {'bash': Bash,
'fish': Fish,
'zsh': Zsh,
'csh': Tcsh,
'tcsh': Tcsh}
def _get_shell():
proc = Process(os.getpid())
while (proc is not None):
name = None
try:
name = proc.name()
except TypeError:
name = proc.name
name = os.path.splitext(name)[0]
if name in shells:
return shells[name]()
try:
proc = proc.parent()
except TypeError:
proc = proc.parent
return Generic()
shell = _get_shell()
|
# ... existing code ...
def _get_shell():
proc = Process(os.getpid())
while (proc is not None):
name = None
try:
name = proc.name()
except TypeError:
name = proc.name
name = os.path.splitext(name)[0]
if name in shells:
return shells[name]()
try:
proc = proc.parent()
except TypeError:
proc = proc.parent
return Generic()
# ... rest of the code ...
|
d27b2d71a0e5f834d4758c67fa6e8ed342001a88
|
salt/output/__init__.py
|
salt/output/__init__.py
|
'''
Used to manage the outputter system. This package is the modular system used
for managing outputters.
'''
import salt.loader
def display_output(data, out, opts=None):
'''
Print the passed data using the desired output
'''
if opts is None:
opts = {}
outputters = salt.loader.outputters(opts)
if not out in outputters:
outputters['pprint'](data)
outputters[out](data)
|
'''
Used to manage the outputter system. This package is the modular system used
for managing outputters.
'''
# Import salt utils
import salt.loader
def display_output(data, out, opts=None):
'''
Print the passed data using the desired output
'''
if opts is None:
opts = {}
outputters = salt.loader.outputters(opts)
if not out in outputters:
outputters['pprint'](data)
outputters[out](data)
|
Add some checks to output module
|
Add some checks to output module
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
'''
Used to manage the outputter system. This package is the modular system used
for managing outputters.
'''
+ # Import salt utils
import salt.loader
def display_output(data, out, opts=None):
'''
Print the passed data using the desired output
'''
if opts is None:
opts = {}
outputters = salt.loader.outputters(opts)
if not out in outputters:
outputters['pprint'](data)
outputters[out](data)
|
Add some checks to output module
|
## Code Before:
'''
Used to manage the outputter system. This package is the modular system used
for managing outputters.
'''
import salt.loader
def display_output(data, out, opts=None):
'''
Print the passed data using the desired output
'''
if opts is None:
opts = {}
outputters = salt.loader.outputters(opts)
if not out in outputters:
outputters['pprint'](data)
outputters[out](data)
## Instruction:
Add some checks to output module
## Code After:
'''
Used to manage the outputter system. This package is the modular system used
for managing outputters.
'''
# Import salt utils
import salt.loader
def display_output(data, out, opts=None):
'''
Print the passed data using the desired output
'''
if opts is None:
opts = {}
outputters = salt.loader.outputters(opts)
if not out in outputters:
outputters['pprint'](data)
outputters[out](data)
|
// ... existing code ...
# Import salt utils
import salt.loader
// ... rest of the code ...
|
ec52fee0fbefaa8fe2df1f38aab000456fb44c45
|
website/admin.py
|
website/admin.py
|
from django.contrib import admin
from .models import Card, FaqQuestion, Banner, Rule
admin.site.register(Card)
admin.site.register(FaqQuestion)
admin.site.register(Rule)
admin.site.register(Banner)
|
from django.contrib import admin
from django.contrib.admin import EmptyFieldListFilter
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from .models import Card, FaqQuestion, Banner, Rule
class WatchlistFilter(EmptyFieldListFilter):
def __init__(self, field, request, params, model, model_admin, field_path):
super().__init__(field, request, params, model, model_admin, field_path)
self.title = "vaktliste"
def choices(self, changelist):
for lookup, title in (
(None, 'Alle'),
('1', 'På vaktliste'),
('0', 'Ikke på vaktliste'),
):
yield {
'selected': self.lookup_val == lookup,
'query_string': changelist.get_query_string({self.lookup_kwarg: lookup}),
'display': title,
}
class CustomUserAdmin(UserAdmin):
list_filter = UserAdmin.list_filter + (("watches", WatchlistFilter),)
admin.site.unregister(User)
admin.site.register(User, CustomUserAdmin)
admin.site.register(Card)
admin.site.register(FaqQuestion)
admin.site.register(Rule)
admin.site.register(Banner)
|
Add filter for users on watchlist
|
Add filter for users on watchlist
|
Python
|
mit
|
hackerspace-ntnu/website,hackerspace-ntnu/website,hackerspace-ntnu/website
|
from django.contrib import admin
+ from django.contrib.admin import EmptyFieldListFilter
+ from django.contrib.auth.admin import UserAdmin
+ from django.contrib.auth.models import User
+
from .models import Card, FaqQuestion, Banner, Rule
+
+ class WatchlistFilter(EmptyFieldListFilter):
+ def __init__(self, field, request, params, model, model_admin, field_path):
+ super().__init__(field, request, params, model, model_admin, field_path)
+ self.title = "vaktliste"
+
+ def choices(self, changelist):
+ for lookup, title in (
+ (None, 'Alle'),
+ ('1', 'På vaktliste'),
+ ('0', 'Ikke på vaktliste'),
+ ):
+ yield {
+ 'selected': self.lookup_val == lookup,
+ 'query_string': changelist.get_query_string({self.lookup_kwarg: lookup}),
+ 'display': title,
+ }
+
+
+ class CustomUserAdmin(UserAdmin):
+ list_filter = UserAdmin.list_filter + (("watches", WatchlistFilter),)
+
+
+ admin.site.unregister(User)
+ admin.site.register(User, CustomUserAdmin)
admin.site.register(Card)
admin.site.register(FaqQuestion)
admin.site.register(Rule)
admin.site.register(Banner)
|
Add filter for users on watchlist
|
## Code Before:
from django.contrib import admin
from .models import Card, FaqQuestion, Banner, Rule
admin.site.register(Card)
admin.site.register(FaqQuestion)
admin.site.register(Rule)
admin.site.register(Banner)
## Instruction:
Add filter for users on watchlist
## Code After:
from django.contrib import admin
from django.contrib.admin import EmptyFieldListFilter
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from .models import Card, FaqQuestion, Banner, Rule
class WatchlistFilter(EmptyFieldListFilter):
def __init__(self, field, request, params, model, model_admin, field_path):
super().__init__(field, request, params, model, model_admin, field_path)
self.title = "vaktliste"
def choices(self, changelist):
for lookup, title in (
(None, 'Alle'),
('1', 'På vaktliste'),
('0', 'Ikke på vaktliste'),
):
yield {
'selected': self.lookup_val == lookup,
'query_string': changelist.get_query_string({self.lookup_kwarg: lookup}),
'display': title,
}
class CustomUserAdmin(UserAdmin):
list_filter = UserAdmin.list_filter + (("watches", WatchlistFilter),)
admin.site.unregister(User)
admin.site.register(User, CustomUserAdmin)
admin.site.register(Card)
admin.site.register(FaqQuestion)
admin.site.register(Rule)
admin.site.register(Banner)
|
// ... existing code ...
from django.contrib import admin
from django.contrib.admin import EmptyFieldListFilter
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from .models import Card, FaqQuestion, Banner, Rule
// ... modified code ...
class WatchlistFilter(EmptyFieldListFilter):
def __init__(self, field, request, params, model, model_admin, field_path):
super().__init__(field, request, params, model, model_admin, field_path)
self.title = "vaktliste"
def choices(self, changelist):
for lookup, title in (
(None, 'Alle'),
('1', 'På vaktliste'),
('0', 'Ikke på vaktliste'),
):
yield {
'selected': self.lookup_val == lookup,
'query_string': changelist.get_query_string({self.lookup_kwarg: lookup}),
'display': title,
}
class CustomUserAdmin(UserAdmin):
list_filter = UserAdmin.list_filter + (("watches", WatchlistFilter),)
admin.site.unregister(User)
admin.site.register(User, CustomUserAdmin)
admin.site.register(Card)
// ... rest of the code ...
|
c43d58ac79d6144eb6252ff611cc9605f290006d
|
patches/1401/p01_move_related_property_setters_to_custom_field.py
|
patches/1401/p01_move_related_property_setters_to_custom_field.py
|
import webnotes
from webnotes.model.meta import get_field
def execute():
webnotes.reload_doc("core", "doctype", "custom_field")
custom_fields = {}
for cf in webnotes.conn.sql("""select dt, fieldname from `tabCustom Field`""", as_dict=1):
custom_fields.setdefault(cf.dt, []).append(cf.fieldname)
delete_list = []
for ps in webnotes.conn.sql("""select * from `tabProperty Setter`""", as_dict=1):
if ps.field_name in custom_fields.get(ps.doc_type, []):
if ps.property == "previous_field":
property_name = "insert_after"
field_meta = get_field(ps.doc_type, ps.value)
property_value = field_meta.label if field_meta else ""
else:
property_name = ps.property
property_value =ps.value
webnotes.conn.sql("""update `tabCustom Field`
set %s=%s where dt=%s and fieldname=%s""" % (property_name, '%s', '%s', '%s'),
(property_value, ps.doc_type, ps.field_name))
delete_list.append(ps.name)
if delete_list:
webnotes.conn.sql("""delete from `tabProperty Setter` where name in (%s)""" %
', '.join(['%s']*len(delete_list)), tuple(delete_list))
|
import webnotes
def execute():
webnotes.reload_doc("core", "doctype", "custom_field")
cf_doclist = webnotes.get_doctype("Custom Field")
delete_list = []
for d in webnotes.conn.sql("""select cf.name as cf_name, ps.property,
ps.value, ps.name as ps_name
from `tabProperty Setter` ps, `tabCustom Field` cf
where ps.doctype_or_field = 'DocField' and ps.property != 'previous_field'
and ps.doc_type=cf.dt and ps.field_name=cf.fieldname""", as_dict=1):
if cf_doclist.get_field(d.property):
webnotes.conn.sql("""update `tabCustom Field`
set `%s`=%s where name=%s""" % (d.property, '%s', '%s'), (d.value, d.cf_name))
delete_list.append(d.ps_name)
if delete_list:
webnotes.conn.sql("""delete from `tabProperty Setter` where name in (%s)""" %
', '.join(['%s']*len(delete_list)), tuple(delete_list))
|
Delete Property Setters for Custom Fields, and set them inside Custom Field
|
Delete Property Setters for Custom Fields, and set them inside Custom Field
|
Python
|
agpl-3.0
|
hatwar/buyback-erpnext,indictranstech/osmosis-erpnext,suyashphadtare/vestasi-erp-jan-end,Drooids/erpnext,gangadharkadam/saloon_erp_install,indictranstech/buyback-erp,suyashphadtare/gd-erp,geekroot/erpnext,gangadhar-kadam/verve_erp,anandpdoshi/erpnext,suyashphadtare/vestasi-update-erp,anandpdoshi/erpnext,sagar30051991/ozsmart-erp,shft117/SteckerApp,indictranstech/tele-erpnext,hatwar/focal-erpnext,gangadhar-kadam/verve_live_erp,Tejal011089/paypal_erpnext,indictranstech/fbd_erpnext,hatwar/focal-erpnext,suyashphadtare/sajil-final-erp,Tejal011089/digitales_erpnext,saurabh6790/omnitech-apps,tmimori/erpnext,fuhongliang/erpnext,njmube/erpnext,Suninus/erpnext,gangadhar-kadam/smrterp,suyashphadtare/vestasi-erp-jan-end,gangadharkadam/sher,gangadharkadam/letzerp,gmarke/erpnext,aruizramon/alec_erpnext,rohitwaghchaure/New_Theme_Erp,SPKian/Testing,mbauskar/alec_frappe5_erpnext,gangadharkadam/office_erp,anandpdoshi/erpnext,dieface/erpnext,shft117/SteckerApp,indictranstech/Das_Erpnext,hanselke/erpnext-1,saurabh6790/test-erp,saurabh6790/trufil_app,pombredanne/erpnext,Suninus/erpnext,gangadhar-kadam/hrerp,Tejal011089/trufil-erpnext,pawaranand/phrerp,Tejal011089/paypal_erpnext,hatwar/Das_erpnext,hanselke/erpnext-1,susuchina/ERPNEXT,rohitwaghchaure/erpnext_smart,indictranstech/fbd_erpnext,mbauskar/omnitech-erpnext,mbauskar/omnitech-erpnext,tmimori/erpnext,shitolepriya/test-erp,saurabh6790/omni-apps,Tejal011089/fbd_erpnext,saurabh6790/test-erp,hatwar/Das_erpnext,hanselke/erpnext-1,mbauskar/helpdesk-erpnext,gangadharkadam/v6_erp,indictranstech/vestasi-erpnext,netfirms/erpnext,indictranstech/reciphergroup-erpnext,gangadhar-kadam/powapp,rohitwaghchaure/erpnext_smart,SPKian/Testing2,rohitwaghchaure/New_Theme_Erp,saurabh6790/aimobilize-app-backup,indictranstech/trufil-erpnext,gangadharkadam/vlinkerp,rohitwaghchaure/digitales_erpnext,SPKian/Testing2,netfirms/erpnext,netfirms/erpnext,indictranstech/erpnext,saurabh6790/omn-app,indictranstech/tele-erpnext,gangadharkadam/sher,gangadharkadam/contri
butionerp,gangadharkadam/verveerp,suyashphadtare/test,mahabuber/erpnext,mbauskar/helpdesk-erpnext,meisterkleister/erpnext,MartinEnder/erpnext-de,pombredanne/erpnext,gangadharkadam/saloon_erp,Tejal011089/huntercamp_erpnext,gangadhar-kadam/laganerp,gangadharkadam/johnerp,BhupeshGupta/erpnext,gangadharkadam/vlinkerp,Tejal011089/med2-app,mbauskar/internal-hr,mbauskar/sapphire-erpnext,gangadhar-kadam/verve_test_erp,indictranstech/phrerp,gangadhar-kadam/verve-erp,fuhongliang/erpnext,indictranstech/erpnext,suyashphadtare/sajil-erp,4commerce-technologies-AG/erpnext,gangadharkadam/saloon_erp_install,shitolepriya/test-erp,indictranstech/internal-erpnext,gsnbng/erpnext,indictranstech/vestasi-erpnext,mbauskar/Das_Erpnext,shitolepriya/test-erp,susuchina/ERPNEXT,sagar30051991/ozsmart-erp,Tejal011089/huntercamp_erpnext,hatwar/Das_erpnext,njmube/erpnext,SPKian/Testing,Tejal011089/osmosis_erpnext,MartinEnder/erpnext-de,indictranstech/buyback-erp,ThiagoGarciaAlves/erpnext,gangadharkadam/contributionerp,Tejal011089/osmosis_erpnext,mbauskar/Das_Erpnext,treejames/erpnext,indictranstech/biggift-erpnext,suyashphadtare/sajil-final-erp,gangadharkadam/v4_erp,gangadharkadam/sterp,saurabh6790/test-erp,BhupeshGupta/erpnext,dieface/erpnext,indictranstech/biggift-erpnext,Tejal011089/fbd_erpnext,mbauskar/phrerp,mbauskar/omnitech-erpnext,gangadharkadam/tailorerp,gangadhar-kadam/latestchurcherp,gangadharkadam/letzerp,MartinEnder/erpnext-de,rohitwaghchaure/New_Theme_Erp,gangadhar-kadam/powapp,indictranstech/internal-erpnext,gsnbng/erpnext,mbauskar/internal-hr,indictranstech/phrerp,indictranstech/buyback-erp,Tejal011089/osmosis_erpnext,Tejal011089/trufil-erpnext,saurabh6790/omnitech-apps,ThiagoGarciaAlves/erpnext,mbauskar/Das_Erpnext,gangadhar-kadam/prjapp,gangadharkadam/verveerp,indictranstech/fbd_erpnext,Tejal011089/Medsyn2_app,SPKian/Testing,indictranstech/trufil-erpnext,njmube/erpnext,Tejal011089/fbd_erpnext,rohitwaghchaure/erpnext_smart,mbauskar/phrerp,indictranstech/reciphergroup-erpnext,saurabh
6790/omnisys-app,saurabh6790/aimobilize-app-backup,gangadhar-kadam/helpdesk-erpnext,hernad/erpnext,geekroot/erpnext,gangadhar-kadam/verve_test_erp,Drooids/erpnext,gmarke/erpnext,Tejal011089/huntercamp_erpnext,fuhongliang/erpnext,suyashphadtare/vestasi-update-erp,Tejal011089/paypal_erpnext,hanselke/erpnext-1,saurabh6790/omnisys-app,suyashphadtare/sajil-erp,gangadharkadam/v5_erp,saurabh6790/omni-apps,gangadharkadam/saloon_erp,Tejal011089/digitales_erpnext,hatwar/buyback-erpnext,gangadharkadam/vlinkerp,gangadhar-kadam/verve_erp,gangadhar-kadam/helpdesk-erpnext,gangadharkadam/office_erp,Tejal011089/Medsyn2_app,dieface/erpnext,indictranstech/reciphergroup-erpnext,gangadharkadam/smrterp,suyashphadtare/test,anandpdoshi/erpnext,gangadharkadam/letzerp,Aptitudetech/ERPNext,Tejal011089/digitales_erpnext,Suninus/erpnext,mbauskar/omnitech-demo-erpnext,gangadharkadam/verveerp,gangadharkadam/v5_erp,indictranstech/Das_Erpnext,gangadhar-kadam/smrterp,gangadhar-kadam/latestchurcherp,hatwar/Das_erpnext,sheafferusa/erpnext,gangadharkadam/contributionerp,gangadharkadam/office_erp,SPKian/Testing,gangadhar-kadam/nassimapp,suyashphadtare/vestasi-erp-1,mbauskar/alec_frappe5_erpnext,pawaranand/phrerp,Tejal011089/trufil-erpnext,meisterkleister/erpnext,gangadharkadam/saloon_erp_install,BhupeshGupta/erpnext,gangadhar-kadam/verve_erp,Tejal011089/paypal_erpnext,Tejal011089/med2-app,geekroot/erpnext,indictranstech/phrerp,Drooids/erpnext,saurabh6790/trufil_app,ThiagoGarciaAlves/erpnext,saurabh6790/tru_app_back,indictranstech/focal-erpnext,gangadharkadam/saloon_erp,tmimori/erpnext,rohitwaghchaure/erpnext-receipher,gangadharkadam/sterp,dieface/erpnext,suyashphadtare/vestasi-erp-final,ShashaQin/erpnext,sagar30051991/ozsmart-erp,gangadharkadam/letzerp,indictranstech/internal-erpnext,gangadhar-kadam/helpdesk-erpnext,mbauskar/phrerp,gangadhar-kadam/verve-erp,njmube/erpnext,gangadhar-kadam/verve_live_erp,sheafferusa/erpnext,suyashphadtare/gd-erp,gangadharkadam/vlinkerp,gsnbng/erpnext,sheafferusa/erpnext,i
ndictranstech/erpnext,mbauskar/alec_frappe5_erpnext,pombredanne/erpnext,saurabh6790/pow-app,tmimori/erpnext,SPKian/Testing2,aruizramon/alec_erpnext,indictranstech/phrerp,meisterkleister/erpnext,gangadharkadam/tailorerp,hatwar/focal-erpnext,suyashphadtare/sajil-final-erp,mbauskar/helpdesk-erpnext,indictranstech/vestasi-erpnext,gangadhar-kadam/verve_test_erp,gangadharkadam/johnerp,saurabh6790/pow-app,ShashaQin/erpnext,sheafferusa/erpnext,rohitwaghchaure/erpnext-receipher,indictranstech/biggift-erpnext,suyashphadtare/vestasi-erp-final,mahabuber/erpnext,rohitwaghchaure/digitales_erpnext,indictranstech/osmosis-erpnext,mbauskar/omnitech-demo-erpnext,hernad/erpnext,gangadhar-kadam/laganerp,gangadhar-kadam/latestchurcherp,indictranstech/internal-erpnext,suyashphadtare/vestasi-erp-jan-end,BhupeshGupta/erpnext,mbauskar/Das_Erpnext,suyashphadtare/vestasi-erp-1,rohitwaghchaure/digitales_erpnext,mbauskar/alec_frappe5_erpnext,saurabh6790/test-erp,saurabh6790/omnit-app,SPKian/Testing2,gangadhar-kadam/laganerp,suyashphadtare/vestasi-erp-final,fuhongliang/erpnext,mahabuber/erpnext,hatwar/buyback-erpnext,sagar30051991/ozsmart-erp,gangadhar-kadam/verve_live_erp,ThiagoGarciaAlves/erpnext,gangadharkadam/verveerp,suyashphadtare/test,indictranstech/osmosis-erpnext,rohitwaghchaure/GenieManager-erpnext,Tejal011089/huntercamp_erpnext,4commerce-technologies-AG/erpnext,gangadharkadam/v5_erp,Tejal011089/trufil-erpnext,indictranstech/fbd_erpnext,Drooids/erpnext,hatwar/buyback-erpnext,treejames/erpnext,indictranstech/erpnext,suyashphadtare/vestasi-erp-1,indictranstech/tele-erpnext,mbauskar/helpdesk-erpnext,suyashphadtare/gd-erp,saurabh6790/omn-app,gangadharkadam/v4_erp,treejames/erpnext,gangadharkadam/smrterp,susuchina/ERPNEXT,gangadhar-kadam/hrerp,indictranstech/Das_Erpnext,indictranstech/reciphergroup-erpnext,indictranstech/biggift-erpnext,aruizramon/alec_erpnext,shft117/SteckerApp,gangadhar-kadam/nassimapp,gangadhar-kadam/powapp,gangadhar-kadam/prjapp,mbauskar/sapphire-erpnext,mbauskar/omnitec
h-erpnext,Tejal011089/osmosis_erpnext,indictranstech/focal-erpnext,hernad/erpnext,gsnbng/erpnext,shft117/SteckerApp,indictranstech/vestasi-erpnext,mbauskar/phrerp,gangadhar-kadam/verve_test_erp,Tejal011089/digitales_erpnext,indictranstech/trufil-erpnext,gangadharkadam/v6_erp,gangadharkadam/saloon_erp,mbauskar/omnitech-demo-erpnext,pombredanne/erpnext,mbauskar/internal-hr,mbauskar/sapphire-erpnext,mbauskar/omnitech-demo-erpnext,rohitwaghchaure/GenieManager-erpnext,gangadhar-kadam/verve-erp,gangadharkadam/v4_erp,pawaranand/phrerp,gangadharkadam/saloon_erp_install,gangadharkadam/v6_erp,indictranstech/trufil-erpnext,MartinEnder/erpnext-de,gangadharkadam/v6_erp,gangadhar-kadam/latestchurcherp,indictranstech/buyback-erp,rohitwaghchaure/erpnext-receipher,rohitwaghchaure/erpnext-receipher,rohitwaghchaure/GenieManager-erpnext,hatwar/focal-erpnext,pawaranand/phrerp,suyashphadtare/sajil-erp,saurabh6790/omnit-app,gangadharkadam/v4_erp,Tejal011089/fbd_erpnext,gmarke/erpnext,geekroot/erpnext,saurabh6790/tru_app_back,suyashphadtare/gd-erp,rohitwaghchaure/digitales_erpnext,gangadhar-kadam/verve_live_erp,treejames/erpnext,gangadhar-kadam/helpdesk-erpnext,indictranstech/focal-erpnext,4commerce-technologies-AG/erpnext,rohitwaghchaure/GenieManager-erpnext,gangadhar-kadam/verve_erp,aruizramon/alec_erpnext,mbauskar/sapphire-erpnext,meisterkleister/erpnext,netfirms/erpnext,gangadharkadam/contributionerp,indictranstech/focal-erpnext,indictranstech/tele-erpnext,hernad/erpnext,shitolepriya/test-erp,susuchina/ERPNEXT,suyashphadtare/vestasi-update-erp,mahabuber/erpnext,gmarke/erpnext,gangadharkadam/v5_erp,rohitwaghchaure/New_Theme_Erp,indictranstech/Das_Erpnext,Suninus/erpnext,ShashaQin/erpnext,suyashphadtare/vestasi-erp-jan-end,ShashaQin/erpnext,indictranstech/osmosis-erpnext
|
import webnotes
- from webnotes.model.meta import get_field
def execute():
webnotes.reload_doc("core", "doctype", "custom_field")
+ cf_doclist = webnotes.get_doctype("Custom Field")
+
- custom_fields = {}
- for cf in webnotes.conn.sql("""select dt, fieldname from `tabCustom Field`""", as_dict=1):
- custom_fields.setdefault(cf.dt, []).append(cf.fieldname)
-
delete_list = []
- for ps in webnotes.conn.sql("""select * from `tabProperty Setter`""", as_dict=1):
- if ps.field_name in custom_fields.get(ps.doc_type, []):
-
- if ps.property == "previous_field":
- property_name = "insert_after"
+ for d in webnotes.conn.sql("""select cf.name as cf_name, ps.property,
+ ps.value, ps.name as ps_name
+ from `tabProperty Setter` ps, `tabCustom Field` cf
+ where ps.doctype_or_field = 'DocField' and ps.property != 'previous_field'
+ and ps.doc_type=cf.dt and ps.field_name=cf.fieldname""", as_dict=1):
+ if cf_doclist.get_field(d.property):
+ webnotes.conn.sql("""update `tabCustom Field`
+ set `%s`=%s where name=%s""" % (d.property, '%s', '%s'), (d.value, d.cf_name))
+ delete_list.append(d.ps_name)
- field_meta = get_field(ps.doc_type, ps.value)
- property_value = field_meta.label if field_meta else ""
- else:
- property_name = ps.property
- property_value =ps.value
- webnotes.conn.sql("""update `tabCustom Field`
- set %s=%s where dt=%s and fieldname=%s""" % (property_name, '%s', '%s', '%s'),
- (property_value, ps.doc_type, ps.field_name))
-
- delete_list.append(ps.name)
-
if delete_list:
webnotes.conn.sql("""delete from `tabProperty Setter` where name in (%s)""" %
', '.join(['%s']*len(delete_list)), tuple(delete_list))
|
Delete Property Setters for Custom Fields, and set them inside Custom Field
|
## Code Before:
import webnotes
from webnotes.model.meta import get_field
def execute():
webnotes.reload_doc("core", "doctype", "custom_field")
custom_fields = {}
for cf in webnotes.conn.sql("""select dt, fieldname from `tabCustom Field`""", as_dict=1):
custom_fields.setdefault(cf.dt, []).append(cf.fieldname)
delete_list = []
for ps in webnotes.conn.sql("""select * from `tabProperty Setter`""", as_dict=1):
if ps.field_name in custom_fields.get(ps.doc_type, []):
if ps.property == "previous_field":
property_name = "insert_after"
field_meta = get_field(ps.doc_type, ps.value)
property_value = field_meta.label if field_meta else ""
else:
property_name = ps.property
property_value =ps.value
webnotes.conn.sql("""update `tabCustom Field`
set %s=%s where dt=%s and fieldname=%s""" % (property_name, '%s', '%s', '%s'),
(property_value, ps.doc_type, ps.field_name))
delete_list.append(ps.name)
if delete_list:
webnotes.conn.sql("""delete from `tabProperty Setter` where name in (%s)""" %
', '.join(['%s']*len(delete_list)), tuple(delete_list))
## Instruction:
Delete Property Setters for Custom Fields, and set them inside Custom Field
## Code After:
import webnotes
def execute():
webnotes.reload_doc("core", "doctype", "custom_field")
cf_doclist = webnotes.get_doctype("Custom Field")
delete_list = []
for d in webnotes.conn.sql("""select cf.name as cf_name, ps.property,
ps.value, ps.name as ps_name
from `tabProperty Setter` ps, `tabCustom Field` cf
where ps.doctype_or_field = 'DocField' and ps.property != 'previous_field'
and ps.doc_type=cf.dt and ps.field_name=cf.fieldname""", as_dict=1):
if cf_doclist.get_field(d.property):
webnotes.conn.sql("""update `tabCustom Field`
set `%s`=%s where name=%s""" % (d.property, '%s', '%s'), (d.value, d.cf_name))
delete_list.append(d.ps_name)
if delete_list:
webnotes.conn.sql("""delete from `tabProperty Setter` where name in (%s)""" %
', '.join(['%s']*len(delete_list)), tuple(delete_list))
|
// ... existing code ...
import webnotes
// ... modified code ...
cf_doclist = webnotes.get_doctype("Custom Field")
delete_list = []
for d in webnotes.conn.sql("""select cf.name as cf_name, ps.property,
ps.value, ps.name as ps_name
from `tabProperty Setter` ps, `tabCustom Field` cf
where ps.doctype_or_field = 'DocField' and ps.property != 'previous_field'
and ps.doc_type=cf.dt and ps.field_name=cf.fieldname""", as_dict=1):
if cf_doclist.get_field(d.property):
webnotes.conn.sql("""update `tabCustom Field`
set `%s`=%s where name=%s""" % (d.property, '%s', '%s'), (d.value, d.cf_name))
delete_list.append(d.ps_name)
if delete_list:
// ... rest of the code ...
|
83c12d598221aac8e7173fb7d78083bc1c5ab64b
|
tests/test_commands.py
|
tests/test_commands.py
|
import unittest
from cobe.commands import LearnIrcLogCommand
class testIrcLogParsing(unittest.TestCase):
def setUp(self):
self.command = LearnIrcLogCommand()
def testNonPubmsg(self):
msg = "this is some non-pubmsg text found in a log"
cmd = self.command
self.assertEqual(None, cmd._parse_irc_message(msg))
def testNormalPubmsg(self):
msg = "12:00 <foo> bar baz"
cmd = self.command
self.assertEqual("bar baz", cmd._parse_irc_message(msg))
def testKibotQuotePubmsg(self):
msg = "12:00 <foo> \"bar baz\" --user, 01-oct-09"
cmd = self.command
self.assertEqual("bar baz", cmd._parse_irc_message(msg))
if __name__ == '__main__':
unittest.main()
|
import unittest
from cobe.commands import LearnIrcLogCommand
class testIrcLogParsing(unittest.TestCase):
def setUp(self):
self.command = LearnIrcLogCommand()
def testNonPubmsg(self):
msg = "this is some non-pubmsg text found in a log"
cmd = self.command
self.assertEqual(None, cmd._parse_irc_message(msg))
def testNormalPubmsg(self):
msg = "12:00 <foo> bar baz"
cmd = self.command
self.assertEqual("bar baz", cmd._parse_irc_message(msg))
def testKibotQuotePubmsg(self):
msg = "12:00 <foo> \"bar baz\" --user, 01-oct-09"
cmd = self.command
self.assertEqual("bar baz", cmd._parse_irc_message(msg))
def testIgnoredNickPubmsg(self):
msg = "12:00 <foo> bar baz"
cmd = self.command
self.assertEqual(None, cmd._parse_irc_message(msg, ["foo"]))
if __name__ == '__main__':
unittest.main()
|
Add a unit test for ignored nicks in _parse_irc_message
|
Add a unit test for ignored nicks in _parse_irc_message
|
Python
|
mit
|
pteichman/cobe,wodim/cobe-ng,wodim/cobe-ng,DarkMio/cobe,tiagochiavericosta/cobe,meska/cobe,LeMagnesium/cobe,LeMagnesium/cobe,DarkMio/cobe,meska/cobe,tiagochiavericosta/cobe,pteichman/cobe
|
import unittest
from cobe.commands import LearnIrcLogCommand
class testIrcLogParsing(unittest.TestCase):
def setUp(self):
self.command = LearnIrcLogCommand()
def testNonPubmsg(self):
msg = "this is some non-pubmsg text found in a log"
cmd = self.command
self.assertEqual(None, cmd._parse_irc_message(msg))
def testNormalPubmsg(self):
msg = "12:00 <foo> bar baz"
cmd = self.command
self.assertEqual("bar baz", cmd._parse_irc_message(msg))
def testKibotQuotePubmsg(self):
msg = "12:00 <foo> \"bar baz\" --user, 01-oct-09"
cmd = self.command
self.assertEqual("bar baz", cmd._parse_irc_message(msg))
+ def testIgnoredNickPubmsg(self):
+ msg = "12:00 <foo> bar baz"
+ cmd = self.command
+
+ self.assertEqual(None, cmd._parse_irc_message(msg, ["foo"]))
+
if __name__ == '__main__':
unittest.main()
|
Add a unit test for ignored nicks in _parse_irc_message
|
## Code Before:
import unittest
from cobe.commands import LearnIrcLogCommand
class testIrcLogParsing(unittest.TestCase):
def setUp(self):
self.command = LearnIrcLogCommand()
def testNonPubmsg(self):
msg = "this is some non-pubmsg text found in a log"
cmd = self.command
self.assertEqual(None, cmd._parse_irc_message(msg))
def testNormalPubmsg(self):
msg = "12:00 <foo> bar baz"
cmd = self.command
self.assertEqual("bar baz", cmd._parse_irc_message(msg))
def testKibotQuotePubmsg(self):
msg = "12:00 <foo> \"bar baz\" --user, 01-oct-09"
cmd = self.command
self.assertEqual("bar baz", cmd._parse_irc_message(msg))
if __name__ == '__main__':
unittest.main()
## Instruction:
Add a unit test for ignored nicks in _parse_irc_message
## Code After:
import unittest
from cobe.commands import LearnIrcLogCommand
class testIrcLogParsing(unittest.TestCase):
def setUp(self):
self.command = LearnIrcLogCommand()
def testNonPubmsg(self):
msg = "this is some non-pubmsg text found in a log"
cmd = self.command
self.assertEqual(None, cmd._parse_irc_message(msg))
def testNormalPubmsg(self):
msg = "12:00 <foo> bar baz"
cmd = self.command
self.assertEqual("bar baz", cmd._parse_irc_message(msg))
def testKibotQuotePubmsg(self):
msg = "12:00 <foo> \"bar baz\" --user, 01-oct-09"
cmd = self.command
self.assertEqual("bar baz", cmd._parse_irc_message(msg))
def testIgnoredNickPubmsg(self):
msg = "12:00 <foo> bar baz"
cmd = self.command
self.assertEqual(None, cmd._parse_irc_message(msg, ["foo"]))
if __name__ == '__main__':
unittest.main()
|
...
def testIgnoredNickPubmsg(self):
msg = "12:00 <foo> bar baz"
cmd = self.command
self.assertEqual(None, cmd._parse_irc_message(msg, ["foo"]))
if __name__ == '__main__':
...
|
0519824c537a96474e0501e1ac45f7a626391a31
|
tests/test_model_object.py
|
tests/test_model_object.py
|
from marathon.models.base import MarathonObject
import unittest
class MarathonObjectTest(unittest.TestCase):
def test_hashable(self):
"""
Regression test for issue #203
MarathonObject defined __eq__ but not __hash__, meaning that in
in Python2.7 MarathonObjects are hashable, but in Python3 they're not,
This test ensures that we are hashable in all versions of python
"""
obj = MarathonObject()
collection = {}
collection[obj] = True
assert collection[obj]
|
from marathon.models.base import MarathonObject
from marathon.models.base import MarathonResource
import unittest
class MarathonObjectTest(unittest.TestCase):
def test_hashable(self):
"""
Regression test for issue #203
MarathonObject defined __eq__ but not __hash__, meaning that in
in Python2.7 MarathonObjects are hashable, but in Python3 they're not,
This test ensures that we are hashable in all versions of python
"""
obj = MarathonObject()
collection = {}
collection[obj] = True
assert collection[obj]
class MarathonResourceHashable(unittest.TestCase):
def test_hashable(self):
"""
Regression test for issue #203
MarathonResource defined __eq__ but not __hash__, meaning that in
in Python2.7 MarathonResources are hashable, but in Python3 they're
not
This test ensures that we are hashable in all versions of python
"""
obj = MarathonResource()
collection = {}
collection[obj] = True
assert collection[obj]
|
Add regression test for MarathonResource
|
Add regression test for MarathonResource
|
Python
|
mit
|
thefactory/marathon-python,thefactory/marathon-python
|
from marathon.models.base import MarathonObject
+ from marathon.models.base import MarathonResource
import unittest
class MarathonObjectTest(unittest.TestCase):
def test_hashable(self):
"""
Regression test for issue #203
MarathonObject defined __eq__ but not __hash__, meaning that in
in Python2.7 MarathonObjects are hashable, but in Python3 they're not,
This test ensures that we are hashable in all versions of python
"""
obj = MarathonObject()
collection = {}
collection[obj] = True
assert collection[obj]
+
+ class MarathonResourceHashable(unittest.TestCase):
+
+ def test_hashable(self):
+ """
+ Regression test for issue #203
+
+ MarathonResource defined __eq__ but not __hash__, meaning that in
+ in Python2.7 MarathonResources are hashable, but in Python3 they're
+ not
+
+ This test ensures that we are hashable in all versions of python
+ """
+ obj = MarathonResource()
+ collection = {}
+ collection[obj] = True
+ assert collection[obj]
+
|
Add regression test for MarathonResource
|
## Code Before:
from marathon.models.base import MarathonObject
import unittest
class MarathonObjectTest(unittest.TestCase):
def test_hashable(self):
"""
Regression test for issue #203
MarathonObject defined __eq__ but not __hash__, meaning that in
in Python2.7 MarathonObjects are hashable, but in Python3 they're not,
This test ensures that we are hashable in all versions of python
"""
obj = MarathonObject()
collection = {}
collection[obj] = True
assert collection[obj]
## Instruction:
Add regression test for MarathonResource
## Code After:
from marathon.models.base import MarathonObject
from marathon.models.base import MarathonResource
import unittest
class MarathonObjectTest(unittest.TestCase):
def test_hashable(self):
"""
Regression test for issue #203
MarathonObject defined __eq__ but not __hash__, meaning that in
in Python2.7 MarathonObjects are hashable, but in Python3 they're not,
This test ensures that we are hashable in all versions of python
"""
obj = MarathonObject()
collection = {}
collection[obj] = True
assert collection[obj]
class MarathonResourceHashable(unittest.TestCase):
def test_hashable(self):
"""
Regression test for issue #203
MarathonResource defined __eq__ but not __hash__, meaning that in
in Python2.7 MarathonResources are hashable, but in Python3 they're
not
This test ensures that we are hashable in all versions of python
"""
obj = MarathonResource()
collection = {}
collection[obj] = True
assert collection[obj]
|
// ... existing code ...
from marathon.models.base import MarathonObject
from marathon.models.base import MarathonResource
import unittest
// ... modified code ...
assert collection[obj]
class MarathonResourceHashable(unittest.TestCase):
def test_hashable(self):
"""
Regression test for issue #203
MarathonResource defined __eq__ but not __hash__, meaning that in
in Python2.7 MarathonResources are hashable, but in Python3 they're
not
This test ensures that we are hashable in all versions of python
"""
obj = MarathonResource()
collection = {}
collection[obj] = True
assert collection[obj]
// ... rest of the code ...
|
182f070c69e59907eeda3c261d833a492af46967
|
rojak-database/generate_media_data.py
|
rojak-database/generate_media_data.py
|
import MySQLdb as mysql
from faker import Factory
# Open database connection
db = mysql.connect('localhost', 'root', 'rojak', 'rojak_database')
# Create new db cursor
cursor = db.cursor()
sql = '''
INSERT INTO `media`(`name`, `website_url`, `logo_url`, `facebookpage_url`,
`slogan`)
VALUES ('{}', '{}', '{}', '{}', '{}');
'''
MAX_MEDIA=100
fake = Factory.create('it_IT')
for i in xrange(MAX_MEDIA):
# Generate random data for the media
media_name = fake.name() + ' Media ' + str(i)
website_name = media_name.lower().replace(' ', '')
website_name = website_name.replace("'", '')
website_url = 'https://{}.com'.format(website_name)
cat_txt = website_name
cat_img = 'http://lorempixel.com/500/500/cats/{}'.format(cat_txt)
logo_url = cat_img
facebookpage_url = 'https://facebook.com/{}'.format(website_name)
slogan = ' '.join(fake.text().split()[:5])
# Parse the SQL command
insert_sql = sql.format(media_name, website_url, logo_url,
facebookpage_url, slogan)
# insert to the database
try:
cursor.execute(insert_sql)
db.commit()
except mysql.Error as err:
print("Something went wrong: {}".format(err))
db.rollback()
# Close the DB connection
db.close()
|
import MySQLdb as mysql
from faker import Factory
# Open database connection
db = mysql.connect('localhost', 'root', 'rojak', 'rojak_database')
# Create new db cursor
cursor = db.cursor()
sql = '''
INSERT INTO `media`(`name`, `website_url`, `logo_url`, `facebookpage_url`,
`slogan`)
VALUES ('{}', '{}', '{}', '{}', '{}');
'''
MAX_MEDIA=100
fake = Factory.create()
for i in xrange(MAX_MEDIA):
# Generate random data for the media
media_name = fake.name() + ' Media ' + str(i)
website_name = media_name.lower().replace(' ', '')
website_url = 'https://{}.com'.format(website_name)
cat_txt = website_name
cat_img = 'http://lorempixel.com/500/500/cats/{}'.format(cat_txt)
logo_url = cat_img
facebookpage_url = 'https://facebook.com/{}'.format(website_name)
slogan = ' '.join(fake.text().split()[:5])
# Parse the SQL command
insert_sql = sql.format(media_name, website_url, logo_url,
facebookpage_url, slogan)
# insert to the database
try:
cursor.execute(insert_sql)
db.commit()
except mysql.Error as err:
print("Something went wrong: {}".format(err))
db.rollback()
# Close the DB connection
db.close()
|
Update the default language for the media generator
|
Update the default language for the media generator
|
Python
|
bsd-3-clause
|
CodeRiderz/rojak,bobbypriambodo/rojak,reinarduswindy/rojak,CodeRiderz/rojak,bobbypriambodo/rojak,reinarduswindy/rojak,bobbypriambodo/rojak,reinarduswindy/rojak,CodeRiderz/rojak,pyk/rojak,pyk/rojak,bobbypriambodo/rojak,pyk/rojak,bobbypriambodo/rojak,rawgni/rojak,bobbypriambodo/rojak,CodeRiderz/rojak,pyk/rojak,pyk/rojak,CodeRiderz/rojak,rawgni/rojak,pyk/rojak,rawgni/rojak,rawgni/rojak,rawgni/rojak,pyk/rojak,reinarduswindy/rojak,reinarduswindy/rojak,CodeRiderz/rojak,CodeRiderz/rojak,rawgni/rojak,rawgni/rojak,reinarduswindy/rojak,reinarduswindy/rojak,bobbypriambodo/rojak
|
import MySQLdb as mysql
from faker import Factory
# Open database connection
db = mysql.connect('localhost', 'root', 'rojak', 'rojak_database')
# Create new db cursor
cursor = db.cursor()
sql = '''
INSERT INTO `media`(`name`, `website_url`, `logo_url`, `facebookpage_url`,
`slogan`)
VALUES ('{}', '{}', '{}', '{}', '{}');
'''
MAX_MEDIA=100
- fake = Factory.create('it_IT')
+ fake = Factory.create()
for i in xrange(MAX_MEDIA):
# Generate random data for the media
media_name = fake.name() + ' Media ' + str(i)
website_name = media_name.lower().replace(' ', '')
- website_name = website_name.replace("'", '')
website_url = 'https://{}.com'.format(website_name)
cat_txt = website_name
cat_img = 'http://lorempixel.com/500/500/cats/{}'.format(cat_txt)
logo_url = cat_img
facebookpage_url = 'https://facebook.com/{}'.format(website_name)
slogan = ' '.join(fake.text().split()[:5])
# Parse the SQL command
insert_sql = sql.format(media_name, website_url, logo_url,
facebookpage_url, slogan)
# insert to the database
try:
cursor.execute(insert_sql)
db.commit()
except mysql.Error as err:
print("Something went wrong: {}".format(err))
db.rollback()
# Close the DB connection
db.close()
|
Update the default language for the media generator
|
## Code Before:
import MySQLdb as mysql
from faker import Factory
# Open database connection
db = mysql.connect('localhost', 'root', 'rojak', 'rojak_database')
# Create new db cursor
cursor = db.cursor()
sql = '''
INSERT INTO `media`(`name`, `website_url`, `logo_url`, `facebookpage_url`,
`slogan`)
VALUES ('{}', '{}', '{}', '{}', '{}');
'''
MAX_MEDIA=100
fake = Factory.create('it_IT')
for i in xrange(MAX_MEDIA):
# Generate random data for the media
media_name = fake.name() + ' Media ' + str(i)
website_name = media_name.lower().replace(' ', '')
website_name = website_name.replace("'", '')
website_url = 'https://{}.com'.format(website_name)
cat_txt = website_name
cat_img = 'http://lorempixel.com/500/500/cats/{}'.format(cat_txt)
logo_url = cat_img
facebookpage_url = 'https://facebook.com/{}'.format(website_name)
slogan = ' '.join(fake.text().split()[:5])
# Parse the SQL command
insert_sql = sql.format(media_name, website_url, logo_url,
facebookpage_url, slogan)
# insert to the database
try:
cursor.execute(insert_sql)
db.commit()
except mysql.Error as err:
print("Something went wrong: {}".format(err))
db.rollback()
# Close the DB connection
db.close()
## Instruction:
Update the default language for the media generator
## Code After:
import MySQLdb as mysql
from faker import Factory
# Open database connection
db = mysql.connect('localhost', 'root', 'rojak', 'rojak_database')
# Create new db cursor
cursor = db.cursor()
sql = '''
INSERT INTO `media`(`name`, `website_url`, `logo_url`, `facebookpage_url`,
`slogan`)
VALUES ('{}', '{}', '{}', '{}', '{}');
'''
MAX_MEDIA=100
fake = Factory.create()
for i in xrange(MAX_MEDIA):
# Generate random data for the media
media_name = fake.name() + ' Media ' + str(i)
website_name = media_name.lower().replace(' ', '')
website_url = 'https://{}.com'.format(website_name)
cat_txt = website_name
cat_img = 'http://lorempixel.com/500/500/cats/{}'.format(cat_txt)
logo_url = cat_img
facebookpage_url = 'https://facebook.com/{}'.format(website_name)
slogan = ' '.join(fake.text().split()[:5])
# Parse the SQL command
insert_sql = sql.format(media_name, website_url, logo_url,
facebookpage_url, slogan)
# insert to the database
try:
cursor.execute(insert_sql)
db.commit()
except mysql.Error as err:
print("Something went wrong: {}".format(err))
db.rollback()
# Close the DB connection
db.close()
|
# ... existing code ...
MAX_MEDIA=100
fake = Factory.create()
for i in xrange(MAX_MEDIA):
# ... modified code ...
website_name = media_name.lower().replace(' ', '')
website_url = 'https://{}.com'.format(website_name)
# ... rest of the code ...
|
b68576d307474eaf6bd8a8853bee767c391d28b9
|
conjure/connection.py
|
conjure/connection.py
|
from .exceptions import ConnectionError
from pymongo import MongoClient
from pymongo.uri_parser import parse_uri
_connections = {}
try:
import gevent
except ImportError:
gevent = None
def _get_connection(uri):
global _connections
parsed_uri = parse_uri(uri)
hosts = parsed_uri['nodelist']
hosts = ['%s:%d' % host for host in hosts]
key = ','.join(hosts)
connection = _connections.get(key)
if connection is None:
try:
connection = _connections[key] = MongoClient(uri)
except Exception as e:
raise ConnectionError(e.message)
return connection
def connect(uri):
parsed_uri = parse_uri(uri)
username = parsed_uri['username']
password = parsed_uri['password']
database = parsed_uri['database']
db = _get_connection(uri)[database]
if username and password:
db.authenticate(username, password)
return db
|
from .exceptions import ConnectionError
from pymongo import MongoClient
from pymongo.uri_parser import parse_uri
_connections = {}
try:
import gevent
except ImportError:
gevent = None
def _get_connection(uri):
global _connections
parsed_uri = parse_uri(uri)
hosts = parsed_uri['nodelist']
hosts = ['%s:%d' % host for host in hosts]
key = ','.join(hosts)
connection = _connections.get(key)
if connection is None:
try:
connection = _connections[key] = MongoClient(uri)
except Exception as e:
raise ConnectionError(e.message)
return connection
def connect(uri):
parsed_uri = parse_uri(uri)
username = parsed_uri['username']
password = parsed_uri['password']
database = parsed_uri['database']
db = _get_connection(uri)[database]
return db
|
Remove authenticate call to fix issues with pymongo 3.7
|
Remove authenticate call to fix issues with pymongo 3.7
|
Python
|
mit
|
GGOutfitters/conjure
|
from .exceptions import ConnectionError
from pymongo import MongoClient
from pymongo.uri_parser import parse_uri
_connections = {}
try:
import gevent
except ImportError:
gevent = None
def _get_connection(uri):
global _connections
parsed_uri = parse_uri(uri)
hosts = parsed_uri['nodelist']
hosts = ['%s:%d' % host for host in hosts]
key = ','.join(hosts)
connection = _connections.get(key)
if connection is None:
try:
connection = _connections[key] = MongoClient(uri)
except Exception as e:
raise ConnectionError(e.message)
return connection
def connect(uri):
parsed_uri = parse_uri(uri)
username = parsed_uri['username']
password = parsed_uri['password']
database = parsed_uri['database']
db = _get_connection(uri)[database]
- if username and password:
- db.authenticate(username, password)
-
return db
|
Remove authenticate call to fix issues with pymongo 3.7
|
## Code Before:
from .exceptions import ConnectionError
from pymongo import MongoClient
from pymongo.uri_parser import parse_uri
_connections = {}
try:
import gevent
except ImportError:
gevent = None
def _get_connection(uri):
global _connections
parsed_uri = parse_uri(uri)
hosts = parsed_uri['nodelist']
hosts = ['%s:%d' % host for host in hosts]
key = ','.join(hosts)
connection = _connections.get(key)
if connection is None:
try:
connection = _connections[key] = MongoClient(uri)
except Exception as e:
raise ConnectionError(e.message)
return connection
def connect(uri):
parsed_uri = parse_uri(uri)
username = parsed_uri['username']
password = parsed_uri['password']
database = parsed_uri['database']
db = _get_connection(uri)[database]
if username and password:
db.authenticate(username, password)
return db
## Instruction:
Remove authenticate call to fix issues with pymongo 3.7
## Code After:
from .exceptions import ConnectionError
from pymongo import MongoClient
from pymongo.uri_parser import parse_uri
_connections = {}
try:
import gevent
except ImportError:
gevent = None
def _get_connection(uri):
global _connections
parsed_uri = parse_uri(uri)
hosts = parsed_uri['nodelist']
hosts = ['%s:%d' % host for host in hosts]
key = ','.join(hosts)
connection = _connections.get(key)
if connection is None:
try:
connection = _connections[key] = MongoClient(uri)
except Exception as e:
raise ConnectionError(e.message)
return connection
def connect(uri):
parsed_uri = parse_uri(uri)
username = parsed_uri['username']
password = parsed_uri['password']
database = parsed_uri['database']
db = _get_connection(uri)[database]
return db
|
// ... existing code ...
return db
// ... rest of the code ...
|
58236d8bc6a23477d83c244fc117f493aa2651a6
|
thinglang/parser/tokens/arithmetic.py
|
thinglang/parser/tokens/arithmetic.py
|
from thinglang.common import ObtainableValue
from thinglang.parser.tokens import BaseToken
class ArithmeticOperation(BaseToken, ObtainableValue):
OPERATIONS = {
"+": lambda rhs, lhs: rhs + lhs,
"*": lambda rhs, lhs: rhs * lhs,
"-": lambda rhs, lhs: rhs - lhs,
"/": lambda rhs, lhs: rhs / lhs
}
def __init__(self, slice):
super(ArithmeticOperation, self).__init__(slice)
self.lhs, self.operator, self.rhs = slice
def evaluate(self, stack):
return self.OPERATIONS[self.operator.operator](self.lhs.evaluate(stack), self.rhs.evaluate(stack))
def describe(self):
return '{} {} {}'.format(self.lhs, self.operator, self.rhs)
|
from thinglang.common import ObtainableValue
from thinglang.parser.tokens import BaseToken
class ArithmeticOperation(BaseToken, ObtainableValue):
OPERATIONS = {
"+": lambda rhs, lhs: rhs + lhs,
"*": lambda rhs, lhs: rhs * lhs,
"-": lambda rhs, lhs: rhs - lhs,
"/": lambda rhs, lhs: rhs / lhs
}
def __init__(self, slice):
super(ArithmeticOperation, self).__init__(slice)
self.lhs, self.operator, self.rhs = slice
def evaluate(self, stack):
return self.OPERATIONS[self.operator.operator](self.lhs.evaluate(stack), self.rhs.evaluate(stack))
def describe(self):
return '|{} {} {}|'.format(self[0], self.operator, self[1])
def replace_argument(self, original, replacement):
self.arguments = [replacement if x is original else x for x in self.arguments]
def __getitem__(self, item):
return self.arguments[item]
|
Add replace method to Arithmetic operation
|
Add replace method to Arithmetic operation
|
Python
|
mit
|
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
|
from thinglang.common import ObtainableValue
from thinglang.parser.tokens import BaseToken
class ArithmeticOperation(BaseToken, ObtainableValue):
OPERATIONS = {
"+": lambda rhs, lhs: rhs + lhs,
"*": lambda rhs, lhs: rhs * lhs,
"-": lambda rhs, lhs: rhs - lhs,
"/": lambda rhs, lhs: rhs / lhs
}
def __init__(self, slice):
super(ArithmeticOperation, self).__init__(slice)
self.lhs, self.operator, self.rhs = slice
def evaluate(self, stack):
return self.OPERATIONS[self.operator.operator](self.lhs.evaluate(stack), self.rhs.evaluate(stack))
def describe(self):
- return '{} {} {}'.format(self.lhs, self.operator, self.rhs)
+ return '|{} {} {}|'.format(self[0], self.operator, self[1])
+
+ def replace_argument(self, original, replacement):
+ self.arguments = [replacement if x is original else x for x in self.arguments]
+
+ def __getitem__(self, item):
+ return self.arguments[item]
|
Add replace method to Arithmetic operation
|
## Code Before:
from thinglang.common import ObtainableValue
from thinglang.parser.tokens import BaseToken
class ArithmeticOperation(BaseToken, ObtainableValue):
OPERATIONS = {
"+": lambda rhs, lhs: rhs + lhs,
"*": lambda rhs, lhs: rhs * lhs,
"-": lambda rhs, lhs: rhs - lhs,
"/": lambda rhs, lhs: rhs / lhs
}
def __init__(self, slice):
super(ArithmeticOperation, self).__init__(slice)
self.lhs, self.operator, self.rhs = slice
def evaluate(self, stack):
return self.OPERATIONS[self.operator.operator](self.lhs.evaluate(stack), self.rhs.evaluate(stack))
def describe(self):
return '{} {} {}'.format(self.lhs, self.operator, self.rhs)
## Instruction:
Add replace method to Arithmetic operation
## Code After:
from thinglang.common import ObtainableValue
from thinglang.parser.tokens import BaseToken
class ArithmeticOperation(BaseToken, ObtainableValue):
OPERATIONS = {
"+": lambda rhs, lhs: rhs + lhs,
"*": lambda rhs, lhs: rhs * lhs,
"-": lambda rhs, lhs: rhs - lhs,
"/": lambda rhs, lhs: rhs / lhs
}
def __init__(self, slice):
super(ArithmeticOperation, self).__init__(slice)
self.lhs, self.operator, self.rhs = slice
def evaluate(self, stack):
return self.OPERATIONS[self.operator.operator](self.lhs.evaluate(stack), self.rhs.evaluate(stack))
def describe(self):
return '|{} {} {}|'.format(self[0], self.operator, self[1])
def replace_argument(self, original, replacement):
self.arguments = [replacement if x is original else x for x in self.arguments]
def __getitem__(self, item):
return self.arguments[item]
|
// ... existing code ...
def describe(self):
return '|{} {} {}|'.format(self[0], self.operator, self[1])
def replace_argument(self, original, replacement):
self.arguments = [replacement if x is original else x for x in self.arguments]
def __getitem__(self, item):
return self.arguments[item]
// ... rest of the code ...
|
78d36a68e0d460f3ead713a82c7d23faf7e73b9b
|
Instanssi/tickets/views.py
|
Instanssi/tickets/views.py
|
from datetime import datetime
from django.conf import settings
from django.http import HttpResponse, Http404
from django.template import RequestContext
from django.shortcuts import render_to_response, get_object_or_404
from Instanssi.tickets.models import Ticket
from Instanssi.store.models import StoreTransaction
# Logging related
import logging
logger = logging.getLogger(__name__)
# Shows information about a single ticket
def ticket(request, ticket_key):
# Find ticket
ticket = get_object_or_404(Ticket, key=ticket_key)
# Render ticket
return render_to_response('tickets/ticket.html', {
'ticket': ticket,
}, context_instance=RequestContext(request))
# Lists all tickets
def tickets(request, transaction_key):
# Get transaction
transaction = get_object_or_404(StoreTransaction, key=transaction_key)
# Get all tickets by this transaction
tickets = Ticket.objects.filter(transaction=transaction)
# Render tickets
return render_to_response('tickets/tickets.html', {
'transaction': transaction,
'tickets': tickets,
}, context_instance=RequestContext(request))
|
from datetime import datetime
from django.conf import settings
from django.http import HttpResponse, Http404
from django.template import RequestContext
from django.shortcuts import render_to_response, get_object_or_404
from Instanssi.tickets.models import Ticket
from Instanssi.store.models import StoreTransaction
# Logging related
import logging
logger = logging.getLogger(__name__)
# Shows information about a single ticket
def ticket(request, ticket_key):
# Find ticket
ticket = get_object_or_404(Ticket, key=ticket_key)
# Render ticket
return render_to_response('tickets/ticket.html', {
'ticket': ticket,
}, context_instance=RequestContext(request))
# Lists all tickets
def tickets(request, transaction_key):
# Get transaction
transaction = get_object_or_404(StoreTransaction, key=transaction_key)
if not transaction.paid:
raise Http404
# Get all tickets by this transaction
tickets = Ticket.objects.filter(transaction=transaction)
# Render tickets
return render_to_response('tickets/tickets.html', {
'transaction': transaction,
'tickets': tickets,
}, context_instance=RequestContext(request))
|
Make sure ticket is paid before it can be viewed
|
tickets: Make sure ticket is paid before it can be viewed
|
Python
|
mit
|
Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org
|
from datetime import datetime
from django.conf import settings
from django.http import HttpResponse, Http404
from django.template import RequestContext
from django.shortcuts import render_to_response, get_object_or_404
from Instanssi.tickets.models import Ticket
from Instanssi.store.models import StoreTransaction
# Logging related
import logging
logger = logging.getLogger(__name__)
# Shows information about a single ticket
def ticket(request, ticket_key):
# Find ticket
ticket = get_object_or_404(Ticket, key=ticket_key)
# Render ticket
return render_to_response('tickets/ticket.html', {
'ticket': ticket,
}, context_instance=RequestContext(request))
# Lists all tickets
def tickets(request, transaction_key):
# Get transaction
transaction = get_object_or_404(StoreTransaction, key=transaction_key)
+ if not transaction.paid:
+ raise Http404
# Get all tickets by this transaction
tickets = Ticket.objects.filter(transaction=transaction)
# Render tickets
return render_to_response('tickets/tickets.html', {
'transaction': transaction,
'tickets': tickets,
}, context_instance=RequestContext(request))
+
|
Make sure ticket is paid before it can be viewed
|
## Code Before:
from datetime import datetime
from django.conf import settings
from django.http import HttpResponse, Http404
from django.template import RequestContext
from django.shortcuts import render_to_response, get_object_or_404
from Instanssi.tickets.models import Ticket
from Instanssi.store.models import StoreTransaction
# Logging related
import logging
logger = logging.getLogger(__name__)
# Shows information about a single ticket
def ticket(request, ticket_key):
# Find ticket
ticket = get_object_or_404(Ticket, key=ticket_key)
# Render ticket
return render_to_response('tickets/ticket.html', {
'ticket': ticket,
}, context_instance=RequestContext(request))
# Lists all tickets
def tickets(request, transaction_key):
# Get transaction
transaction = get_object_or_404(StoreTransaction, key=transaction_key)
# Get all tickets by this transaction
tickets = Ticket.objects.filter(transaction=transaction)
# Render tickets
return render_to_response('tickets/tickets.html', {
'transaction': transaction,
'tickets': tickets,
}, context_instance=RequestContext(request))
## Instruction:
Make sure ticket is paid before it can be viewed
## Code After:
from datetime import datetime
from django.conf import settings
from django.http import HttpResponse, Http404
from django.template import RequestContext
from django.shortcuts import render_to_response, get_object_or_404
from Instanssi.tickets.models import Ticket
from Instanssi.store.models import StoreTransaction
# Logging related
import logging
logger = logging.getLogger(__name__)
# Shows information about a single ticket
def ticket(request, ticket_key):
# Find ticket
ticket = get_object_or_404(Ticket, key=ticket_key)
# Render ticket
return render_to_response('tickets/ticket.html', {
'ticket': ticket,
}, context_instance=RequestContext(request))
# Lists all tickets
def tickets(request, transaction_key):
# Get transaction
transaction = get_object_or_404(StoreTransaction, key=transaction_key)
if not transaction.paid:
raise Http404
# Get all tickets by this transaction
tickets = Ticket.objects.filter(transaction=transaction)
# Render tickets
return render_to_response('tickets/tickets.html', {
'transaction': transaction,
'tickets': tickets,
}, context_instance=RequestContext(request))
|
# ... existing code ...
transaction = get_object_or_404(StoreTransaction, key=transaction_key)
if not transaction.paid:
raise Http404
# ... rest of the code ...
|
0aa3af24533a0aa605d05bd034a0bfdcc55c2993
|
backend/conferences/types.py
|
backend/conferences/types.py
|
import graphene
from .models import Conference
from graphene_django import DjangoObjectType
from tickets.types import TicketType
class ConferenceType(DjangoObjectType):
tickets = graphene.List(graphene.NonNull(TicketType))
def resolve_tickets(self, info):
return self.tickets.all()
class Meta:
model = Conference
only_fields = ('id', 'start', 'end', 'name', 'slug')
|
import graphene
from .models import Conference
from graphene_django import DjangoObjectType
from tickets.types import TicketType
class ConferenceType(DjangoObjectType):
tickets = graphene.List(graphene.NonNull(TicketType))
def resolve_tickets(self, info):
return self.tickets.all()
class Meta:
model = Conference
only_fields = (
'id',
'name',
'slug',
'start',
'end',
'cfp_start',
'cfp_end',
'voting_start',
'voting_end',
'refund_start',
'refund_end'
)
|
Add dates to Conference GraphQL type
|
Add dates to Conference GraphQL type
|
Python
|
mit
|
patrick91/pycon,patrick91/pycon
|
import graphene
from .models import Conference
from graphene_django import DjangoObjectType
from tickets.types import TicketType
class ConferenceType(DjangoObjectType):
tickets = graphene.List(graphene.NonNull(TicketType))
def resolve_tickets(self, info):
return self.tickets.all()
class Meta:
model = Conference
- only_fields = ('id', 'start', 'end', 'name', 'slug')
+ only_fields = (
+ 'id',
+ 'name',
+ 'slug',
+ 'start',
+ 'end',
+ 'cfp_start',
+ 'cfp_end',
+ 'voting_start',
+ 'voting_end',
+ 'refund_start',
+ 'refund_end'
+ )
|
Add dates to Conference GraphQL type
|
## Code Before:
import graphene
from .models import Conference
from graphene_django import DjangoObjectType
from tickets.types import TicketType
class ConferenceType(DjangoObjectType):
tickets = graphene.List(graphene.NonNull(TicketType))
def resolve_tickets(self, info):
return self.tickets.all()
class Meta:
model = Conference
only_fields = ('id', 'start', 'end', 'name', 'slug')
## Instruction:
Add dates to Conference GraphQL type
## Code After:
import graphene
from .models import Conference
from graphene_django import DjangoObjectType
from tickets.types import TicketType
class ConferenceType(DjangoObjectType):
tickets = graphene.List(graphene.NonNull(TicketType))
def resolve_tickets(self, info):
return self.tickets.all()
class Meta:
model = Conference
only_fields = (
'id',
'name',
'slug',
'start',
'end',
'cfp_start',
'cfp_end',
'voting_start',
'voting_end',
'refund_start',
'refund_end'
)
|
# ... existing code ...
model = Conference
only_fields = (
'id',
'name',
'slug',
'start',
'end',
'cfp_start',
'cfp_end',
'voting_start',
'voting_end',
'refund_start',
'refund_end'
)
# ... rest of the code ...
|
c6d589859d621ac0eb2b4843a22cfe8e011bbeaf
|
braid/postgres.py
|
braid/postgres.py
|
from fabric.api import sudo, hide
from braid import package
from pipes import quote
def install():
package.install(['postgresql-9.1', 'postgresql-server-dev-9.1'])
def _runQuery(query):
with hide('running', 'output'):
return sudo('psql --no-align --no-readline --no-password --quiet '
'--tuples-only -c {}'.format(quote(query)),
user='postgres', pty=False, combine_stderr=False)
def _dbExists(name):
res = _runQuery("select count(*) from pg_database "
"where datname = '{}';".format(name))
return res == '1'
def _userExists(name):
res = _runQuery("select count(*) from pg_user "
"where usename = '{}';".format(name))
return res == '1'
def createUser(name):
if not _userExists(name):
sudo('createuser -D -R -S {}'.format(name), user='postgres', pty=False)
def createDb(name, owner):
if not _dbExists(name):
sudo('createdb -O {} {}'.format(owner, name), user='postgres',
pty=False)
def grantRead(user, database):
"""
Grant read permissions to C{user} to all tables in C{database}.
"""
def grantReadWrite(user, database):
"""
Grant read and write permissions to C{user} to all tables in C{database}.
"""
|
from fabric.api import sudo, hide
from braid import package
from pipes import quote
def install():
package.install(['postgresql-9.1', 'postgresql-server-dev-9.1'])
def _runQuery(query, database=None):
with hide('running', 'output'):
database = '--dbname={}'.format(database) if database else ''
return sudo('psql --no-align --no-readline --no-password --quiet '
'--tuples-only {} -c {}'.format(database, quote(query)),
user='postgres', pty=False, combine_stderr=False)
def _dbExists(name):
res = _runQuery("select count(*) from pg_database "
"where datname = '{}';".format(name))
return res == '1'
def _userExists(name):
res = _runQuery("select count(*) from pg_user "
"where usename = '{}';".format(name))
return res == '1'
def createUser(name):
if not _userExists(name):
sudo('createuser -D -R -S {}'.format(name), user='postgres', pty=False)
def createDb(name, owner):
if not _dbExists(name):
sudo('createdb -O {} {}'.format(owner, name), user='postgres',
pty=False)
def grantRead(user, database):
"""
Grant read permissions to C{user} to all tables in C{database}.
"""
def grantReadWrite(user, database):
"""
Grant read and write permissions to C{user} to all tables in C{database}.
"""
|
Allow to specify a database when running a query
|
Allow to specify a database when running a query
|
Python
|
mit
|
alex/braid,alex/braid
|
from fabric.api import sudo, hide
from braid import package
from pipes import quote
def install():
package.install(['postgresql-9.1', 'postgresql-server-dev-9.1'])
- def _runQuery(query):
+ def _runQuery(query, database=None):
with hide('running', 'output'):
+ database = '--dbname={}'.format(database) if database else ''
return sudo('psql --no-align --no-readline --no-password --quiet '
- '--tuples-only -c {}'.format(quote(query)),
+ '--tuples-only {} -c {}'.format(database, quote(query)),
user='postgres', pty=False, combine_stderr=False)
def _dbExists(name):
res = _runQuery("select count(*) from pg_database "
"where datname = '{}';".format(name))
return res == '1'
def _userExists(name):
res = _runQuery("select count(*) from pg_user "
"where usename = '{}';".format(name))
return res == '1'
def createUser(name):
if not _userExists(name):
sudo('createuser -D -R -S {}'.format(name), user='postgres', pty=False)
def createDb(name, owner):
if not _dbExists(name):
sudo('createdb -O {} {}'.format(owner, name), user='postgres',
pty=False)
def grantRead(user, database):
"""
Grant read permissions to C{user} to all tables in C{database}.
"""
def grantReadWrite(user, database):
"""
Grant read and write permissions to C{user} to all tables in C{database}.
"""
|
Allow to specify a database when running a query
|
## Code Before:
from fabric.api import sudo, hide
from braid import package
from pipes import quote
def install():
package.install(['postgresql-9.1', 'postgresql-server-dev-9.1'])
def _runQuery(query):
with hide('running', 'output'):
return sudo('psql --no-align --no-readline --no-password --quiet '
'--tuples-only -c {}'.format(quote(query)),
user='postgres', pty=False, combine_stderr=False)
def _dbExists(name):
res = _runQuery("select count(*) from pg_database "
"where datname = '{}';".format(name))
return res == '1'
def _userExists(name):
res = _runQuery("select count(*) from pg_user "
"where usename = '{}';".format(name))
return res == '1'
def createUser(name):
if not _userExists(name):
sudo('createuser -D -R -S {}'.format(name), user='postgres', pty=False)
def createDb(name, owner):
if not _dbExists(name):
sudo('createdb -O {} {}'.format(owner, name), user='postgres',
pty=False)
def grantRead(user, database):
"""
Grant read permissions to C{user} to all tables in C{database}.
"""
def grantReadWrite(user, database):
"""
Grant read and write permissions to C{user} to all tables in C{database}.
"""
## Instruction:
Allow to specify a database when running a query
## Code After:
from fabric.api import sudo, hide
from braid import package
from pipes import quote
def install():
package.install(['postgresql-9.1', 'postgresql-server-dev-9.1'])
def _runQuery(query, database=None):
with hide('running', 'output'):
database = '--dbname={}'.format(database) if database else ''
return sudo('psql --no-align --no-readline --no-password --quiet '
'--tuples-only {} -c {}'.format(database, quote(query)),
user='postgres', pty=False, combine_stderr=False)
def _dbExists(name):
res = _runQuery("select count(*) from pg_database "
"where datname = '{}';".format(name))
return res == '1'
def _userExists(name):
res = _runQuery("select count(*) from pg_user "
"where usename = '{}';".format(name))
return res == '1'
def createUser(name):
if not _userExists(name):
sudo('createuser -D -R -S {}'.format(name), user='postgres', pty=False)
def createDb(name, owner):
if not _dbExists(name):
sudo('createdb -O {} {}'.format(owner, name), user='postgres',
pty=False)
def grantRead(user, database):
"""
Grant read permissions to C{user} to all tables in C{database}.
"""
def grantReadWrite(user, database):
"""
Grant read and write permissions to C{user} to all tables in C{database}.
"""
|
// ... existing code ...
def _runQuery(query, database=None):
with hide('running', 'output'):
database = '--dbname={}'.format(database) if database else ''
return sudo('psql --no-align --no-readline --no-password --quiet '
'--tuples-only {} -c {}'.format(database, quote(query)),
user='postgres', pty=False, combine_stderr=False)
// ... rest of the code ...
|
79cb9edf45ed77cdaa851e45d71f10c69db41221
|
benchexec/tools/yogar-cbmc-parallel.py
|
benchexec/tools/yogar-cbmc-parallel.py
|
import benchexec.util as util
yogar_cbmc = __import__("benchexec.tools.yogar-cbmc", fromlist=["Tool"])
class Tool(yogar_cbmc.Tool):
def executable(self):
return util.find_executable('yogar-cbmc-parallel')
def name(self):
return 'Yogar-CBMC-Parallel'
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
return [executable] + options + tasks
|
import benchexec.util as util
yogar_cbmc = __import__("benchexec.tools.yogar-cbmc", fromlist=["Tool"])
class Tool(yogar_cbmc.Tool):
REQUIRED_PATHS = [
"yogar-cbmc"
]
def executable(self):
return util.find_executable('yogar-cbmc-parallel')
def name(self):
return 'Yogar-CBMC-Parallel'
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
return [executable] + options + tasks
|
Add forgotten program file for deployment
|
Add forgotten program file for deployment
|
Python
|
apache-2.0
|
ultimate-pa/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,dbeyer/benchexec,ultimate-pa/benchexec,ultimate-pa/benchexec,dbeyer/benchexec,ultimate-pa/benchexec,ultimate-pa/benchexec,sosy-lab/benchexec,ultimate-pa/benchexec,sosy-lab/benchexec,dbeyer/benchexec,dbeyer/benchexec,sosy-lab/benchexec
|
import benchexec.util as util
yogar_cbmc = __import__("benchexec.tools.yogar-cbmc", fromlist=["Tool"])
class Tool(yogar_cbmc.Tool):
+
+ REQUIRED_PATHS = [
+ "yogar-cbmc"
+ ]
def executable(self):
return util.find_executable('yogar-cbmc-parallel')
def name(self):
return 'Yogar-CBMC-Parallel'
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
return [executable] + options + tasks
|
Add forgotten program file for deployment
|
## Code Before:
import benchexec.util as util
yogar_cbmc = __import__("benchexec.tools.yogar-cbmc", fromlist=["Tool"])
class Tool(yogar_cbmc.Tool):
def executable(self):
return util.find_executable('yogar-cbmc-parallel')
def name(self):
return 'Yogar-CBMC-Parallel'
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
return [executable] + options + tasks
## Instruction:
Add forgotten program file for deployment
## Code After:
import benchexec.util as util
yogar_cbmc = __import__("benchexec.tools.yogar-cbmc", fromlist=["Tool"])
class Tool(yogar_cbmc.Tool):
REQUIRED_PATHS = [
"yogar-cbmc"
]
def executable(self):
return util.find_executable('yogar-cbmc-parallel')
def name(self):
return 'Yogar-CBMC-Parallel'
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
return [executable] + options + tasks
|
// ... existing code ...
class Tool(yogar_cbmc.Tool):
REQUIRED_PATHS = [
"yogar-cbmc"
]
// ... rest of the code ...
|
2c8dafec701d80ddd9a3d1855a14a8eef0c44790
|
tests/modules/contrib/test_network_traffic.py
|
tests/modules/contrib/test_network_traffic.py
|
import pytest
pytest.importorskip("psutil")
pytest.importorskip("netifaces")
def test_load_module():
__import__("modules.contrib.network_traffic")
|
import pytest
from unittest import TestCase, mock
import core.config
import core.widget
import modules.contrib.network_traffic
from types import SimpleNamespace
pytest.importorskip("psutil")
pytest.importorskip("netifaces")
def io_counters_mock(recv, sent):
return {
'lo': SimpleNamespace(
bytes_sent = sent,
bytes_recv = recv
)
}
def gateways_response():
return {
'default': {
1: ('10.0.0.10', 'lo')
}
}
def build_module():
return modules.contrib.network_traffic.Module(config=core.config.Config([]), theme=None)
class TestNetworkTrafficUnit(TestCase):
def test_load_module(self):
__import__("modules.contrib.network_traffic")
@mock.patch('psutil.net_io_counters')
@mock.patch('netifaces.gateways')
@mock.patch('netifaces.AF_INET', 1)
def test_update_rates(self, gateways_mock, net_io_counters_mock):
net_io_counters_mock.return_value = io_counters_mock(0, 0)
gateways_mock.return_value = gateways_response()
module = build_module()
net_io_counters_mock.return_value = io_counters_mock(2842135, 1932215)
module.update()
assert module.widgets()[1].full_text() == '1.84MiB/s'
assert module.widgets()[0].full_text() == '2.71MiB/s'
def test_initial_download_rate(self):
module = build_module()
assert module.widgets()[0].full_text() == '0.00B/s'
def test_initial_upload_rate(self):
module = build_module()
assert module.widgets()[1].full_text() == '0.00B/s'
|
Add Network Traffic module tests
|
Add Network Traffic module tests
|
Python
|
mit
|
tobi-wan-kenobi/bumblebee-status,tobi-wan-kenobi/bumblebee-status
|
import pytest
+ from unittest import TestCase, mock
+
+ import core.config
+ import core.widget
+ import modules.contrib.network_traffic
+
+ from types import SimpleNamespace
pytest.importorskip("psutil")
-
pytest.importorskip("netifaces")
+ def io_counters_mock(recv, sent):
+ return {
+ 'lo': SimpleNamespace(
+ bytes_sent = sent,
+ bytes_recv = recv
+ )
+ }
+
+ def gateways_response():
+ return {
+ 'default': {
+ 1: ('10.0.0.10', 'lo')
+ }
+ }
+
+ def build_module():
+ return modules.contrib.network_traffic.Module(config=core.config.Config([]), theme=None)
+
+ class TestNetworkTrafficUnit(TestCase):
- def test_load_module():
+ def test_load_module(self):
- __import__("modules.contrib.network_traffic")
+ __import__("modules.contrib.network_traffic")
+
+ @mock.patch('psutil.net_io_counters')
+ @mock.patch('netifaces.gateways')
+ @mock.patch('netifaces.AF_INET', 1)
+ def test_update_rates(self, gateways_mock, net_io_counters_mock):
+ net_io_counters_mock.return_value = io_counters_mock(0, 0)
+ gateways_mock.return_value = gateways_response()
+
+ module = build_module()
+
+ net_io_counters_mock.return_value = io_counters_mock(2842135, 1932215)
+ module.update()
+
+ assert module.widgets()[1].full_text() == '1.84MiB/s'
+ assert module.widgets()[0].full_text() == '2.71MiB/s'
+
+ def test_initial_download_rate(self):
+ module = build_module()
+ assert module.widgets()[0].full_text() == '0.00B/s'
+
+ def test_initial_upload_rate(self):
+ module = build_module()
+ assert module.widgets()[1].full_text() == '0.00B/s'
|
Add Network Traffic module tests
|
## Code Before:
import pytest
pytest.importorskip("psutil")
pytest.importorskip("netifaces")
def test_load_module():
__import__("modules.contrib.network_traffic")
## Instruction:
Add Network Traffic module tests
## Code After:
import pytest
from unittest import TestCase, mock
import core.config
import core.widget
import modules.contrib.network_traffic
from types import SimpleNamespace
pytest.importorskip("psutil")
pytest.importorskip("netifaces")
def io_counters_mock(recv, sent):
return {
'lo': SimpleNamespace(
bytes_sent = sent,
bytes_recv = recv
)
}
def gateways_response():
return {
'default': {
1: ('10.0.0.10', 'lo')
}
}
def build_module():
return modules.contrib.network_traffic.Module(config=core.config.Config([]), theme=None)
class TestNetworkTrafficUnit(TestCase):
def test_load_module(self):
__import__("modules.contrib.network_traffic")
@mock.patch('psutil.net_io_counters')
@mock.patch('netifaces.gateways')
@mock.patch('netifaces.AF_INET', 1)
def test_update_rates(self, gateways_mock, net_io_counters_mock):
net_io_counters_mock.return_value = io_counters_mock(0, 0)
gateways_mock.return_value = gateways_response()
module = build_module()
net_io_counters_mock.return_value = io_counters_mock(2842135, 1932215)
module.update()
assert module.widgets()[1].full_text() == '1.84MiB/s'
assert module.widgets()[0].full_text() == '2.71MiB/s'
def test_initial_download_rate(self):
module = build_module()
assert module.widgets()[0].full_text() == '0.00B/s'
def test_initial_upload_rate(self):
module = build_module()
assert module.widgets()[1].full_text() == '0.00B/s'
|
# ... existing code ...
import pytest
from unittest import TestCase, mock
import core.config
import core.widget
import modules.contrib.network_traffic
from types import SimpleNamespace
# ... modified code ...
pytest.importorskip("psutil")
pytest.importorskip("netifaces")
...
def io_counters_mock(recv, sent):
return {
'lo': SimpleNamespace(
bytes_sent = sent,
bytes_recv = recv
)
}
def gateways_response():
return {
'default': {
1: ('10.0.0.10', 'lo')
}
}
def build_module():
return modules.contrib.network_traffic.Module(config=core.config.Config([]), theme=None)
class TestNetworkTrafficUnit(TestCase):
def test_load_module(self):
__import__("modules.contrib.network_traffic")
@mock.patch('psutil.net_io_counters')
@mock.patch('netifaces.gateways')
@mock.patch('netifaces.AF_INET', 1)
def test_update_rates(self, gateways_mock, net_io_counters_mock):
net_io_counters_mock.return_value = io_counters_mock(0, 0)
gateways_mock.return_value = gateways_response()
module = build_module()
net_io_counters_mock.return_value = io_counters_mock(2842135, 1932215)
module.update()
assert module.widgets()[1].full_text() == '1.84MiB/s'
assert module.widgets()[0].full_text() == '2.71MiB/s'
def test_initial_download_rate(self):
module = build_module()
assert module.widgets()[0].full_text() == '0.00B/s'
def test_initial_upload_rate(self):
module = build_module()
assert module.widgets()[1].full_text() == '0.00B/s'
# ... rest of the code ...
|
4761d359a28630d0fe378d50e52aad66e88d3a36
|
DeepFried2/utils.py
|
DeepFried2/utils.py
|
import theano as _th
import numpy as _np
def create_param(shape, init, fan=None, name=None, type=_th.config.floatX):
return _th.shared(init(shape, fan).astype(type), name=name)
def create_param_and_grad(shape, init, fan=None, name=None, type=_th.config.floatX):
val = init(shape, fan).astype(type)
param = _th.shared(val, name=name)
grad_name = 'grad_' + name if name is not None else None
grad_param = _th.shared(_np.zeros_like(val), name=grad_name)
return param, grad_param
def create_param_state_as(other, initial_value=0, prefix='state_for_'):
return _th.shared(other.get_value()*0 + initial_value,
broadcastable=other.broadcastable,
name=prefix + str(other.name)
)
def count_params(module):
params, _ = module.parameters()
return sum(p.get_value().size for p in params)
def save_params(module, where):
params, _ = module.parameters()
_np.savez_compressed(where, params=[p.get_value() for p in params])
def load_params(module, fromwhere):
params, _ = module.parameters()
with _np.load(fromwhere) as f:
for p, v in zip(params, f['params']):
p.set_value(v)
|
import theano as _th
import numpy as _np
def create_param(shape, init, fan=None, name=None, type=_th.config.floatX):
return _th.shared(init(shape, fan).astype(type), name=name)
def create_param_and_grad(shape, init, fan=None, name=None, type=_th.config.floatX):
val = init(shape, fan).astype(type)
param = _th.shared(val, name=name)
grad_name = 'grad_' + name if name is not None else None
grad_param = _th.shared(_np.zeros_like(val), name=grad_name)
return param, grad_param
def create_param_state_as(other, initial_value=0, prefix='state_for_'):
return _th.shared(other.get_value()*0 + initial_value,
broadcastable=other.broadcastable,
name=prefix + str(other.name)
)
def count_params(module):
params, _ = module.parameters()
return sum(p.get_value().size for p in params)
def save_params(module, where, compress=False):
params, _ = module.parameters()
savefn = _np.savez_compressed if compress else _np.savez
savefn(where, params=[p.get_value() for p in params])
def load_params(module, fromwhere):
params, _ = module.parameters()
with _np.load(fromwhere) as f:
for p, v in zip(params, f['params']):
p.set_value(v)
|
Make the compression optional, as it slows down.
|
Make the compression optional, as it slows down.
|
Python
|
mit
|
elPistolero/DeepFried2,lucasb-eyer/DeepFried2,Pandoro/DeepFried2,yobibyte/DeepFried2
|
import theano as _th
import numpy as _np
def create_param(shape, init, fan=None, name=None, type=_th.config.floatX):
return _th.shared(init(shape, fan).astype(type), name=name)
def create_param_and_grad(shape, init, fan=None, name=None, type=_th.config.floatX):
val = init(shape, fan).astype(type)
param = _th.shared(val, name=name)
grad_name = 'grad_' + name if name is not None else None
grad_param = _th.shared(_np.zeros_like(val), name=grad_name)
return param, grad_param
def create_param_state_as(other, initial_value=0, prefix='state_for_'):
return _th.shared(other.get_value()*0 + initial_value,
broadcastable=other.broadcastable,
name=prefix + str(other.name)
)
def count_params(module):
params, _ = module.parameters()
return sum(p.get_value().size for p in params)
- def save_params(module, where):
+ def save_params(module, where, compress=False):
params, _ = module.parameters()
+
+ savefn = _np.savez_compressed if compress else _np.savez
- _np.savez_compressed(where, params=[p.get_value() for p in params])
+ savefn(where, params=[p.get_value() for p in params])
def load_params(module, fromwhere):
params, _ = module.parameters()
with _np.load(fromwhere) as f:
for p, v in zip(params, f['params']):
p.set_value(v)
|
Make the compression optional, as it slows down.
|
## Code Before:
import theano as _th
import numpy as _np
def create_param(shape, init, fan=None, name=None, type=_th.config.floatX):
return _th.shared(init(shape, fan).astype(type), name=name)
def create_param_and_grad(shape, init, fan=None, name=None, type=_th.config.floatX):
val = init(shape, fan).astype(type)
param = _th.shared(val, name=name)
grad_name = 'grad_' + name if name is not None else None
grad_param = _th.shared(_np.zeros_like(val), name=grad_name)
return param, grad_param
def create_param_state_as(other, initial_value=0, prefix='state_for_'):
return _th.shared(other.get_value()*0 + initial_value,
broadcastable=other.broadcastable,
name=prefix + str(other.name)
)
def count_params(module):
params, _ = module.parameters()
return sum(p.get_value().size for p in params)
def save_params(module, where):
params, _ = module.parameters()
_np.savez_compressed(where, params=[p.get_value() for p in params])
def load_params(module, fromwhere):
params, _ = module.parameters()
with _np.load(fromwhere) as f:
for p, v in zip(params, f['params']):
p.set_value(v)
## Instruction:
Make the compression optional, as it slows down.
## Code After:
import theano as _th
import numpy as _np
def create_param(shape, init, fan=None, name=None, type=_th.config.floatX):
return _th.shared(init(shape, fan).astype(type), name=name)
def create_param_and_grad(shape, init, fan=None, name=None, type=_th.config.floatX):
val = init(shape, fan).astype(type)
param = _th.shared(val, name=name)
grad_name = 'grad_' + name if name is not None else None
grad_param = _th.shared(_np.zeros_like(val), name=grad_name)
return param, grad_param
def create_param_state_as(other, initial_value=0, prefix='state_for_'):
return _th.shared(other.get_value()*0 + initial_value,
broadcastable=other.broadcastable,
name=prefix + str(other.name)
)
def count_params(module):
params, _ = module.parameters()
return sum(p.get_value().size for p in params)
def save_params(module, where, compress=False):
params, _ = module.parameters()
savefn = _np.savez_compressed if compress else _np.savez
savefn(where, params=[p.get_value() for p in params])
def load_params(module, fromwhere):
params, _ = module.parameters()
with _np.load(fromwhere) as f:
for p, v in zip(params, f['params']):
p.set_value(v)
|
# ... existing code ...
def save_params(module, where, compress=False):
params, _ = module.parameters()
savefn = _np.savez_compressed if compress else _np.savez
savefn(where, params=[p.get_value() for p in params])
# ... rest of the code ...
|
e4ef3df9401bde3c2087a7659a54246de8ec95c6
|
src/api/urls.py
|
src/api/urls.py
|
from rest_framework.routers import SimpleRouter
#
from bingo_server.api import views as bingo_server_views
router = SimpleRouter()
router.register('games', bingo_server_views.GameViewSet)
urlpatterns = router.urls
|
from rest_framework.routers import DefaultRouter
#
from bingo_server.api import views as bingo_server_views
router = DefaultRouter()
router.register('games', bingo_server_views.GameViewSet)
urlpatterns = router.urls
|
Add root view to API
|
Add root view to API
|
Python
|
mit
|
steakholders-tm/bingo-server
|
- from rest_framework.routers import SimpleRouter
+ from rest_framework.routers import DefaultRouter
#
from bingo_server.api import views as bingo_server_views
- router = SimpleRouter()
+ router = DefaultRouter()
router.register('games', bingo_server_views.GameViewSet)
urlpatterns = router.urls
|
Add root view to API
|
## Code Before:
from rest_framework.routers import SimpleRouter
#
from bingo_server.api import views as bingo_server_views
router = SimpleRouter()
router.register('games', bingo_server_views.GameViewSet)
urlpatterns = router.urls
## Instruction:
Add root view to API
## Code After:
from rest_framework.routers import DefaultRouter
#
from bingo_server.api import views as bingo_server_views
router = DefaultRouter()
router.register('games', bingo_server_views.GameViewSet)
urlpatterns = router.urls
|
// ... existing code ...
from rest_framework.routers import DefaultRouter
// ... modified code ...
router = DefaultRouter()
// ... rest of the code ...
|
37d7656019d11b3b05d59f184d72e1dd6d4ccaf7
|
contones/srs.py
|
contones/srs.py
|
"""Spatial reference systems"""
from osgeo import osr
class BaseSpatialReference(osr.SpatialReference):
"""Base class for extending osr.SpatialReference."""
def __repr__(self):
return self.wkt
@property
def srid(self):
"""Returns the EPSG ID as int if it exists."""
epsg_id = (self.GetAuthorityCode('PROJCS') or
self.GetAuthorityCode('GEOGCS'))
try:
return int(epsg_id)
except TypeError:
return
@property
def wkt(self):
"""Returns this projection in WKT format."""
return self.ExportToWkt()
@property
def proj4(self):
"""Returns this projection as a proj4 string."""
return self.ExportToProj4()
class SpatialReference(object):
"""A spatial reference."""
def __new__(cls, sref):
"""Returns a new BaseSpatialReference instance
This allows for customized construction of osr.SpatialReference which
has no init method which precludes the use of super().
"""
sr = BaseSpatialReference()
if isinstance(sref, int):
sr.ImportFromEPSG(sref)
elif isinstance(sref, str):
if sref.strip().startswith('+proj='):
sr.ImportFromProj4(sref)
else:
sr.ImportFromWkt(sref)
# Add EPSG authority if applicable
sr.AutoIdentifyEPSG()
else:
raise TypeError('Cannot create SpatialReference '
'from {}'.format(str(sref)))
return sr
|
"""Spatial reference systems"""
__all__ = ['SpatialReference']
from osgeo import osr
class BaseSpatialReference(osr.SpatialReference):
"""Base class for extending osr.SpatialReference."""
def __repr__(self):
return self.wkt
def __eq__(self, another):
return bool(self.IsSame(another))
def __ne__(self, another):
return not self.__eq__(another)
@property
def srid(self):
"""Returns the EPSG ID as int if it exists."""
epsg_id = (self.GetAuthorityCode('PROJCS') or
self.GetAuthorityCode('GEOGCS'))
try:
return int(epsg_id)
except TypeError:
return
@property
def wkt(self):
"""Returns this projection in WKT format."""
return self.ExportToWkt()
@property
def proj4(self):
"""Returns this projection as a proj4 string."""
return self.ExportToProj4()
class SpatialReference(object):
"""A spatial reference."""
def __new__(cls, sref):
"""Returns a new BaseSpatialReference instance
This allows for customized construction of osr.SpatialReference which
has no init method which precludes the use of super().
"""
sr = BaseSpatialReference()
if isinstance(sref, int):
sr.ImportFromEPSG(sref)
elif isinstance(sref, str):
if sref.strip().startswith('+proj='):
sr.ImportFromProj4(sref)
else:
sr.ImportFromWkt(sref)
# Add EPSG authority if applicable
sr.AutoIdentifyEPSG()
else:
raise TypeError('Cannot create SpatialReference '
'from {}'.format(str(sref)))
return sr
|
Add equality methods to SpatialReference
|
Add equality methods to SpatialReference
|
Python
|
bsd-3-clause
|
bkg/greenwich
|
"""Spatial reference systems"""
+ __all__ = ['SpatialReference']
+
from osgeo import osr
class BaseSpatialReference(osr.SpatialReference):
"""Base class for extending osr.SpatialReference."""
def __repr__(self):
return self.wkt
+
+ def __eq__(self, another):
+ return bool(self.IsSame(another))
+
+ def __ne__(self, another):
+ return not self.__eq__(another)
@property
def srid(self):
"""Returns the EPSG ID as int if it exists."""
epsg_id = (self.GetAuthorityCode('PROJCS') or
self.GetAuthorityCode('GEOGCS'))
try:
return int(epsg_id)
except TypeError:
return
@property
def wkt(self):
"""Returns this projection in WKT format."""
return self.ExportToWkt()
@property
def proj4(self):
"""Returns this projection as a proj4 string."""
return self.ExportToProj4()
class SpatialReference(object):
"""A spatial reference."""
def __new__(cls, sref):
"""Returns a new BaseSpatialReference instance
This allows for customized construction of osr.SpatialReference which
has no init method which precludes the use of super().
"""
sr = BaseSpatialReference()
if isinstance(sref, int):
sr.ImportFromEPSG(sref)
elif isinstance(sref, str):
if sref.strip().startswith('+proj='):
sr.ImportFromProj4(sref)
else:
sr.ImportFromWkt(sref)
# Add EPSG authority if applicable
sr.AutoIdentifyEPSG()
else:
raise TypeError('Cannot create SpatialReference '
'from {}'.format(str(sref)))
return sr
|
Add equality methods to SpatialReference
|
## Code Before:
"""Spatial reference systems"""
from osgeo import osr
class BaseSpatialReference(osr.SpatialReference):
"""Base class for extending osr.SpatialReference."""
def __repr__(self):
return self.wkt
@property
def srid(self):
"""Returns the EPSG ID as int if it exists."""
epsg_id = (self.GetAuthorityCode('PROJCS') or
self.GetAuthorityCode('GEOGCS'))
try:
return int(epsg_id)
except TypeError:
return
@property
def wkt(self):
"""Returns this projection in WKT format."""
return self.ExportToWkt()
@property
def proj4(self):
"""Returns this projection as a proj4 string."""
return self.ExportToProj4()
class SpatialReference(object):
"""A spatial reference."""
def __new__(cls, sref):
"""Returns a new BaseSpatialReference instance
This allows for customized construction of osr.SpatialReference which
has no init method which precludes the use of super().
"""
sr = BaseSpatialReference()
if isinstance(sref, int):
sr.ImportFromEPSG(sref)
elif isinstance(sref, str):
if sref.strip().startswith('+proj='):
sr.ImportFromProj4(sref)
else:
sr.ImportFromWkt(sref)
# Add EPSG authority if applicable
sr.AutoIdentifyEPSG()
else:
raise TypeError('Cannot create SpatialReference '
'from {}'.format(str(sref)))
return sr
## Instruction:
Add equality methods to SpatialReference
## Code After:
"""Spatial reference systems"""
__all__ = ['SpatialReference']
from osgeo import osr
class BaseSpatialReference(osr.SpatialReference):
"""Base class for extending osr.SpatialReference."""
def __repr__(self):
return self.wkt
def __eq__(self, another):
return bool(self.IsSame(another))
def __ne__(self, another):
return not self.__eq__(another)
@property
def srid(self):
"""Returns the EPSG ID as int if it exists."""
epsg_id = (self.GetAuthorityCode('PROJCS') or
self.GetAuthorityCode('GEOGCS'))
try:
return int(epsg_id)
except TypeError:
return
@property
def wkt(self):
"""Returns this projection in WKT format."""
return self.ExportToWkt()
@property
def proj4(self):
"""Returns this projection as a proj4 string."""
return self.ExportToProj4()
class SpatialReference(object):
"""A spatial reference."""
def __new__(cls, sref):
"""Returns a new BaseSpatialReference instance
This allows for customized construction of osr.SpatialReference which
has no init method which precludes the use of super().
"""
sr = BaseSpatialReference()
if isinstance(sref, int):
sr.ImportFromEPSG(sref)
elif isinstance(sref, str):
if sref.strip().startswith('+proj='):
sr.ImportFromProj4(sref)
else:
sr.ImportFromWkt(sref)
# Add EPSG authority if applicable
sr.AutoIdentifyEPSG()
else:
raise TypeError('Cannot create SpatialReference '
'from {}'.format(str(sref)))
return sr
|
...
"""Spatial reference systems"""
__all__ = ['SpatialReference']
from osgeo import osr
...
return self.wkt
def __eq__(self, another):
return bool(self.IsSame(another))
def __ne__(self, another):
return not self.__eq__(another)
...
|
c6862c5f864db4e77dd835f074efdd284667e6fd
|
util/ldjpp.py
|
util/ldjpp.py
|
from __future__ import print_function
import argparse
import json
parser = argparse.ArgumentParser(description='Pretty-print LDJSON.')
parser.add_argument('--indent', metavar='N', type=int, default=2,
dest='indent', help='indentation for pretty-printing')
parser.add_argument('--file', metavar='FILE', required=True, dest='file',
type=argparse.FileType('r'), help='input LDJSON file')
parser.add_argument('--sort', action='store_true', dest='sortkeys',
help='sort object keys')
args = parser.parse_args()
for line in args.file:
record = json.loads(line)
print(json.dumps(record, indent=args.indent, sort_keys=args.sortkeys))
|
from __future__ import print_function
import click
import json
from collections import OrderedDict
def json_loader(sortkeys):
def _loader(line):
if sortkeys:
return json.loads(line)
else:
# if --no-sortkeys, let's preserve file order
return json.JSONDecoder(object_pairs_hook=OrderedDict).decode(line)
return _loader
@click.command()
@click.option('indent', '-i', '--indent', default=2,
help='indentation for pretty-printing')
@click.option('--sortkeys/--no-sortkeys', default=False,
help='sort object keys')
@click.argument('infile', type=click.File())
def cli(indent, sortkeys, infile):
"""Pretty-print LDJSON."""
loader = json_loader(sortkeys)
for line in infile:
record = loader(line)
print(json.dumps(record, indent=indent, sort_keys=sortkeys))
if __name__ == '__main__':
cli()
|
Use click instead of argparse
|
Use click instead of argparse
|
Python
|
mit
|
mhyfritz/goontools,mhyfritz/goontools,mhyfritz/goontools
|
from __future__ import print_function
- import argparse
+ import click
import json
+ from collections import OrderedDict
- parser = argparse.ArgumentParser(description='Pretty-print LDJSON.')
- parser.add_argument('--indent', metavar='N', type=int, default=2,
- dest='indent', help='indentation for pretty-printing')
- parser.add_argument('--file', metavar='FILE', required=True, dest='file',
- type=argparse.FileType('r'), help='input LDJSON file')
- parser.add_argument('--sort', action='store_true', dest='sortkeys',
- help='sort object keys')
- args = parser.parse_args()
+ def json_loader(sortkeys):
+ def _loader(line):
+ if sortkeys:
+ return json.loads(line)
+ else:
+ # if --no-sortkeys, let's preserve file order
+ return json.JSONDecoder(object_pairs_hook=OrderedDict).decode(line)
+ return _loader
- for line in args.file:
- record = json.loads(line)
- print(json.dumps(record, indent=args.indent, sort_keys=args.sortkeys))
+
+ @click.command()
+ @click.option('indent', '-i', '--indent', default=2,
+ help='indentation for pretty-printing')
+ @click.option('--sortkeys/--no-sortkeys', default=False,
+ help='sort object keys')
+ @click.argument('infile', type=click.File())
+ def cli(indent, sortkeys, infile):
+ """Pretty-print LDJSON."""
+ loader = json_loader(sortkeys)
+ for line in infile:
+ record = loader(line)
+ print(json.dumps(record, indent=indent, sort_keys=sortkeys))
+
+ if __name__ == '__main__':
+ cli()
+
|
Use click instead of argparse
|
## Code Before:
from __future__ import print_function
import argparse
import json
parser = argparse.ArgumentParser(description='Pretty-print LDJSON.')
parser.add_argument('--indent', metavar='N', type=int, default=2,
dest='indent', help='indentation for pretty-printing')
parser.add_argument('--file', metavar='FILE', required=True, dest='file',
type=argparse.FileType('r'), help='input LDJSON file')
parser.add_argument('--sort', action='store_true', dest='sortkeys',
help='sort object keys')
args = parser.parse_args()
for line in args.file:
record = json.loads(line)
print(json.dumps(record, indent=args.indent, sort_keys=args.sortkeys))
## Instruction:
Use click instead of argparse
## Code After:
from __future__ import print_function
import click
import json
from collections import OrderedDict
def json_loader(sortkeys):
def _loader(line):
if sortkeys:
return json.loads(line)
else:
# if --no-sortkeys, let's preserve file order
return json.JSONDecoder(object_pairs_hook=OrderedDict).decode(line)
return _loader
@click.command()
@click.option('indent', '-i', '--indent', default=2,
help='indentation for pretty-printing')
@click.option('--sortkeys/--no-sortkeys', default=False,
help='sort object keys')
@click.argument('infile', type=click.File())
def cli(indent, sortkeys, infile):
"""Pretty-print LDJSON."""
loader = json_loader(sortkeys)
for line in infile:
record = loader(line)
print(json.dumps(record, indent=indent, sort_keys=sortkeys))
if __name__ == '__main__':
cli()
|
# ... existing code ...
from __future__ import print_function
import click
import json
from collections import OrderedDict
def json_loader(sortkeys):
def _loader(line):
if sortkeys:
return json.loads(line)
else:
# if --no-sortkeys, let's preserve file order
return json.JSONDecoder(object_pairs_hook=OrderedDict).decode(line)
return _loader
@click.command()
@click.option('indent', '-i', '--indent', default=2,
help='indentation for pretty-printing')
@click.option('--sortkeys/--no-sortkeys', default=False,
help='sort object keys')
@click.argument('infile', type=click.File())
def cli(indent, sortkeys, infile):
"""Pretty-print LDJSON."""
loader = json_loader(sortkeys)
for line in infile:
record = loader(line)
print(json.dumps(record, indent=indent, sort_keys=sortkeys))
if __name__ == '__main__':
cli()
# ... rest of the code ...
|
7f412fef594a514d13519a0f048c55b293fd84b3
|
abandoned/models.py
|
abandoned/models.py
|
from django.db import models
class Author(models.Model):
author_name = models.CharField(max_length=200, unique=True)
author_link = models.URLField()
def __str__(self):
return str(self.name)
class Reason(models.Model):
reason = models.CharField(max_length=200)
def __str__(self):
return str(self.reason)
class Tag(models.Model):
text = models.CharField(max_length=100, unique=True)
def __str__(self):
return str(self.text)
class Language(models.Model):
language_name = models.CharField(max_length=100, unique=True)
def __str__(self):
return str(self.name)
class Project(models.Model):
name = models.CharField(max_length=400)
link = models.URLField(unique=True)
author = models.ForeignKey(Author, related_name='projects')
description = models.TextField()
reason = models.ForeignKey(Reason, related_name='projects')
language = models.ForeignKey(Language, related_name='projects', null=True)
tags = models.ManyToManyField(Tag, related_name='projects')
upvotes = models.IntegerField(default=0)
date_added = models.DateTimeField(auto_now_add=True)
def __str__(self):
return str(self.name)
|
from django.db import models
class Author(models.Model):
author_name = models.CharField(max_length=200, unique=True)
author_link = models.URLField()
def __str__(self):
return str(self.name)
class Reason(models.Model):
reason = models.CharField(max_length=200)
def __str__(self):
return str(self.reason)
class Tag(models.Model):
text = models.CharField(max_length=100, unique=True)
def __str__(self):
return str(self.text)
def save(self, *args, **kwargs):
self.text = self.text.lower()
super(Tag, self).save(*args, **kwargs)
class Language(models.Model):
language_name = models.CharField(max_length=100, unique=True)
def __str__(self):
return str(self.name)
class Project(models.Model):
name = models.CharField(max_length=400)
link = models.URLField(unique=True)
author = models.ForeignKey(Author, related_name='projects')
description = models.TextField()
reason = models.ForeignKey(Reason, related_name='projects')
language = models.ForeignKey(Language, related_name='projects', null=True)
tags = models.ManyToManyField(Tag, related_name='projects')
upvotes = models.IntegerField(default=0)
date_added = models.DateTimeField(auto_now_add=True)
def __str__(self):
return str(self.name)
|
Tag text is now converted to lowercase before being saved
|
Tag text is now converted to lowercase before being saved
|
Python
|
mit
|
Kunstmord/abandoned,Kunstmord/abandoned,Kunstmord/abandoned
|
from django.db import models
class Author(models.Model):
author_name = models.CharField(max_length=200, unique=True)
author_link = models.URLField()
def __str__(self):
return str(self.name)
class Reason(models.Model):
reason = models.CharField(max_length=200)
def __str__(self):
return str(self.reason)
class Tag(models.Model):
text = models.CharField(max_length=100, unique=True)
def __str__(self):
return str(self.text)
+ def save(self, *args, **kwargs):
+ self.text = self.text.lower()
+ super(Tag, self).save(*args, **kwargs)
+
class Language(models.Model):
language_name = models.CharField(max_length=100, unique=True)
def __str__(self):
return str(self.name)
class Project(models.Model):
name = models.CharField(max_length=400)
link = models.URLField(unique=True)
author = models.ForeignKey(Author, related_name='projects')
description = models.TextField()
reason = models.ForeignKey(Reason, related_name='projects')
language = models.ForeignKey(Language, related_name='projects', null=True)
tags = models.ManyToManyField(Tag, related_name='projects')
upvotes = models.IntegerField(default=0)
date_added = models.DateTimeField(auto_now_add=True)
def __str__(self):
return str(self.name)
|
Tag text is now converted to lowercase before being saved
|
## Code Before:
from django.db import models
class Author(models.Model):
author_name = models.CharField(max_length=200, unique=True)
author_link = models.URLField()
def __str__(self):
return str(self.name)
class Reason(models.Model):
reason = models.CharField(max_length=200)
def __str__(self):
return str(self.reason)
class Tag(models.Model):
text = models.CharField(max_length=100, unique=True)
def __str__(self):
return str(self.text)
class Language(models.Model):
language_name = models.CharField(max_length=100, unique=True)
def __str__(self):
return str(self.name)
class Project(models.Model):
name = models.CharField(max_length=400)
link = models.URLField(unique=True)
author = models.ForeignKey(Author, related_name='projects')
description = models.TextField()
reason = models.ForeignKey(Reason, related_name='projects')
language = models.ForeignKey(Language, related_name='projects', null=True)
tags = models.ManyToManyField(Tag, related_name='projects')
upvotes = models.IntegerField(default=0)
date_added = models.DateTimeField(auto_now_add=True)
def __str__(self):
return str(self.name)
## Instruction:
Tag text is now converted to lowercase before being saved
## Code After:
from django.db import models
class Author(models.Model):
author_name = models.CharField(max_length=200, unique=True)
author_link = models.URLField()
def __str__(self):
return str(self.name)
class Reason(models.Model):
reason = models.CharField(max_length=200)
def __str__(self):
return str(self.reason)
class Tag(models.Model):
text = models.CharField(max_length=100, unique=True)
def __str__(self):
return str(self.text)
def save(self, *args, **kwargs):
self.text = self.text.lower()
super(Tag, self).save(*args, **kwargs)
class Language(models.Model):
language_name = models.CharField(max_length=100, unique=True)
def __str__(self):
return str(self.name)
class Project(models.Model):
name = models.CharField(max_length=400)
link = models.URLField(unique=True)
author = models.ForeignKey(Author, related_name='projects')
description = models.TextField()
reason = models.ForeignKey(Reason, related_name='projects')
language = models.ForeignKey(Language, related_name='projects', null=True)
tags = models.ManyToManyField(Tag, related_name='projects')
upvotes = models.IntegerField(default=0)
date_added = models.DateTimeField(auto_now_add=True)
def __str__(self):
return str(self.name)
|
...
def save(self, *args, **kwargs):
self.text = self.text.lower()
super(Tag, self).save(*args, **kwargs)
...
|
591a40b6e1f4ac8b1d21050ccfa10779dc9dbf7c
|
analytic_code.py
|
analytic_code.py
|
from openerp.osv import fields, osv
class analytic_code(osv.Model):
_name = "analytic.code"
_columns = dict(
name=fields.char("Name", size=128, translate=True, required=True),
nd_id=fields.many2one(
"analytic.dimension", ondelete="restrict"),
active=fields.boolean('Active'),
nd_name=fields.related('nd_id', 'name', type="char",
string="Dimension Name", store=False),
description=fields.char('Description', size=512),
)
_defaults = {
'active': 1,
}
|
from openerp.osv import fields, osv
class analytic_code(osv.Model):
_name = "analytic.code"
_columns = dict(
name=fields.char("Name", size=128, translate=True, required=True),
nd_id=fields.many2one(
"analytic.dimension", "Dimensions", ondelete="restrict"),
active=fields.boolean('Active'),
nd_name=fields.related('nd_id', 'name', type="char",
string="Dimension Name", store=False),
description=fields.char('Description', size=512),
)
_defaults = {
'active': 1,
}
|
Add string to display the name of the field Dimension during the import
|
Add string to display the name of the field Dimension during the import
|
Python
|
agpl-3.0
|
xcgd/analytic_structure
|
from openerp.osv import fields, osv
class analytic_code(osv.Model):
_name = "analytic.code"
_columns = dict(
name=fields.char("Name", size=128, translate=True, required=True),
nd_id=fields.many2one(
- "analytic.dimension", ondelete="restrict"),
+ "analytic.dimension", "Dimensions", ondelete="restrict"),
active=fields.boolean('Active'),
nd_name=fields.related('nd_id', 'name', type="char",
string="Dimension Name", store=False),
description=fields.char('Description', size=512),
)
_defaults = {
'active': 1,
}
|
Add string to display the name of the field Dimension during the import
|
## Code Before:
from openerp.osv import fields, osv
class analytic_code(osv.Model):
_name = "analytic.code"
_columns = dict(
name=fields.char("Name", size=128, translate=True, required=True),
nd_id=fields.many2one(
"analytic.dimension", ondelete="restrict"),
active=fields.boolean('Active'),
nd_name=fields.related('nd_id', 'name', type="char",
string="Dimension Name", store=False),
description=fields.char('Description', size=512),
)
_defaults = {
'active': 1,
}
## Instruction:
Add string to display the name of the field Dimension during the import
## Code After:
from openerp.osv import fields, osv
class analytic_code(osv.Model):
_name = "analytic.code"
_columns = dict(
name=fields.char("Name", size=128, translate=True, required=True),
nd_id=fields.many2one(
"analytic.dimension", "Dimensions", ondelete="restrict"),
active=fields.boolean('Active'),
nd_name=fields.related('nd_id', 'name', type="char",
string="Dimension Name", store=False),
description=fields.char('Description', size=512),
)
_defaults = {
'active': 1,
}
|
# ... existing code ...
nd_id=fields.many2one(
"analytic.dimension", "Dimensions", ondelete="restrict"),
active=fields.boolean('Active'),
# ... rest of the code ...
|
082a2d481c0ae118dfcb1456bb7f095d05a5eb0e
|
mycroft/tts/dummy_tts.py
|
mycroft/tts/dummy_tts.py
|
"""A Dummy TTS without any audio output."""
from mycroft.util.log import LOG
from .tts import TTS, TTSValidator
class DummyTTS(TTS):
def __init__(self, lang, config):
super().__init__(lang, config, DummyValidator(self), 'wav')
def execute(self, sentence, ident=None, listen=False):
"""Don't do anything, return nothing."""
LOG.info('Mycroft: {}'.format(sentence))
return None
class DummyValidator(TTSValidator):
"""Do no tests."""
def __init__(self, tts):
super().__init__(tts)
def validate_lang(self):
pass
def validate_connection(self):
pass
def get_tts_class(self):
return DummyTTS
|
"""A Dummy TTS without any audio output."""
from mycroft.util.log import LOG
from .tts import TTS, TTSValidator
class DummyTTS(TTS):
def __init__(self, lang, config):
super().__init__(lang, config, DummyValidator(self), 'wav')
def execute(self, sentence, ident=None, listen=False):
"""Don't do anything, return nothing."""
LOG.info('Mycroft: {}'.format(sentence))
self.end_audio(listen)
return None
class DummyValidator(TTSValidator):
"""Do no tests."""
def __init__(self, tts):
super().__init__(tts)
def validate_lang(self):
pass
def validate_connection(self):
pass
def get_tts_class(self):
return DummyTTS
|
Mark that audio has completed in dummy tts
|
Mark that audio has completed in dummy tts
|
Python
|
apache-2.0
|
forslund/mycroft-core,forslund/mycroft-core,MycroftAI/mycroft-core,MycroftAI/mycroft-core
|
"""A Dummy TTS without any audio output."""
from mycroft.util.log import LOG
from .tts import TTS, TTSValidator
class DummyTTS(TTS):
def __init__(self, lang, config):
super().__init__(lang, config, DummyValidator(self), 'wav')
def execute(self, sentence, ident=None, listen=False):
"""Don't do anything, return nothing."""
LOG.info('Mycroft: {}'.format(sentence))
+ self.end_audio(listen)
return None
class DummyValidator(TTSValidator):
"""Do no tests."""
def __init__(self, tts):
super().__init__(tts)
def validate_lang(self):
pass
def validate_connection(self):
pass
def get_tts_class(self):
return DummyTTS
|
Mark that audio has completed in dummy tts
|
## Code Before:
"""A Dummy TTS without any audio output."""
from mycroft.util.log import LOG
from .tts import TTS, TTSValidator
class DummyTTS(TTS):
def __init__(self, lang, config):
super().__init__(lang, config, DummyValidator(self), 'wav')
def execute(self, sentence, ident=None, listen=False):
"""Don't do anything, return nothing."""
LOG.info('Mycroft: {}'.format(sentence))
return None
class DummyValidator(TTSValidator):
"""Do no tests."""
def __init__(self, tts):
super().__init__(tts)
def validate_lang(self):
pass
def validate_connection(self):
pass
def get_tts_class(self):
return DummyTTS
## Instruction:
Mark that audio has completed in dummy tts
## Code After:
"""A Dummy TTS without any audio output."""
from mycroft.util.log import LOG
from .tts import TTS, TTSValidator
class DummyTTS(TTS):
def __init__(self, lang, config):
super().__init__(lang, config, DummyValidator(self), 'wav')
def execute(self, sentence, ident=None, listen=False):
"""Don't do anything, return nothing."""
LOG.info('Mycroft: {}'.format(sentence))
self.end_audio(listen)
return None
class DummyValidator(TTSValidator):
"""Do no tests."""
def __init__(self, tts):
super().__init__(tts)
def validate_lang(self):
pass
def validate_connection(self):
pass
def get_tts_class(self):
return DummyTTS
|
// ... existing code ...
LOG.info('Mycroft: {}'.format(sentence))
self.end_audio(listen)
return None
// ... rest of the code ...
|
c21fe453911af190f3cbd93356396d4f5e65195e
|
mopidy/backends/gstreamer.py
|
mopidy/backends/gstreamer.py
|
import logging
import gst
from mopidy import config
from mopidy.backends import BaseBackend
from mopidy.models import Artist, Album, Track, Playlist
logger = logging.getLogger(u'backends.gstreamer')
class GStreamerBackend(BaseBackend):
def __init__(self, *args, **kwargs):
super(GStreamerBackend, self).__init__(*args, **kwargs)
playlist = []
player = gst.element_factory_make("playbin2", "player")
fakesink = gst.element_factory_make("fakesink", "fakesink")
player.set_property("video-sink", fakesink)
self.player = player
def _play_current_track(self):
self.player.set_property("uri", self._current_track.uri)
self.player.set_state(gst.STATE_PLAYING)
def _play(self):
if self._current_track is not None:
self._play_current_track()
return True
else:
return False
def _stop(self):
self.player.set_state(gst.STATE_NULL)
return True
|
import logging
import gst
from mopidy import config
from mopidy.backends import BaseBackend
from mopidy.models import Artist, Album, Track, Playlist
logger = logging.getLogger(u'backends.gstreamer')
class GStreamerBackend(BaseBackend):
def __init__(self, *args, **kwargs):
super(GStreamerBackend, self).__init__(*args, **kwargs)
playlist = []
player = gst.element_factory_make("playbin2", "player")
fakesink = gst.element_factory_make("fakesink", "fakesink")
player.set_property("video-sink", fakesink)
self.player = player
def _play(self):
if self._current_track is None:
return False
self.player.set_property("uri", self._current_track.uri)
self.player.set_state(gst.STATE_PLAYING)
return True
def _stop(self):
self.player.set_state(gst.STATE_NULL)
return True
|
Clean play code for GStreamer
|
Clean play code for GStreamer
|
Python
|
apache-2.0
|
vrs01/mopidy,bacontext/mopidy,vrs01/mopidy,quartz55/mopidy,priestd09/mopidy,swak/mopidy,liamw9534/mopidy,hkariti/mopidy,pacificIT/mopidy,ZenithDK/mopidy,hkariti/mopidy,dbrgn/mopidy,kingosticks/mopidy,ZenithDK/mopidy,diandiankan/mopidy,vrs01/mopidy,hkariti/mopidy,pacificIT/mopidy,ali/mopidy,SuperStarPL/mopidy,adamcik/mopidy,swak/mopidy,glogiotatidis/mopidy,adamcik/mopidy,SuperStarPL/mopidy,mokieyue/mopidy,rawdlite/mopidy,priestd09/mopidy,diandiankan/mopidy,swak/mopidy,tkem/mopidy,diandiankan/mopidy,dbrgn/mopidy,mopidy/mopidy,woutervanwijk/mopidy,jodal/mopidy,bacontext/mopidy,jcass77/mopidy,ali/mopidy,jcass77/mopidy,bacontext/mopidy,ali/mopidy,SuperStarPL/mopidy,abarisain/mopidy,mopidy/mopidy,mopidy/mopidy,bacontext/mopidy,quartz55/mopidy,quartz55/mopidy,quartz55/mopidy,ZenithDK/mopidy,rawdlite/mopidy,ali/mopidy,abarisain/mopidy,hkariti/mopidy,tkem/mopidy,ZenithDK/mopidy,tkem/mopidy,rawdlite/mopidy,jmarsik/mopidy,jodal/mopidy,diandiankan/mopidy,priestd09/mopidy,tkem/mopidy,liamw9534/mopidy,bencevans/mopidy,dbrgn/mopidy,pacificIT/mopidy,glogiotatidis/mopidy,swak/mopidy,glogiotatidis/mopidy,glogiotatidis/mopidy,mokieyue/mopidy,dbrgn/mopidy,bencevans/mopidy,SuperStarPL/mopidy,adamcik/mopidy,jmarsik/mopidy,jcass77/mopidy,kingosticks/mopidy,rawdlite/mopidy,vrs01/mopidy,mokieyue/mopidy,bencevans/mopidy,bencevans/mopidy,jmarsik/mopidy,jmarsik/mopidy,kingosticks/mopidy,woutervanwijk/mopidy,mokieyue/mopidy,jodal/mopidy,pacificIT/mopidy
|
import logging
import gst
from mopidy import config
from mopidy.backends import BaseBackend
from mopidy.models import Artist, Album, Track, Playlist
logger = logging.getLogger(u'backends.gstreamer')
class GStreamerBackend(BaseBackend):
def __init__(self, *args, **kwargs):
super(GStreamerBackend, self).__init__(*args, **kwargs)
playlist = []
player = gst.element_factory_make("playbin2", "player")
fakesink = gst.element_factory_make("fakesink", "fakesink")
player.set_property("video-sink", fakesink)
self.player = player
+ def _play(self):
+ if self._current_track is None:
+ return False
- def _play_current_track(self):
self.player.set_property("uri", self._current_track.uri)
self.player.set_state(gst.STATE_PLAYING)
- def _play(self):
- if self._current_track is not None:
- self._play_current_track()
- return True
+ return True
- else:
- return False
def _stop(self):
self.player.set_state(gst.STATE_NULL)
return True
|
Clean play code for GStreamer
|
## Code Before:
import logging
import gst
from mopidy import config
from mopidy.backends import BaseBackend
from mopidy.models import Artist, Album, Track, Playlist
logger = logging.getLogger(u'backends.gstreamer')
class GStreamerBackend(BaseBackend):
def __init__(self, *args, **kwargs):
super(GStreamerBackend, self).__init__(*args, **kwargs)
playlist = []
player = gst.element_factory_make("playbin2", "player")
fakesink = gst.element_factory_make("fakesink", "fakesink")
player.set_property("video-sink", fakesink)
self.player = player
def _play_current_track(self):
self.player.set_property("uri", self._current_track.uri)
self.player.set_state(gst.STATE_PLAYING)
def _play(self):
if self._current_track is not None:
self._play_current_track()
return True
else:
return False
def _stop(self):
self.player.set_state(gst.STATE_NULL)
return True
## Instruction:
Clean play code for GStreamer
## Code After:
import logging
import gst
from mopidy import config
from mopidy.backends import BaseBackend
from mopidy.models import Artist, Album, Track, Playlist
logger = logging.getLogger(u'backends.gstreamer')
class GStreamerBackend(BaseBackend):
    """Playback backend driven by GStreamer's ``playbin2`` element."""

    def __init__(self, *args, **kwargs):
        super(GStreamerBackend, self).__init__(*args, **kwargs)
        # NOTE(review): this local is never stored on self and is unused —
        # presumably intended to be self.playlist; confirm against BaseBackend.
        playlist = []

        player = gst.element_factory_make("playbin2", "player")
        # Route video to a fakesink so only audio is rendered.
        fakesink = gst.element_factory_make("fakesink", "fakesink")
        player.set_property("video-sink", fakesink)

        self.player = player

    def _play(self):
        """Start playback of the current track.

        Returns:
            False when no track is selected, True once playback starts.
        """
        if self._current_track is None:
            return False
        self.player.set_property("uri", self._current_track.uri)
        self.player.set_state(gst.STATE_PLAYING)
        return True

    def _stop(self):
        """Stop playback by resetting the pipeline to the NULL state."""
        self.player.set_state(gst.STATE_NULL)
        return True
|
# ... existing code ...
def _play(self):
if self._current_track is None:
return False
self.player.set_property("uri", self._current_track.uri)
# ... modified code ...
return True
# ... rest of the code ...
|
fafd048452ebfb3379ab428cc74e795d3406478f
|
apps/comments/models.py
|
apps/comments/models.py
|
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from django.db.models.signals import post_save
from notification import models as notification
from ..core.models import BaseModel
class Comment(BaseModel):
user = models.ForeignKey('profiles.User', related_name='users')
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
content_object = generic.GenericForeignKey('content_type', 'object_id')
comment = models.TextField()
def __unicode__(self):
return '{0}: {1}...'.format(self.user.first_name, self.comment[:50])
class Meta:
ordering = ('date_created',)
def comment_saved(sender, instance, created, **kwargs):
mentor = instance.content_object.mentor
protege = instance.content_object.protege
meeting_url = instance.content_object.get_url_with_domain()
if created:
if instance.user == mentor:
recipient = protege
elif instance.user == protege:
recipient = mentor
notification.send(
[recipient],
'comment',
{'comment': instance,
'recipient': recipient,
'meeting_url': meeting_url})
post_save.connect(comment_saved, sender=Comment)
|
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from django.db.models.signals import post_save
from notification import models as notification
from ..core.models import BaseModel
class Comment(BaseModel):
user = models.ForeignKey('profiles.User', related_name='users')
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
content_object = generic.GenericForeignKey('content_type', 'object_id')
comment = models.TextField()
def __unicode__(self):
return '{0}: {1}...'.format(self.user.first_name, self.comment[:50])
class Meta:
ordering = ('date_created',)
def comment_saved(sender, instance, created, **kwargs):
mentor = instance.content_object.mentor
protege = instance.content_object.protege
meeting_url = instance.content_object.get_url_with_domain()
if instance.user == mentor:
recipient = protege
elif instance.user == protege:
recipient = mentor
if created and recipient:
notification.send(
[recipient],
'comment',
{'comment': instance,
'recipient': recipient,
'meeting_url': meeting_url})
post_save.connect(comment_saved, sender=Comment)
|
Make sure we have a recipient before sending notification
|
Make sure we have a recipient before sending notification
|
Python
|
mit
|
SoPR/horas,SoPR/horas,SoPR/horas,SoPR/horas
|
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from django.db.models.signals import post_save
from notification import models as notification
from ..core.models import BaseModel
class Comment(BaseModel):
user = models.ForeignKey('profiles.User', related_name='users')
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
content_object = generic.GenericForeignKey('content_type', 'object_id')
comment = models.TextField()
def __unicode__(self):
return '{0}: {1}...'.format(self.user.first_name, self.comment[:50])
class Meta:
ordering = ('date_created',)
def comment_saved(sender, instance, created, **kwargs):
mentor = instance.content_object.mentor
protege = instance.content_object.protege
meeting_url = instance.content_object.get_url_with_domain()
- if created:
- if instance.user == mentor:
+ if instance.user == mentor:
- recipient = protege
+ recipient = protege
- elif instance.user == protege:
+ elif instance.user == protege:
- recipient = mentor
+ recipient = mentor
+ if created and recipient:
notification.send(
[recipient],
'comment',
{'comment': instance,
'recipient': recipient,
'meeting_url': meeting_url})
post_save.connect(comment_saved, sender=Comment)
|
Make sure we have a recipient before sending notification
|
## Code Before:
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from django.db.models.signals import post_save
from notification import models as notification
from ..core.models import BaseModel
class Comment(BaseModel):
user = models.ForeignKey('profiles.User', related_name='users')
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
content_object = generic.GenericForeignKey('content_type', 'object_id')
comment = models.TextField()
def __unicode__(self):
return '{0}: {1}...'.format(self.user.first_name, self.comment[:50])
class Meta:
ordering = ('date_created',)
def comment_saved(sender, instance, created, **kwargs):
mentor = instance.content_object.mentor
protege = instance.content_object.protege
meeting_url = instance.content_object.get_url_with_domain()
if created:
if instance.user == mentor:
recipient = protege
elif instance.user == protege:
recipient = mentor
notification.send(
[recipient],
'comment',
{'comment': instance,
'recipient': recipient,
'meeting_url': meeting_url})
post_save.connect(comment_saved, sender=Comment)
## Instruction:
Make sure we have a recipient before sending notification
## Code After:
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from django.db.models.signals import post_save
from notification import models as notification
from ..core.models import BaseModel
class Comment(BaseModel):
    """A user comment attached to an arbitrary model via a generic FK."""

    user = models.ForeignKey('profiles.User', related_name='users')
    # Generic relation: (content_type, object_id) identify the target object.
    content_type = models.ForeignKey(ContentType)
    object_id = models.PositiveIntegerField()
    content_object = generic.GenericForeignKey('content_type', 'object_id')
    comment = models.TextField()

    def __unicode__(self):
        # Truncate to keep admin/list displays readable.
        return '{0}: {1}...'.format(self.user.first_name, self.comment[:50])

    class Meta:
        ordering = ('date_created',)
def comment_saved(sender, instance, created, **kwargs):
    """post_save handler: notify the other party when a comment is created.

    A comment written by the mentor notifies the protege and vice versa.
    Comments by anyone else, or updates to existing comments, send nothing.
    """
    mentor = instance.content_object.mentor
    protege = instance.content_object.protege
    meeting_url = instance.content_object.get_url_with_domain()

    # Initialize so the check below cannot raise UnboundLocalError when the
    # comment author is neither the mentor nor the protege.
    recipient = None
    if instance.user == mentor:
        recipient = protege
    elif instance.user == protege:
        recipient = mentor

    if created and recipient:
        notification.send(
            [recipient],
            'comment',
            {'comment': instance,
             'recipient': recipient,
             'meeting_url': meeting_url})
post_save.connect(comment_saved, sender=Comment)
|
# ... existing code ...
if instance.user == mentor:
recipient = protege
elif instance.user == protege:
recipient = mentor
if created and recipient:
notification.send(
# ... rest of the code ...
|
09d780474d00f3a8f4c2295154d74dae2023c1d3
|
samples/storage_sample/storage/__init__.py
|
samples/storage_sample/storage/__init__.py
|
"""Common imports for generated storage client library."""
# pylint:disable=wildcard-import
import pkgutil
from apitools.base.py import *
from storage_v1 import *
from storage_v1_client import *
from storage_v1_messages import *
__path__ = pkgutil.extend_path(__path__, __name__)
|
"""Common imports for generated storage client library."""
# pylint:disable=wildcard-import
import pkgutil
from apitools.base.py import *
from storage_v1_client import *
from storage_v1_messages import *
__path__ = pkgutil.extend_path(__path__, __name__)
|
Drop the CLI from the sample storage client imports.
|
Drop the CLI from the sample storage client imports.
|
Python
|
apache-2.0
|
cherba/apitools,craigcitro/apitools,b-daniels/apitools,betamos/apitools,kevinli7/apitools,houglum/apitools,pcostell/apitools,thobrla/apitools,google/apitools
|
"""Common imports for generated storage client library."""
# pylint:disable=wildcard-import
import pkgutil
from apitools.base.py import *
- from storage_v1 import *
from storage_v1_client import *
from storage_v1_messages import *
__path__ = pkgutil.extend_path(__path__, __name__)
|
Drop the CLI from the sample storage client imports.
|
## Code Before:
"""Common imports for generated storage client library."""
# pylint:disable=wildcard-import
import pkgutil
from apitools.base.py import *
from storage_v1 import *
from storage_v1_client import *
from storage_v1_messages import *
__path__ = pkgutil.extend_path(__path__, __name__)
## Instruction:
Drop the CLI from the sample storage client imports.
## Code After:
"""Common imports for generated storage client library."""
# pylint:disable=wildcard-import
import pkgutil
from apitools.base.py import *
from storage_v1_client import *
from storage_v1_messages import *
__path__ = pkgutil.extend_path(__path__, __name__)
|
// ... existing code ...
from apitools.base.py import *
from storage_v1_client import *
// ... rest of the code ...
|
a4eb952cc2e583d3b7786f5dea101d1e013c8159
|
services/controllers/utils.py
|
services/controllers/utils.py
|
def map_range(x, in_min, in_max, out_min, out_max):
out_delta = out_max - out_min
in_delta = in_max - in_min
return (x - in_min) * out_delta / in_delta + out_min
|
def lerp(a, b, t):
return (1.0 - t) * a + t * b
def map_range(x, in_min, in_max, out_min, out_max):
out_delta = out_max - out_min
in_delta = in_max - in_min
return (x - in_min) * out_delta / in_delta + out_min
|
Add function for linear interpolation (lerp)
|
Add function for linear interpolation (lerp)
|
Python
|
bsd-3-clause
|
gizmo-cda/g2x-submarine-v2,gizmo-cda/g2x-submarine-v2,gizmo-cda/g2x-submarine-v2,gizmo-cda/g2x-submarine-v2
|
+ def lerp(a, b, t):
+ return (1.0 - t) * a + t * b
+
+
def map_range(x, in_min, in_max, out_min, out_max):
out_delta = out_max - out_min
in_delta = in_max - in_min
return (x - in_min) * out_delta / in_delta + out_min
|
Add function for linear interpolation (lerp)
|
## Code Before:
def map_range(x, in_min, in_max, out_min, out_max):
out_delta = out_max - out_min
in_delta = in_max - in_min
return (x - in_min) * out_delta / in_delta + out_min
## Instruction:
Add function for linear interpolation (lerp)
## Code After:
def lerp(a, b, t):
    """Linearly interpolate between *a* and *b* by factor *t*.

    t=0.0 yields a, t=1.0 yields b; values in between blend the two.
    """
    start_weight = 1.0 - t
    return start_weight * a + t * b
def map_range(x, in_min, in_max, out_min, out_max):
    """Rescale *x* from the range [in_min, in_max] onto [out_min, out_max]."""
    span_in = in_max - in_min
    span_out = out_max - out_min
    offset = x - in_min
    return offset * span_out / span_in + out_min
|
# ... existing code ...
def lerp(a, b, t):
return (1.0 - t) * a + t * b
def map_range(x, in_min, in_max, out_min, out_max):
# ... rest of the code ...
|
681cc1dc53851a2d127b4c00fc4e7d9e54bd8fba
|
cms/envs/devstack_docker.py
|
cms/envs/devstack_docker.py
|
""" Overrides for Docker-based devstack. """
from .devstack import * # pylint: disable=wildcard-import, unused-wildcard-import
# Docker does not support the syslog socket at /dev/log. Rely on the console.
LOGGING['handlers']['local'] = LOGGING['handlers']['tracking'] = {
'class': 'logging.NullHandler',
}
LOGGING['loggers']['tracking']['handlers'] = ['console']
HOST = 'edx.devstack.edxapp:18000'
SITE_NAME = HOST
LMS_ROOT_URL = 'http://{}:18000'.format(HOST)
OAUTH_OIDC_ISSUER = '{}/oauth2'.format(LMS_ROOT_URL)
JWT_AUTH.update({
'JWT_SECRET_KEY': 'lms-secret',
'JWT_ISSUER': OAUTH_OIDC_ISSUER,
'JWT_AUDIENCE': 'lms-key',
})
|
""" Overrides for Docker-based devstack. """
from .devstack import * # pylint: disable=wildcard-import, unused-wildcard-import
# Docker does not support the syslog socket at /dev/log. Rely on the console.
LOGGING['handlers']['local'] = LOGGING['handlers']['tracking'] = {
'class': 'logging.NullHandler',
}
LOGGING['loggers']['tracking']['handlers'] = ['console']
HOST = 'edx.devstack.edxapp:18000'
SITE_NAME = HOST
LMS_ROOT_URL = 'http://{}:18000'.format(HOST)
# This is the public-facing host used for previews
LMS_BASE = 'localhost:18000'
OAUTH_OIDC_ISSUER = '{}/oauth2'.format(LMS_ROOT_URL)
JWT_AUTH.update({
'JWT_SECRET_KEY': 'lms-secret',
'JWT_ISSUER': OAUTH_OIDC_ISSUER,
'JWT_AUDIENCE': 'lms-key',
})
|
Set LMS_BASE setting for Studio
|
Set LMS_BASE setting for Studio
This allows previews in LMS to work properly.
ECOM-6634
|
Python
|
agpl-3.0
|
jolyonb/edx-platform,ahmedaljazzar/edx-platform,proversity-org/edx-platform,raccoongang/edx-platform,edx/edx-platform,hastexo/edx-platform,fintech-circle/edx-platform,Stanford-Online/edx-platform,eduNEXT/edunext-platform,jolyonb/edx-platform,eduNEXT/edx-platform,ESOedX/edx-platform,a-parhom/edx-platform,prarthitm/edxplatform,gymnasium/edx-platform,appsembler/edx-platform,kmoocdev2/edx-platform,EDUlib/edx-platform,CredoReference/edx-platform,philanthropy-u/edx-platform,fintech-circle/edx-platform,miptliot/edx-platform,TeachAtTUM/edx-platform,cpennington/edx-platform,arbrandes/edx-platform,miptliot/edx-platform,romain-li/edx-platform,ESOedX/edx-platform,Lektorium-LLC/edx-platform,msegado/edx-platform,gsehub/edx-platform,TeachAtTUM/edx-platform,procangroup/edx-platform,gsehub/edx-platform,mitocw/edx-platform,edx-solutions/edx-platform,procangroup/edx-platform,mitocw/edx-platform,teltek/edx-platform,cpennington/edx-platform,CredoReference/edx-platform,msegado/edx-platform,angelapper/edx-platform,lduarte1991/edx-platform,proversity-org/edx-platform,gsehub/edx-platform,eduNEXT/edx-platform,arbrandes/edx-platform,angelapper/edx-platform,msegado/edx-platform,Edraak/edraak-platform,Edraak/edraak-platform,fintech-circle/edx-platform,BehavioralInsightsTeam/edx-platform,philanthropy-u/edx-platform,pepeportela/edx-platform,ahmedaljazzar/edx-platform,edx/edx-platform,msegado/edx-platform,gymnasium/edx-platform,ahmedaljazzar/edx-platform,ESOedX/edx-platform,stvstnfrd/edx-platform,msegado/edx-platform,Stanford-Online/edx-platform,teltek/edx-platform,Lektorium-LLC/edx-platform,romain-li/edx-platform,philanthropy-u/edx-platform,CredoReference/edx-platform,a-parhom/edx-platform,cpennington/edx-platform,appsembler/edx-platform,romain-li/edx-platform,edx-solutions/edx-platform,arbrandes/edx-platform,kmoocdev2/edx-platform,pepeportela/edx-platform,BehavioralInsightsTeam/edx-platform,hastexo/edx-platform,teltek/edx-platform,hastexo/edx-platform,gymnasium/edx-platform,prarthitm/edxplatform
,appsembler/edx-platform,romain-li/edx-platform,appsembler/edx-platform,TeachAtTUM/edx-platform,proversity-org/edx-platform,pabloborrego93/edx-platform,eduNEXT/edx-platform,Lektorium-LLC/edx-platform,romain-li/edx-platform,Edraak/edraak-platform,edx/edx-platform,kmoocdev2/edx-platform,pabloborrego93/edx-platform,philanthropy-u/edx-platform,eduNEXT/edunext-platform,EDUlib/edx-platform,gsehub/edx-platform,arbrandes/edx-platform,pepeportela/edx-platform,stvstnfrd/edx-platform,procangroup/edx-platform,Stanford-Online/edx-platform,Lektorium-LLC/edx-platform,kmoocdev2/edx-platform,pepeportela/edx-platform,raccoongang/edx-platform,BehavioralInsightsTeam/edx-platform,prarthitm/edxplatform,edx-solutions/edx-platform,lduarte1991/edx-platform,angelapper/edx-platform,TeachAtTUM/edx-platform,stvstnfrd/edx-platform,lduarte1991/edx-platform,stvstnfrd/edx-platform,ESOedX/edx-platform,raccoongang/edx-platform,Stanford-Online/edx-platform,BehavioralInsightsTeam/edx-platform,a-parhom/edx-platform,hastexo/edx-platform,CredoReference/edx-platform,raccoongang/edx-platform,kmoocdev2/edx-platform,edx/edx-platform,proversity-org/edx-platform,miptliot/edx-platform,mitocw/edx-platform,angelapper/edx-platform,Edraak/edraak-platform,cpennington/edx-platform,teltek/edx-platform,eduNEXT/edunext-platform,fintech-circle/edx-platform,procangroup/edx-platform,edx-solutions/edx-platform,lduarte1991/edx-platform,miptliot/edx-platform,ahmedaljazzar/edx-platform,a-parhom/edx-platform,eduNEXT/edx-platform,jolyonb/edx-platform,eduNEXT/edunext-platform,EDUlib/edx-platform,jolyonb/edx-platform,mitocw/edx-platform,prarthitm/edxplatform,pabloborrego93/edx-platform,gymnasium/edx-platform,EDUlib/edx-platform,pabloborrego93/edx-platform
|
""" Overrides for Docker-based devstack. """
from .devstack import * # pylint: disable=wildcard-import, unused-wildcard-import
# Docker does not support the syslog socket at /dev/log. Rely on the console.
LOGGING['handlers']['local'] = LOGGING['handlers']['tracking'] = {
'class': 'logging.NullHandler',
}
LOGGING['loggers']['tracking']['handlers'] = ['console']
HOST = 'edx.devstack.edxapp:18000'
SITE_NAME = HOST
LMS_ROOT_URL = 'http://{}:18000'.format(HOST)
+ # This is the public-facing host used for previews
+ LMS_BASE = 'localhost:18000'
+
OAUTH_OIDC_ISSUER = '{}/oauth2'.format(LMS_ROOT_URL)
JWT_AUTH.update({
'JWT_SECRET_KEY': 'lms-secret',
'JWT_ISSUER': OAUTH_OIDC_ISSUER,
'JWT_AUDIENCE': 'lms-key',
})
|
Set LMS_BASE setting for Studio
|
## Code Before:
""" Overrides for Docker-based devstack. """
from .devstack import * # pylint: disable=wildcard-import, unused-wildcard-import
# Docker does not support the syslog socket at /dev/log. Rely on the console.
LOGGING['handlers']['local'] = LOGGING['handlers']['tracking'] = {
'class': 'logging.NullHandler',
}
LOGGING['loggers']['tracking']['handlers'] = ['console']
HOST = 'edx.devstack.edxapp:18000'
SITE_NAME = HOST
LMS_ROOT_URL = 'http://{}:18000'.format(HOST)
OAUTH_OIDC_ISSUER = '{}/oauth2'.format(LMS_ROOT_URL)
JWT_AUTH.update({
'JWT_SECRET_KEY': 'lms-secret',
'JWT_ISSUER': OAUTH_OIDC_ISSUER,
'JWT_AUDIENCE': 'lms-key',
})
## Instruction:
Set LMS_BASE setting for Studio
## Code After:
""" Overrides for Docker-based devstack. """
from .devstack import * # pylint: disable=wildcard-import, unused-wildcard-import
# Docker does not support the syslog socket at /dev/log. Rely on the console.
LOGGING['handlers']['local'] = LOGGING['handlers']['tracking'] = {
'class': 'logging.NullHandler',
}
LOGGING['loggers']['tracking']['handlers'] = ['console']
HOST = 'edx.devstack.edxapp:18000'
SITE_NAME = HOST
LMS_ROOT_URL = 'http://{}:18000'.format(HOST)
# This is the public-facing host used for previews
LMS_BASE = 'localhost:18000'
OAUTH_OIDC_ISSUER = '{}/oauth2'.format(LMS_ROOT_URL)
JWT_AUTH.update({
'JWT_SECRET_KEY': 'lms-secret',
'JWT_ISSUER': OAUTH_OIDC_ISSUER,
'JWT_AUDIENCE': 'lms-key',
})
|
...
# This is the public-facing host used for previews
LMS_BASE = 'localhost:18000'
OAUTH_OIDC_ISSUER = '{}/oauth2'.format(LMS_ROOT_URL)
...
|
1b75e25746305ec47a72874e854744c395cceec6
|
src/ocspdash/constants.py
|
src/ocspdash/constants.py
|
import os
import requests.utils
from . import __name__, __version__
OCSPDASH_API_VERSION = 'v0'
OCSPDASH_DIRECTORY = os.path.join(os.path.expanduser('~'), '.ocspdash')
if not os.path.exists(OCSPDASH_DIRECTORY):
os.makedirs(OCSPDASH_DIRECTORY)
OCSPDASH_DATABASE_PATH = os.path.join(OCSPDASH_DIRECTORY, 'ocspdash.db')
OCSPDASH_DATABASE_CONNECTION = 'sqlite:///' + OCSPDASH_DATABASE_PATH
CENSYS_RATE_LIMIT = 0.2 # max requests per second
OCSPDASH_USER_AGENT = ' '.join([requests.utils.default_user_agent(), f'{__name__}/{__version__}'])
|
import os
import requests.utils
from . import __name__, __version__
OCSPDASH_API_VERSION = 'v0'
OCSPDASH_DIRECTORY = os.environ.get('OCSPDASH_DIRECTORY', os.path.join(os.path.expanduser('~'), '.ocspdash'))
if not os.path.exists(OCSPDASH_DIRECTORY):
os.makedirs(OCSPDASH_DIRECTORY)
OCSPDASH_DATABASE_CONNECTION = 'sqlite:///' + os.path.join(OCSPDASH_DIRECTORY, 'ocspdash.db')
CENSYS_RATE_LIMIT = float(os.environ.get('OCSPDASH_RATE', 0.2)) # max requests per second
OCSPDASH_USER_AGENT = ' '.join([requests.utils.default_user_agent(), f'{__name__}/{__version__}'])
|
Allow config to be set from environment
|
Allow config to be set from environment
|
Python
|
mit
|
scolby33/OCSPdash,scolby33/OCSPdash,scolby33/OCSPdash
|
import os
import requests.utils
from . import __name__, __version__
OCSPDASH_API_VERSION = 'v0'
- OCSPDASH_DIRECTORY = os.path.join(os.path.expanduser('~'), '.ocspdash')
+ OCSPDASH_DIRECTORY = os.environ.get('OCSPDASH_DIRECTORY', os.path.join(os.path.expanduser('~'), '.ocspdash'))
if not os.path.exists(OCSPDASH_DIRECTORY):
os.makedirs(OCSPDASH_DIRECTORY)
- OCSPDASH_DATABASE_PATH = os.path.join(OCSPDASH_DIRECTORY, 'ocspdash.db')
+ OCSPDASH_DATABASE_CONNECTION = 'sqlite:///' + os.path.join(OCSPDASH_DIRECTORY, 'ocspdash.db')
- OCSPDASH_DATABASE_CONNECTION = 'sqlite:///' + OCSPDASH_DATABASE_PATH
- CENSYS_RATE_LIMIT = 0.2 # max requests per second
+ CENSYS_RATE_LIMIT = float(os.environ.get('OCSPDASH_RATE', 0.2)) # max requests per second
OCSPDASH_USER_AGENT = ' '.join([requests.utils.default_user_agent(), f'{__name__}/{__version__}'])
|
Allow config to be set from environment
|
## Code Before:
import os
import requests.utils
from . import __name__, __version__
OCSPDASH_API_VERSION = 'v0'
OCSPDASH_DIRECTORY = os.path.join(os.path.expanduser('~'), '.ocspdash')
if not os.path.exists(OCSPDASH_DIRECTORY):
os.makedirs(OCSPDASH_DIRECTORY)
OCSPDASH_DATABASE_PATH = os.path.join(OCSPDASH_DIRECTORY, 'ocspdash.db')
OCSPDASH_DATABASE_CONNECTION = 'sqlite:///' + OCSPDASH_DATABASE_PATH
CENSYS_RATE_LIMIT = 0.2 # max requests per second
OCSPDASH_USER_AGENT = ' '.join([requests.utils.default_user_agent(), f'{__name__}/{__version__}'])
## Instruction:
Allow config to be set from environment
## Code After:
import os

import requests.utils

from . import __name__, __version__

OCSPDASH_API_VERSION = 'v0'

# Working directory; overridable via the OCSPDASH_DIRECTORY env var.
OCSPDASH_DIRECTORY = os.environ.get('OCSPDASH_DIRECTORY', os.path.join(os.path.expanduser('~'), '.ocspdash'))

if not os.path.exists(OCSPDASH_DIRECTORY):
    os.makedirs(OCSPDASH_DIRECTORY)

OCSPDASH_DATABASE_CONNECTION = 'sqlite:///' + os.path.join(OCSPDASH_DIRECTORY, 'ocspdash.db')

# Max requests per second against Censys; overridable via OCSPDASH_RATE.
CENSYS_RATE_LIMIT = float(os.environ.get('OCSPDASH_RATE', 0.2))  # max requests per second

# Identify this client (name/version) in outgoing HTTP requests.
OCSPDASH_USER_AGENT = ' '.join([requests.utils.default_user_agent(), f'{__name__}/{__version__}'])
|
// ... existing code ...
OCSPDASH_DIRECTORY = os.environ.get('OCSPDASH_DIRECTORY', os.path.join(os.path.expanduser('~'), '.ocspdash'))
// ... modified code ...
OCSPDASH_DATABASE_CONNECTION = 'sqlite:///' + os.path.join(OCSPDASH_DIRECTORY, 'ocspdash.db')
CENSYS_RATE_LIMIT = float(os.environ.get('OCSPDASH_RATE', 0.2)) # max requests per second
// ... rest of the code ...
|
9d0e9af5844772c18ca24d4012642d4518b66dfc
|
tests/test_judicious.py
|
tests/test_judicious.py
|
"""Tests for `judicious` package."""
import pytest
import judicious
@pytest.fixture
def response():
"""Sample pytest fixture.
See more at: http://doc.pytest.org/en/latest/fixture.html
"""
# import requests
# return requests.get('https://github.com/audreyr/cookiecutter-pypackage')
def test_content(response):
"""Sample pytest test function with the pytest fixture as an argument."""
# from bs4 import BeautifulSoup
# assert 'GitHub' in BeautifulSoup(response.content).title.string
|
"""Tests for `judicious` package."""
import random
import pytest
import judicious
def test_seeding():
r1 = random.random()
r2 = random.random()
judicious.seed("70d911d5-6d93-3c42-f9a4-53e493a79bff")
r3 = random.random()
r4 = random.random()
judicious.seed("70d911d5-6d93-3c42-f9a4-53e493a79bff")
r5 = random.random()
r6 = random.random()
judicious.seed()
r7 = random.random()
r8 = random.random()
assert(r1 != r3)
assert(r2 != r4)
assert(r3 == r5)
assert(r4 == r6)
assert(r5 != r7)
assert(r6 != r8)
@pytest.fixture
def response():
"""Sample pytest fixture.
See more at: http://doc.pytest.org/en/latest/fixture.html
"""
# import requests
# return requests.get('https://github.com/audreyr/cookiecutter-pypackage')
def test_content(response):
"""Sample pytest test function with the pytest fixture as an argument."""
# from bs4 import BeautifulSoup
# assert 'GitHub' in BeautifulSoup(response.content).title.string
|
Add test of seeding PRNG
|
Add test of seeding PRNG
|
Python
|
mit
|
suchow/judicious,suchow/judicious,suchow/judicious
|
"""Tests for `judicious` package."""
+ import random
+
import pytest
+ import judicious
- import judicious
+
+ def test_seeding():
+ r1 = random.random()
+ r2 = random.random()
+ judicious.seed("70d911d5-6d93-3c42-f9a4-53e493a79bff")
+ r3 = random.random()
+ r4 = random.random()
+ judicious.seed("70d911d5-6d93-3c42-f9a4-53e493a79bff")
+ r5 = random.random()
+ r6 = random.random()
+ judicious.seed()
+ r7 = random.random()
+ r8 = random.random()
+
+ assert(r1 != r3)
+ assert(r2 != r4)
+ assert(r3 == r5)
+ assert(r4 == r6)
+ assert(r5 != r7)
+ assert(r6 != r8)
@pytest.fixture
def response():
"""Sample pytest fixture.
See more at: http://doc.pytest.org/en/latest/fixture.html
"""
# import requests
# return requests.get('https://github.com/audreyr/cookiecutter-pypackage')
def test_content(response):
"""Sample pytest test function with the pytest fixture as an argument."""
# from bs4 import BeautifulSoup
# assert 'GitHub' in BeautifulSoup(response.content).title.string
|
Add test of seeding PRNG
|
## Code Before:
"""Tests for `judicious` package."""
import pytest
import judicious
@pytest.fixture
def response():
"""Sample pytest fixture.
See more at: http://doc.pytest.org/en/latest/fixture.html
"""
# import requests
# return requests.get('https://github.com/audreyr/cookiecutter-pypackage')
def test_content(response):
"""Sample pytest test function with the pytest fixture as an argument."""
# from bs4 import BeautifulSoup
# assert 'GitHub' in BeautifulSoup(response.content).title.string
## Instruction:
Add test of seeding PRNG
## Code After:
"""Tests for `judicious` package."""
import random
import pytest
import judicious
def test_seeding():
    """Seeding with the same UUID must reproduce the PRNG stream."""
    # Baseline draws before any explicit seeding.
    r1 = random.random()
    r2 = random.random()
    judicious.seed("70d911d5-6d93-3c42-f9a4-53e493a79bff")
    r3 = random.random()
    r4 = random.random()
    # Re-seeding with the identical value must replay the same sequence.
    judicious.seed("70d911d5-6d93-3c42-f9a4-53e493a79bff")
    r5 = random.random()
    r6 = random.random()
    # Seeding with no argument should yield a fresh, different stream.
    judicious.seed()
    r7 = random.random()
    r8 = random.random()

    assert(r1 != r3)
    assert(r2 != r4)
    assert(r3 == r5)
    assert(r4 == r6)
    assert(r5 != r7)
    assert(r6 != r8)
@pytest.fixture
def response():
"""Sample pytest fixture.
See more at: http://doc.pytest.org/en/latest/fixture.html
"""
# import requests
# return requests.get('https://github.com/audreyr/cookiecutter-pypackage')
def test_content(response):
"""Sample pytest test function with the pytest fixture as an argument."""
# from bs4 import BeautifulSoup
# assert 'GitHub' in BeautifulSoup(response.content).title.string
|
...
import random
import pytest
...
import judicious
def test_seeding():
r1 = random.random()
r2 = random.random()
judicious.seed("70d911d5-6d93-3c42-f9a4-53e493a79bff")
r3 = random.random()
r4 = random.random()
judicious.seed("70d911d5-6d93-3c42-f9a4-53e493a79bff")
r5 = random.random()
r6 = random.random()
judicious.seed()
r7 = random.random()
r8 = random.random()
assert(r1 != r3)
assert(r2 != r4)
assert(r3 == r5)
assert(r4 == r6)
assert(r5 != r7)
assert(r6 != r8)
...
|
cf19d5a52237e6098dedc3c0bbfdaa3aedd180e0
|
loginza/models.py
|
loginza/models.py
|
from django.contrib.auth.models import User
from django.db import models
from django.utils import simplejson as json
from .signals import post_associate
class IdentityManager(models.Manager):
def from_loginza_data(self, loginza_data):
data = json.dumps(loginza_data)
identity, created = self.get_or_create(
identity = loginza_data['identity'],
provider = loginza_data['provider'],
defaults = {'data': data}
)
if not created:
identity.data = data
identity.save()
return identity
class Identity(models.Model):
identity = models.CharField(max_length=255)
provider = models.CharField(max_length=255)
user = models.ForeignKey(User, null=True)
data = models.TextField()
objects = IdentityManager()
class Meta:
unique_together = (('identity', 'provider'),)
def associate(self, user):
self.user = user
self.save()
post_associate.send(sender=type(self), instance=self)
def create_user(self, username, email, password=None):
existing_users = 0
while True:
existing_users += 1
try:
User.objects.get(username=username)
except User.DoesNotExist:
break
username = '%s_%d' % (username, existing_users)
user = User.objects.create_user(username, email, password)
self.associate(user)
return user
|
from django.contrib.auth.models import User
from django.db import models
from django.utils import simplejson as json
from .signals import post_associate
class IdentityManager(models.Manager):
def from_loginza_data(self, loginza_data):
data = json.dumps(loginza_data)
identity, created = self.get_or_create(
identity = loginza_data['identity'],
provider = loginza_data['provider'],
defaults = {'data': data}
)
if not created:
identity.data = data
identity.save()
return identity
class Identity(models.Model):
identity = models.CharField(max_length=255)
provider = models.CharField(max_length=255)
user = models.ForeignKey(User, null=True)
data = models.TextField()
objects = IdentityManager()
class Meta:
unique_together = (('identity', 'provider'),)
def associate(self, user):
self.user = user
self.save()
post_associate.send(sender=type(self), instance=self)
def create_user(self, username, email, password=None):
existing_users = 0
new_username = None
while True:
existing_users += 1
qs = User.objects.all()
qs = qs.filter(username=new_username or username)
if not qs.exists():
break
new_username = '%s_%d' % (username, existing_users)
user = User.objects.create_user(new_username or username, email, password)
self.associate(user)
return user
|
Fix user creation with unique username
|
Fix user creation with unique username
|
Python
|
isc
|
xobb1t/django-loginza-auth
|
from django.contrib.auth.models import User
from django.db import models
from django.utils import simplejson as json
from .signals import post_associate
class IdentityManager(models.Manager):
def from_loginza_data(self, loginza_data):
data = json.dumps(loginza_data)
identity, created = self.get_or_create(
identity = loginza_data['identity'],
provider = loginza_data['provider'],
defaults = {'data': data}
)
if not created:
identity.data = data
identity.save()
return identity
class Identity(models.Model):
identity = models.CharField(max_length=255)
provider = models.CharField(max_length=255)
user = models.ForeignKey(User, null=True)
data = models.TextField()
objects = IdentityManager()
class Meta:
unique_together = (('identity', 'provider'),)
def associate(self, user):
self.user = user
self.save()
post_associate.send(sender=type(self), instance=self)
def create_user(self, username, email, password=None):
existing_users = 0
+ new_username = None
while True:
existing_users += 1
- try:
- User.objects.get(username=username)
- except User.DoesNotExist:
+ qs = User.objects.all()
+ qs = qs.filter(username=new_username or username)
+ if not qs.exists():
break
- username = '%s_%d' % (username, existing_users)
+ new_username = '%s_%d' % (username, existing_users)
- user = User.objects.create_user(username, email, password)
+ user = User.objects.create_user(new_username or username, email, password)
self.associate(user)
return user
|
Fix user creation with unique username
|
## Code Before:
from django.contrib.auth.models import User
from django.db import models
from django.utils import simplejson as json
from .signals import post_associate
class IdentityManager(models.Manager):
def from_loginza_data(self, loginza_data):
data = json.dumps(loginza_data)
identity, created = self.get_or_create(
identity = loginza_data['identity'],
provider = loginza_data['provider'],
defaults = {'data': data}
)
if not created:
identity.data = data
identity.save()
return identity
class Identity(models.Model):
identity = models.CharField(max_length=255)
provider = models.CharField(max_length=255)
user = models.ForeignKey(User, null=True)
data = models.TextField()
objects = IdentityManager()
class Meta:
unique_together = (('identity', 'provider'),)
def associate(self, user):
self.user = user
self.save()
post_associate.send(sender=type(self), instance=self)
def create_user(self, username, email, password=None):
existing_users = 0
while True:
existing_users += 1
try:
User.objects.get(username=username)
except User.DoesNotExist:
break
username = '%s_%d' % (username, existing_users)
user = User.objects.create_user(username, email, password)
self.associate(user)
return user
## Instruction:
Fix user creation with unique username
## Code After:
from django.contrib.auth.models import User
from django.db import models
from django.utils import simplejson as json
from .signals import post_associate
class IdentityManager(models.Manager):
def from_loginza_data(self, loginza_data):
data = json.dumps(loginza_data)
identity, created = self.get_or_create(
identity = loginza_data['identity'],
provider = loginza_data['provider'],
defaults = {'data': data}
)
if not created:
identity.data = data
identity.save()
return identity
class Identity(models.Model):
identity = models.CharField(max_length=255)
provider = models.CharField(max_length=255)
user = models.ForeignKey(User, null=True)
data = models.TextField()
objects = IdentityManager()
class Meta:
unique_together = (('identity', 'provider'),)
def associate(self, user):
self.user = user
self.save()
post_associate.send(sender=type(self), instance=self)
def create_user(self, username, email, password=None):
existing_users = 0
new_username = None
while True:
existing_users += 1
qs = User.objects.all()
qs = qs.filter(username=new_username or username)
if not qs.exists():
break
new_username = '%s_%d' % (username, existing_users)
user = User.objects.create_user(new_username or username, email, password)
self.associate(user)
return user
|
# ... existing code ...
existing_users = 0
new_username = None
while True:
# ... modified code ...
existing_users += 1
qs = User.objects.all()
qs = qs.filter(username=new_username or username)
if not qs.exists():
break
new_username = '%s_%d' % (username, existing_users)
user = User.objects.create_user(new_username or username, email, password)
self.associate(user)
# ... rest of the code ...
|
ead9192b4c2acb21df917dfe116785343e9a59a6
|
scripts/patches/transfer.py
|
scripts/patches/transfer.py
|
patches = [
{
"op": "move",
"from": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/ItemType",
"path": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType",
},
{
"op": "replace",
"path": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType",
"value": "String",
},
{
"op": "move",
"from": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/ItemType",
"path": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType",
},
{
"op": "replace",
"path": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType",
"value": "String",
},
]
|
patches = [
{
"op": "move",
"from": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/ItemType",
"path": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType",
},
{
"op": "replace",
"path": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType",
"value": "String",
},
{
"op": "move",
"from": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/ItemType",
"path": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType",
},
{
"op": "replace",
"path": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType",
"value": "String",
},
{
"op": "move",
"from": "/PropertyTypes/AWS::Transfer::Server.ProtocolDetails/Properties/As2Transports/ItemType",
"path": "/PropertyTypes/AWS::Transfer::Server.ProtocolDetails/Properties/As2Transports/PrimitiveItemType",
},
{
"op": "replace",
"path": "/PropertyTypes/AWS::Transfer::Server.ProtocolDetails/Properties/As2Transports/PrimitiveItemType",
"value": "String",
},
]
|
Fix spec issue with Transfer::Server ProtocolDetails
|
Fix spec issue with Transfer::Server ProtocolDetails
|
Python
|
bsd-2-clause
|
cloudtools/troposphere,cloudtools/troposphere
|
patches = [
{
"op": "move",
"from": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/ItemType",
"path": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType",
},
{
"op": "replace",
"path": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType",
"value": "String",
},
{
"op": "move",
"from": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/ItemType",
"path": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType",
},
{
"op": "replace",
"path": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType",
"value": "String",
},
+ {
+ "op": "move",
+ "from": "/PropertyTypes/AWS::Transfer::Server.ProtocolDetails/Properties/As2Transports/ItemType",
+ "path": "/PropertyTypes/AWS::Transfer::Server.ProtocolDetails/Properties/As2Transports/PrimitiveItemType",
+ },
+ {
+ "op": "replace",
+ "path": "/PropertyTypes/AWS::Transfer::Server.ProtocolDetails/Properties/As2Transports/PrimitiveItemType",
+ "value": "String",
+ },
]
|
Fix spec issue with Transfer::Server ProtocolDetails
|
## Code Before:
patches = [
{
"op": "move",
"from": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/ItemType",
"path": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType",
},
{
"op": "replace",
"path": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType",
"value": "String",
},
{
"op": "move",
"from": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/ItemType",
"path": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType",
},
{
"op": "replace",
"path": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType",
"value": "String",
},
]
## Instruction:
Fix spec issue with Transfer::Server ProtocolDetails
## Code After:
patches = [
{
"op": "move",
"from": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/ItemType",
"path": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType",
},
{
"op": "replace",
"path": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType",
"value": "String",
},
{
"op": "move",
"from": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/ItemType",
"path": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType",
},
{
"op": "replace",
"path": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType",
"value": "String",
},
{
"op": "move",
"from": "/PropertyTypes/AWS::Transfer::Server.ProtocolDetails/Properties/As2Transports/ItemType",
"path": "/PropertyTypes/AWS::Transfer::Server.ProtocolDetails/Properties/As2Transports/PrimitiveItemType",
},
{
"op": "replace",
"path": "/PropertyTypes/AWS::Transfer::Server.ProtocolDetails/Properties/As2Transports/PrimitiveItemType",
"value": "String",
},
]
|
# ... existing code ...
},
{
"op": "move",
"from": "/PropertyTypes/AWS::Transfer::Server.ProtocolDetails/Properties/As2Transports/ItemType",
"path": "/PropertyTypes/AWS::Transfer::Server.ProtocolDetails/Properties/As2Transports/PrimitiveItemType",
},
{
"op": "replace",
"path": "/PropertyTypes/AWS::Transfer::Server.ProtocolDetails/Properties/As2Transports/PrimitiveItemType",
"value": "String",
},
]
# ... rest of the code ...
|
af7122220447b1abe771f37400daeb4370603dd4
|
collection_pipelines/core.py
|
collection_pipelines/core.py
|
import functools
def coroutine(fn):
def wrapper(*args, **kwargs):
generator = fn(*args, **kwargs)
next(generator)
return generator
return wrapper
class CollectionPipelineProcessor:
sink = None
start_source = None
receiver = None
def process(self, item):
raise NotImplementedError
def on_done(self):
if self.receiver:
self.receiver.close()
def source(self, start_source):
self.start_source = start_source
@coroutine
def make_generator(self):
while True:
try:
item = yield
self.process(item)
except GeneratorExit:
self.on_done()
break
def __or__(self, other):
self.sink = other
def exec():
self.receiver = self.sink.make_generator()
self.start_source()
other.source(exec)
return other
|
import functools
def coroutine(fn):
def wrapper(*args, **kwargs):
generator = fn(*args, **kwargs)
next(generator)
return generator
return wrapper
class CollectionPipelineProcessor:
sink = None
start_source = None
receiver = None
def process(self, item):
raise NotImplementedError
def on_done(self):
if self.receiver:
self.receiver.close()
def source(self, start_source):
self.start_source = start_source
@coroutine
def make_generator(self):
while True:
try:
item = yield
self.process(item)
except GeneratorExit:
self.on_done()
break
def __or__(self, other):
self.sink = other
def exec():
self.receiver = self.sink.make_generator()
self.start_source()
other.source(exec)
return other
class CollectionPipelineOutput(CollectionPipelineProcessor):
"""Pipeline processor that ends the chain and starts outputing stream.
Output processor immediately starts consuming from the source.
Thus triggering the whole pipeline start.
"""
def source(self, start_source):
start_source()
|
Add base class for output pipeline processors
|
Add base class for output pipeline processors
|
Python
|
mit
|
povilasb/pycollection-pipelines
|
import functools
def coroutine(fn):
def wrapper(*args, **kwargs):
generator = fn(*args, **kwargs)
next(generator)
return generator
return wrapper
class CollectionPipelineProcessor:
sink = None
start_source = None
receiver = None
def process(self, item):
raise NotImplementedError
def on_done(self):
if self.receiver:
self.receiver.close()
def source(self, start_source):
self.start_source = start_source
@coroutine
def make_generator(self):
while True:
try:
item = yield
self.process(item)
except GeneratorExit:
self.on_done()
break
def __or__(self, other):
self.sink = other
def exec():
self.receiver = self.sink.make_generator()
self.start_source()
other.source(exec)
return other
+
+ class CollectionPipelineOutput(CollectionPipelineProcessor):
+ """Pipeline processor that ends the chain and starts outputing stream.
+
+ Output processor immediately starts consuming from the source.
+ Thus triggering the whole pipeline start.
+ """
+ def source(self, start_source):
+ start_source()
+
|
Add base class for output pipeline processors
|
## Code Before:
import functools
def coroutine(fn):
def wrapper(*args, **kwargs):
generator = fn(*args, **kwargs)
next(generator)
return generator
return wrapper
class CollectionPipelineProcessor:
sink = None
start_source = None
receiver = None
def process(self, item):
raise NotImplementedError
def on_done(self):
if self.receiver:
self.receiver.close()
def source(self, start_source):
self.start_source = start_source
@coroutine
def make_generator(self):
while True:
try:
item = yield
self.process(item)
except GeneratorExit:
self.on_done()
break
def __or__(self, other):
self.sink = other
def exec():
self.receiver = self.sink.make_generator()
self.start_source()
other.source(exec)
return other
## Instruction:
Add base class for output pipeline processors
## Code After:
import functools
def coroutine(fn):
def wrapper(*args, **kwargs):
generator = fn(*args, **kwargs)
next(generator)
return generator
return wrapper
class CollectionPipelineProcessor:
sink = None
start_source = None
receiver = None
def process(self, item):
raise NotImplementedError
def on_done(self):
if self.receiver:
self.receiver.close()
def source(self, start_source):
self.start_source = start_source
@coroutine
def make_generator(self):
while True:
try:
item = yield
self.process(item)
except GeneratorExit:
self.on_done()
break
def __or__(self, other):
self.sink = other
def exec():
self.receiver = self.sink.make_generator()
self.start_source()
other.source(exec)
return other
class CollectionPipelineOutput(CollectionPipelineProcessor):
"""Pipeline processor that ends the chain and starts outputing stream.
Output processor immediately starts consuming from the source.
Thus triggering the whole pipeline start.
"""
def source(self, start_source):
start_source()
|
...
return other
class CollectionPipelineOutput(CollectionPipelineProcessor):
"""Pipeline processor that ends the chain and starts outputing stream.
Output processor immediately starts consuming from the source.
Thus triggering the whole pipeline start.
"""
def source(self, start_source):
start_source()
...
|
9581334db472c8ad8dbff0766ec74ed6dfa20d6f
|
tests/test_api_request.py
|
tests/test_api_request.py
|
from binance.client import Client
from binance.exceptions import BinanceAPIException, BinanceRequestException
import pytest
import requests_mock
client = Client('api_key', 'api_secret')
def test_invalid_json():
"""Test Invalid response Exception"""
with pytest.raises(BinanceRequestException):
with requests_mock.mock() as m:
m.get('https://www.binance.com/exchange/public/product', text='<head></html>')
client.get_products()
def test_api_exception():
"""Test API response Exception"""
with pytest.raises(BinanceAPIException):
with requests_mock.mock() as m:
json_obj = {"code": 1002, "msg": "Invalid API call"}
m.get('https://www.binance.com/api/v1/time', json=json_obj, status_code=400)
client.get_server_time()
|
from binance.client import Client
from binance.exceptions import BinanceAPIException, BinanceRequestException, BinanceWithdrawException
import pytest
import requests_mock
client = Client('api_key', 'api_secret')
def test_invalid_json():
"""Test Invalid response Exception"""
with pytest.raises(BinanceRequestException):
with requests_mock.mock() as m:
m.get('https://www.binance.com/exchange/public/product', text='<head></html>')
client.get_products()
def test_api_exception():
"""Test API response Exception"""
with pytest.raises(BinanceAPIException):
with requests_mock.mock() as m:
json_obj = {"code": 1002, "msg": "Invalid API call"}
m.get('https://www.binance.com/api/v1/time', json=json_obj, status_code=400)
client.get_server_time()
def test_withdraw_api_exception():
"""Test Withdraw API response Exception"""
with pytest.raises(BinanceWithdrawException):
with requests_mock.mock() as m:
json_obj = {"success": False, "msg": "Insufficient funds"}
m.register_uri('POST', requests_mock.ANY, json=json_obj, status_code=200)
client.withdraw(asset='BTC', address='BTCADDRESS', amount=100)
|
Add test for withdraw exception response
|
Add test for withdraw exception response
|
Python
|
mit
|
sammchardy/python-binance
|
from binance.client import Client
- from binance.exceptions import BinanceAPIException, BinanceRequestException
+ from binance.exceptions import BinanceAPIException, BinanceRequestException, BinanceWithdrawException
import pytest
import requests_mock
client = Client('api_key', 'api_secret')
def test_invalid_json():
"""Test Invalid response Exception"""
with pytest.raises(BinanceRequestException):
with requests_mock.mock() as m:
m.get('https://www.binance.com/exchange/public/product', text='<head></html>')
client.get_products()
def test_api_exception():
"""Test API response Exception"""
with pytest.raises(BinanceAPIException):
with requests_mock.mock() as m:
json_obj = {"code": 1002, "msg": "Invalid API call"}
m.get('https://www.binance.com/api/v1/time', json=json_obj, status_code=400)
client.get_server_time()
+
+ def test_withdraw_api_exception():
+ """Test Withdraw API response Exception"""
+
+ with pytest.raises(BinanceWithdrawException):
+
+ with requests_mock.mock() as m:
+ json_obj = {"success": False, "msg": "Insufficient funds"}
+ m.register_uri('POST', requests_mock.ANY, json=json_obj, status_code=200)
+ client.withdraw(asset='BTC', address='BTCADDRESS', amount=100)
+
|
Add test for withdraw exception response
|
## Code Before:
from binance.client import Client
from binance.exceptions import BinanceAPIException, BinanceRequestException
import pytest
import requests_mock
client = Client('api_key', 'api_secret')
def test_invalid_json():
"""Test Invalid response Exception"""
with pytest.raises(BinanceRequestException):
with requests_mock.mock() as m:
m.get('https://www.binance.com/exchange/public/product', text='<head></html>')
client.get_products()
def test_api_exception():
"""Test API response Exception"""
with pytest.raises(BinanceAPIException):
with requests_mock.mock() as m:
json_obj = {"code": 1002, "msg": "Invalid API call"}
m.get('https://www.binance.com/api/v1/time', json=json_obj, status_code=400)
client.get_server_time()
## Instruction:
Add test for withdraw exception response
## Code After:
from binance.client import Client
from binance.exceptions import BinanceAPIException, BinanceRequestException, BinanceWithdrawException
import pytest
import requests_mock
client = Client('api_key', 'api_secret')
def test_invalid_json():
"""Test Invalid response Exception"""
with pytest.raises(BinanceRequestException):
with requests_mock.mock() as m:
m.get('https://www.binance.com/exchange/public/product', text='<head></html>')
client.get_products()
def test_api_exception():
"""Test API response Exception"""
with pytest.raises(BinanceAPIException):
with requests_mock.mock() as m:
json_obj = {"code": 1002, "msg": "Invalid API call"}
m.get('https://www.binance.com/api/v1/time', json=json_obj, status_code=400)
client.get_server_time()
def test_withdraw_api_exception():
"""Test Withdraw API response Exception"""
with pytest.raises(BinanceWithdrawException):
with requests_mock.mock() as m:
json_obj = {"success": False, "msg": "Insufficient funds"}
m.register_uri('POST', requests_mock.ANY, json=json_obj, status_code=200)
client.withdraw(asset='BTC', address='BTCADDRESS', amount=100)
|
// ... existing code ...
from binance.client import Client
from binance.exceptions import BinanceAPIException, BinanceRequestException, BinanceWithdrawException
import pytest
// ... modified code ...
client.get_server_time()
def test_withdraw_api_exception():
"""Test Withdraw API response Exception"""
with pytest.raises(BinanceWithdrawException):
with requests_mock.mock() as m:
json_obj = {"success": False, "msg": "Insufficient funds"}
m.register_uri('POST', requests_mock.ANY, json=json_obj, status_code=200)
client.withdraw(asset='BTC', address='BTCADDRESS', amount=100)
// ... rest of the code ...
|
e58efc792984b7ba366ebea745caa70e6660a41b
|
scrapyard/yts.py
|
scrapyard/yts.py
|
import cache
import network
import scraper
YTS_URL = 'http://yts.re'
################################################################################
def movie(movie_info):
magnet_infos = []
json_data = network.json_get_cached_optional(YTS_URL + '/api/listimdb.json', expiration=cache.HOUR, params={ 'imdb_id': movie_info['imdb_id'] })
if 'MovieList' in json_data:
for json_item in json_data['MovieList']:
title = '{0} ({1}) {2} - YIFY'.format(json_item['MovieTitleClean'], json_item['MovieYear'], json_item['Quality'])
magnet_infos.append(scraper.Magnet(json_item['TorrentMagnetUrl'], title, int(json_item['TorrentSeeds']), int(json_item['TorrentPeers'])))
return magnet_infos
|
import cache
import network
import scraper
import urllib
YTS_URL = 'http://yts.re'
################################################################################
def movie(movie_info):
magnet_infos = []
json_data = network.json_get_cached_optional(YTS_URL + '/api/v2/list_movies.json', expiration=cache.HOUR, params={ 'query_term': movie_info['imdb_id'] })
if 'data' in json_data:
if 'movies' in json_data['data']:
for movie_item in json_data['data']['movies']:
if 'imdb_code' in movie_item and movie_item['imdb_code'] == movie_info['imdb_id'] and 'torrents' in movie_item:
for torrent_item in movie_item['torrents']:
magnet_title = '{0} ({1}) {2} - YIFY'.format(movie_item['title'], movie_item['year'], torrent_item['quality'])
magnet_url = 'magnet:?xt=urn:btih:{0}&dn={1}&tr=http://exodus.desync.com:6969/announce&tr=udp://tracker.openbittorrent.com:80/announce&tr=udp://open.demonii.com:1337/announce&tr=udp://exodus.desync.com:6969/announce&tr=udp://tracker.yify-torrents.com/announce'.format(torrent_item['hash'], urllib.quote(magnet_title))
magnet_infos.append(scraper.Magnet(magnet_url, None, torrent_item['seeds'], torrent_item['peers']))
return magnet_infos
|
Upgrade YTS to API v2
|
Upgrade YTS to API v2
|
Python
|
mit
|
sharkone/scrapyard
|
import cache
import network
import scraper
+ import urllib
YTS_URL = 'http://yts.re'
################################################################################
def movie(movie_info):
magnet_infos = []
- json_data = network.json_get_cached_optional(YTS_URL + '/api/listimdb.json', expiration=cache.HOUR, params={ 'imdb_id': movie_info['imdb_id'] })
+ json_data = network.json_get_cached_optional(YTS_URL + '/api/v2/list_movies.json', expiration=cache.HOUR, params={ 'query_term': movie_info['imdb_id'] })
- if 'MovieList' in json_data:
+ if 'data' in json_data:
+ if 'movies' in json_data['data']:
- for json_item in json_data['MovieList']:
+ for movie_item in json_data['data']['movies']:
- title = '{0} ({1}) {2} - YIFY'.format(json_item['MovieTitleClean'], json_item['MovieYear'], json_item['Quality'])
- magnet_infos.append(scraper.Magnet(json_item['TorrentMagnetUrl'], title, int(json_item['TorrentSeeds']), int(json_item['TorrentPeers'])))
+ if 'imdb_code' in movie_item and movie_item['imdb_code'] == movie_info['imdb_id'] and 'torrents' in movie_item:
+ for torrent_item in movie_item['torrents']:
+ magnet_title = '{0} ({1}) {2} - YIFY'.format(movie_item['title'], movie_item['year'], torrent_item['quality'])
+ magnet_url = 'magnet:?xt=urn:btih:{0}&dn={1}&tr=http://exodus.desync.com:6969/announce&tr=udp://tracker.openbittorrent.com:80/announce&tr=udp://open.demonii.com:1337/announce&tr=udp://exodus.desync.com:6969/announce&tr=udp://tracker.yify-torrents.com/announce'.format(torrent_item['hash'], urllib.quote(magnet_title))
+ magnet_infos.append(scraper.Magnet(magnet_url, None, torrent_item['seeds'], torrent_item['peers']))
return magnet_infos
|
Upgrade YTS to API v2
|
## Code Before:
import cache
import network
import scraper
YTS_URL = 'http://yts.re'
################################################################################
def movie(movie_info):
magnet_infos = []
json_data = network.json_get_cached_optional(YTS_URL + '/api/listimdb.json', expiration=cache.HOUR, params={ 'imdb_id': movie_info['imdb_id'] })
if 'MovieList' in json_data:
for json_item in json_data['MovieList']:
title = '{0} ({1}) {2} - YIFY'.format(json_item['MovieTitleClean'], json_item['MovieYear'], json_item['Quality'])
magnet_infos.append(scraper.Magnet(json_item['TorrentMagnetUrl'], title, int(json_item['TorrentSeeds']), int(json_item['TorrentPeers'])))
return magnet_infos
## Instruction:
Upgrade YTS to API v2
## Code After:
import cache
import network
import scraper
import urllib
YTS_URL = 'http://yts.re'
################################################################################
def movie(movie_info):
magnet_infos = []
json_data = network.json_get_cached_optional(YTS_URL + '/api/v2/list_movies.json', expiration=cache.HOUR, params={ 'query_term': movie_info['imdb_id'] })
if 'data' in json_data:
if 'movies' in json_data['data']:
for movie_item in json_data['data']['movies']:
if 'imdb_code' in movie_item and movie_item['imdb_code'] == movie_info['imdb_id'] and 'torrents' in movie_item:
for torrent_item in movie_item['torrents']:
magnet_title = '{0} ({1}) {2} - YIFY'.format(movie_item['title'], movie_item['year'], torrent_item['quality'])
magnet_url = 'magnet:?xt=urn:btih:{0}&dn={1}&tr=http://exodus.desync.com:6969/announce&tr=udp://tracker.openbittorrent.com:80/announce&tr=udp://open.demonii.com:1337/announce&tr=udp://exodus.desync.com:6969/announce&tr=udp://tracker.yify-torrents.com/announce'.format(torrent_item['hash'], urllib.quote(magnet_title))
magnet_infos.append(scraper.Magnet(magnet_url, None, torrent_item['seeds'], torrent_item['peers']))
return magnet_infos
|
...
import scraper
import urllib
...
json_data = network.json_get_cached_optional(YTS_URL + '/api/v2/list_movies.json', expiration=cache.HOUR, params={ 'query_term': movie_info['imdb_id'] })
if 'data' in json_data:
if 'movies' in json_data['data']:
for movie_item in json_data['data']['movies']:
if 'imdb_code' in movie_item and movie_item['imdb_code'] == movie_info['imdb_id'] and 'torrents' in movie_item:
for torrent_item in movie_item['torrents']:
magnet_title = '{0} ({1}) {2} - YIFY'.format(movie_item['title'], movie_item['year'], torrent_item['quality'])
magnet_url = 'magnet:?xt=urn:btih:{0}&dn={1}&tr=http://exodus.desync.com:6969/announce&tr=udp://tracker.openbittorrent.com:80/announce&tr=udp://open.demonii.com:1337/announce&tr=udp://exodus.desync.com:6969/announce&tr=udp://tracker.yify-torrents.com/announce'.format(torrent_item['hash'], urllib.quote(magnet_title))
magnet_infos.append(scraper.Magnet(magnet_url, None, torrent_item['seeds'], torrent_item['peers']))
...
|
aa370f5eb39b587d71e511cb618951875896e75a
|
ckanext/ckanext-apicatalog_scheming/ckanext/apicatalog_scheming/tests/test_plugin.py
|
ckanext/ckanext-apicatalog_scheming/ckanext/apicatalog_scheming/tests/test_plugin.py
|
import pytest
from ckan.tests.factories import User, Dataset, Organization
from ckan.plugins.toolkit import get_action
@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index')
def test_allowed_organization_user_should_see_subsystem():
organization1 = Organization()
user2 = User()
org2_users = [{"name": user2["name"], "capacity": "admin"}]
organization2 = Organization(users=org2_users)
dataset = Dataset(private=True, owner_org=organization1['id'], allowed_organizations=organization2['name'])
results = get_action(u"package_search")(
{u"user": user2["name"]}, {u"include_private": True}
)["results"]
names = [r["name"] for r in results]
assert names == [dataset['name']]
|
import pytest
from ckan.tests.factories import User, Dataset, Organization
from ckan.plugins.toolkit import get_action
@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index')
def test_allowed_organization_user_should_see_subsystem():
organization1 = Organization()
user2 = User()
org2_users = [{"name": user2["name"], "capacity": "admin"}]
organization2 = Organization(users=org2_users)
dataset = Dataset(private=True, owner_org=organization1['id'], allowed_organizations=organization2['name'])
results = get_action(u"package_search")(
{u"user": user2["name"]}, {u"include_private": True}
)["results"]
names = [r["name"] for r in results]
assert names == [dataset['name']]
@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index')
def test_not_allowed_organization_user_should_not_see_subsystem():
organization1 = Organization()
user2 = User()
org2_users = [{"name": user2["name"], "capacity": "admin"}]
Organization(users=org2_users)
Dataset(private=True, owner_org=organization1['id'], allowed_organizations="")
results = get_action(u"package_search")(
{u"user": user2["name"]}, {u"include_private": True}
)["results"]
names = [r["name"] for r in results]
assert names == []
|
Add not allowed organization test
|
LIKA-410: Add not allowed organization test
|
Python
|
mit
|
vrk-kpa/api-catalog,vrk-kpa/api-catalog,vrk-kpa/api-catalog,vrk-kpa/api-catalog
|
import pytest
from ckan.tests.factories import User, Dataset, Organization
from ckan.plugins.toolkit import get_action
@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index')
def test_allowed_organization_user_should_see_subsystem():
organization1 = Organization()
user2 = User()
org2_users = [{"name": user2["name"], "capacity": "admin"}]
organization2 = Organization(users=org2_users)
dataset = Dataset(private=True, owner_org=organization1['id'], allowed_organizations=organization2['name'])
results = get_action(u"package_search")(
{u"user": user2["name"]}, {u"include_private": True}
)["results"]
names = [r["name"] for r in results]
assert names == [dataset['name']]
+
+ @pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index')
+ def test_not_allowed_organization_user_should_not_see_subsystem():
+ organization1 = Organization()
+
+ user2 = User()
+ org2_users = [{"name": user2["name"], "capacity": "admin"}]
+
+ Organization(users=org2_users)
+
+ Dataset(private=True, owner_org=organization1['id'], allowed_organizations="")
+
+ results = get_action(u"package_search")(
+ {u"user": user2["name"]}, {u"include_private": True}
+ )["results"]
+
+ names = [r["name"] for r in results]
+ assert names == []
+
|
Add not allowed organization test
|
## Code Before:
import pytest
from ckan.tests.factories import User, Dataset, Organization
from ckan.plugins.toolkit import get_action
@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index')
def test_allowed_organization_user_should_see_subsystem():
organization1 = Organization()
user2 = User()
org2_users = [{"name": user2["name"], "capacity": "admin"}]
organization2 = Organization(users=org2_users)
dataset = Dataset(private=True, owner_org=organization1['id'], allowed_organizations=organization2['name'])
results = get_action(u"package_search")(
{u"user": user2["name"]}, {u"include_private": True}
)["results"]
names = [r["name"] for r in results]
assert names == [dataset['name']]
## Instruction:
Add not allowed organization test
## Code After:
import pytest
from ckan.tests.factories import User, Dataset, Organization
from ckan.plugins.toolkit import get_action
@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index')
def test_allowed_organization_user_should_see_subsystem():
organization1 = Organization()
user2 = User()
org2_users = [{"name": user2["name"], "capacity": "admin"}]
organization2 = Organization(users=org2_users)
dataset = Dataset(private=True, owner_org=organization1['id'], allowed_organizations=organization2['name'])
results = get_action(u"package_search")(
{u"user": user2["name"]}, {u"include_private": True}
)["results"]
names = [r["name"] for r in results]
assert names == [dataset['name']]
@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index')
def test_not_allowed_organization_user_should_not_see_subsystem():
organization1 = Organization()
user2 = User()
org2_users = [{"name": user2["name"], "capacity": "admin"}]
Organization(users=org2_users)
Dataset(private=True, owner_org=organization1['id'], allowed_organizations="")
results = get_action(u"package_search")(
{u"user": user2["name"]}, {u"include_private": True}
)["results"]
names = [r["name"] for r in results]
assert names == []
|
// ... existing code ...
assert names == [dataset['name']]
@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index')
def test_not_allowed_organization_user_should_not_see_subsystem():
organization1 = Organization()
user2 = User()
org2_users = [{"name": user2["name"], "capacity": "admin"}]
Organization(users=org2_users)
Dataset(private=True, owner_org=organization1['id'], allowed_organizations="")
results = get_action(u"package_search")(
{u"user": user2["name"]}, {u"include_private": True}
)["results"]
names = [r["name"] for r in results]
assert names == []
// ... rest of the code ...
|
b0b4bad0ca68ebd1927229e85e7116fb63126c65
|
src/olympia/zadmin/helpers.py
|
src/olympia/zadmin/helpers.py
|
from jingo import register
from olympia.amo.urlresolvers import reverse
@register.function
def admin_site_links():
return {
'addons': [
('Search for add-ons by name or id',
reverse('zadmin.addon-search')),
('Featured add-ons', reverse('zadmin.features')),
('Discovery Pane promo modules',
reverse('discovery.module_admin')),
('Monthly Pick', reverse('zadmin.monthly_pick')),
('Bulk add-on validation', reverse('zadmin.validation')),
('Fake mail', reverse('zadmin.mail')),
('ACR Reports', reverse('zadmin.compat')),
('Email Add-on Developers', reverse('zadmin.email_devs')),
],
'users': [
('Configure groups', reverse('admin:access_group_changelist')),
],
'settings': [
('View site settings', reverse('zadmin.settings')),
('Django admin pages', reverse('zadmin.home')),
('Site Events', reverse('zadmin.site_events')),
],
'tools': [
('View request environment', reverse('amo.env')),
('Manage elasticsearch', reverse('zadmin.elastic')),
('Purge data from memcache', reverse('zadmin.memcache')),
('View event log', reverse('admin:editors_eventlog_changelist')),
('View addon log', reverse('admin:devhub_activitylog_changelist')),
('Generate error', reverse('zadmin.generate-error')),
('Site Status', reverse('amo.monitor')),
],
}
|
from jingo import register
from olympia.amo.urlresolvers import reverse
@register.function
def admin_site_links():
return {
'addons': [
('Search for add-ons by name or id',
reverse('zadmin.addon-search')),
('Featured add-ons', reverse('zadmin.features')),
('Discovery Pane promo modules',
reverse('discovery.module_admin')),
('Monthly Pick', reverse('zadmin.monthly_pick')),
('Bulk add-on validation', reverse('zadmin.validation')),
('Fake mail', reverse('zadmin.mail')),
('ACR Reports', reverse('zadmin.compat')),
('Email Add-on Developers', reverse('zadmin.email_devs')),
],
'users': [
('Configure groups', reverse('admin:access_group_changelist')),
],
'settings': [
('View site settings', reverse('zadmin.settings')),
('Django admin pages', reverse('zadmin.home')),
('Site Events', reverse('zadmin.site_events')),
],
'tools': [
('View request environment', reverse('amo.env')),
('Manage elasticsearch', reverse('zadmin.elastic')),
('Purge data from memcache', reverse('zadmin.memcache')),
('View event log', reverse('admin:editors_eventlog_changelist')),
('View addon log', reverse('admin:devhub_activitylog_changelist')),
('Site Status', reverse('amo.monitor')),
],
}
|
Remove generate error page from admin site
|
Remove generate error page from admin site
|
Python
|
bsd-3-clause
|
bqbn/addons-server,wagnerand/olympia,harry-7/addons-server,wagnerand/addons-server,harikishen/addons-server,psiinon/addons-server,lavish205/olympia,mstriemer/addons-server,kumar303/addons-server,Prashant-Surya/addons-server,mstriemer/olympia,mozilla/addons-server,harikishen/addons-server,Revanth47/addons-server,mstriemer/addons-server,mstriemer/olympia,lavish205/olympia,lavish205/olympia,wagnerand/olympia,diox/olympia,eviljeff/olympia,aviarypl/mozilla-l10n-addons-server,mozilla/olympia,tsl143/addons-server,Revanth47/addons-server,wagnerand/addons-server,psiinon/addons-server,eviljeff/olympia,wagnerand/addons-server,harry-7/addons-server,kumar303/addons-server,wagnerand/olympia,eviljeff/olympia,Prashant-Surya/addons-server,bqbn/addons-server,kumar303/addons-server,Revanth47/addons-server,kumar303/olympia,harry-7/addons-server,kumar303/olympia,aviarypl/mozilla-l10n-addons-server,kumar303/addons-server,mstriemer/addons-server,harikishen/addons-server,mstriemer/olympia,Prashant-Surya/addons-server,mozilla/olympia,diox/olympia,psiinon/addons-server,harry-7/addons-server,wagnerand/olympia,aviarypl/mozilla-l10n-addons-server,kumar303/olympia,mstriemer/olympia,mozilla/addons-server,bqbn/addons-server,Revanth47/addons-server,mstriemer/addons-server,diox/olympia,harikishen/addons-server,wagnerand/addons-server,diox/olympia,atiqueahmedziad/addons-server,psiinon/addons-server,eviljeff/olympia,tsl143/addons-server,mozilla/olympia,kumar303/olympia,lavish205/olympia,atiqueahmedziad/addons-server,tsl143/addons-server,tsl143/addons-server,mozilla/addons-server,mozilla/olympia,bqbn/addons-server,aviarypl/mozilla-l10n-addons-server,Prashant-Surya/addons-server,atiqueahmedziad/addons-server,mozilla/addons-server,atiqueahmedziad/addons-server
|
from jingo import register
from olympia.amo.urlresolvers import reverse
@register.function
def admin_site_links():
return {
'addons': [
('Search for add-ons by name or id',
reverse('zadmin.addon-search')),
('Featured add-ons', reverse('zadmin.features')),
('Discovery Pane promo modules',
reverse('discovery.module_admin')),
('Monthly Pick', reverse('zadmin.monthly_pick')),
('Bulk add-on validation', reverse('zadmin.validation')),
('Fake mail', reverse('zadmin.mail')),
('ACR Reports', reverse('zadmin.compat')),
('Email Add-on Developers', reverse('zadmin.email_devs')),
],
'users': [
('Configure groups', reverse('admin:access_group_changelist')),
],
'settings': [
('View site settings', reverse('zadmin.settings')),
('Django admin pages', reverse('zadmin.home')),
('Site Events', reverse('zadmin.site_events')),
],
'tools': [
('View request environment', reverse('amo.env')),
('Manage elasticsearch', reverse('zadmin.elastic')),
('Purge data from memcache', reverse('zadmin.memcache')),
('View event log', reverse('admin:editors_eventlog_changelist')),
('View addon log', reverse('admin:devhub_activitylog_changelist')),
- ('Generate error', reverse('zadmin.generate-error')),
('Site Status', reverse('amo.monitor')),
],
}
|
Remove generate error page from admin site
|
## Code Before:
from jingo import register
from olympia.amo.urlresolvers import reverse
@register.function
def admin_site_links():
return {
'addons': [
('Search for add-ons by name or id',
reverse('zadmin.addon-search')),
('Featured add-ons', reverse('zadmin.features')),
('Discovery Pane promo modules',
reverse('discovery.module_admin')),
('Monthly Pick', reverse('zadmin.monthly_pick')),
('Bulk add-on validation', reverse('zadmin.validation')),
('Fake mail', reverse('zadmin.mail')),
('ACR Reports', reverse('zadmin.compat')),
('Email Add-on Developers', reverse('zadmin.email_devs')),
],
'users': [
('Configure groups', reverse('admin:access_group_changelist')),
],
'settings': [
('View site settings', reverse('zadmin.settings')),
('Django admin pages', reverse('zadmin.home')),
('Site Events', reverse('zadmin.site_events')),
],
'tools': [
('View request environment', reverse('amo.env')),
('Manage elasticsearch', reverse('zadmin.elastic')),
('Purge data from memcache', reverse('zadmin.memcache')),
('View event log', reverse('admin:editors_eventlog_changelist')),
('View addon log', reverse('admin:devhub_activitylog_changelist')),
('Generate error', reverse('zadmin.generate-error')),
('Site Status', reverse('amo.monitor')),
],
}
## Instruction:
Remove generate error page from admin site
## Code After:
from jingo import register
from olympia.amo.urlresolvers import reverse
@register.function
def admin_site_links():
return {
'addons': [
('Search for add-ons by name or id',
reverse('zadmin.addon-search')),
('Featured add-ons', reverse('zadmin.features')),
('Discovery Pane promo modules',
reverse('discovery.module_admin')),
('Monthly Pick', reverse('zadmin.monthly_pick')),
('Bulk add-on validation', reverse('zadmin.validation')),
('Fake mail', reverse('zadmin.mail')),
('ACR Reports', reverse('zadmin.compat')),
('Email Add-on Developers', reverse('zadmin.email_devs')),
],
'users': [
('Configure groups', reverse('admin:access_group_changelist')),
],
'settings': [
('View site settings', reverse('zadmin.settings')),
('Django admin pages', reverse('zadmin.home')),
('Site Events', reverse('zadmin.site_events')),
],
'tools': [
('View request environment', reverse('amo.env')),
('Manage elasticsearch', reverse('zadmin.elastic')),
('Purge data from memcache', reverse('zadmin.memcache')),
('View event log', reverse('admin:editors_eventlog_changelist')),
('View addon log', reverse('admin:devhub_activitylog_changelist')),
('Site Status', reverse('amo.monitor')),
],
}
|
...
('View addon log', reverse('admin:devhub_activitylog_changelist')),
('Site Status', reverse('amo.monitor')),
...
|
56b23bc44655e4a965939ceb5908cd84cfd9de88
|
src/room.py
|
src/room.py
|
class Room(object):
""" This class is responsible for managing the people in a room """
|
class Room(object):
""" This class is responsible for managing the people in a room """
def __init__(self, room_type, room_name):
self.residents = []
self.room_name = room_name
self.room_type =room_type
if room_type == "office":
self.maximum_no_of_people = 6
else:
self.maximum_no_of_people = 4
|
Add init method to class Room
|
Add init method to class Room
|
Python
|
mit
|
EdwinKato/Space-Allocator,EdwinKato/Space-Allocator
|
class Room(object):
""" This class is responsible for managing the people in a room """
+ def __init__(self, room_type, room_name):
+ self.residents = []
+ self.room_name = room_name
+ self.room_type =room_type
+ if room_type == "office":
+ self.maximum_no_of_people = 6
+ else:
+ self.maximum_no_of_people = 4
|
Add init method to class Room
|
## Code Before:
class Room(object):
""" This class is responsible for managing the people in a room """
## Instruction:
Add init method to class Room
## Code After:
class Room(object):
""" This class is responsible for managing the people in a room """
def __init__(self, room_type, room_name):
self.residents = []
self.room_name = room_name
self.room_type =room_type
if room_type == "office":
self.maximum_no_of_people = 6
else:
self.maximum_no_of_people = 4
|
...
def __init__(self, room_type, room_name):
self.residents = []
self.room_name = room_name
self.room_type =room_type
if room_type == "office":
self.maximum_no_of_people = 6
else:
self.maximum_no_of_people = 4
...
|
c32118b2157e6c2cfd435461ee23edfa79aa917e
|
api/__init__.py
|
api/__init__.py
|
import ConfigParser
from peewee import *
config = ConfigParser.RawConfigParser()
config.read('server.conf')
database = SqliteDatabase('gallery.db')
from collection import CollectionModel
from album import AlbumModel
from user import UserModel, UserResource
from photo import PhotoModel
database.create_tables([PhotoModel, AlbumModel, UserModel, CollectionModel], True)
if UserModel.select().count() == 0:
UserModel.create(
admin = True,
name = 'Admin',
username = 'Admin',
password = '$2a$12$pMtKl1b7h1sFKbMdBvPqbuza1tJN2ZNNAFMEs1RQmwqYTbBwrrKpy'
)
from boto.s3.connection import S3Connection
s3 = S3Connection(config.get('S3', 'AccessKey'), config.get('S3', 'SecretKey'))
if s3.lookup(config.get('S3', 'Bucket')) is None:
s3.create_bucket(config.get('S3', 'Bucket'))
from flask import Flask
from flask.ext.restful import Api
app = Flask(__name__)
api = Api(app)
|
import ConfigParser
from peewee import *
config = ConfigParser.RawConfigParser()
config.read('server.conf')
database = SqliteDatabase('gallery.db', threadlocals=True)
from collection import CollectionModel
from album import AlbumModel
from user import UserModel, UsersResource
from photo import PhotoModel
database.create_tables([PhotoModel, AlbumModel, UserModel, CollectionModel], True)
if UserModel.select().count() == 0:
UserModel.create(
admin = True,
name = 'Admin',
username = 'Admin',
password = '$2a$12$pMtKl1b7h1sFKbMdBvPqbuza1tJN2ZNNAFMEs1RQmwqYTbBwrrKpy'
)
from boto.s3.connection import S3Connection
s3 = S3Connection(config.get('S3', 'AccessKey'), config.get('S3', 'SecretKey'))
if s3.lookup(config.get('S3', 'Bucket')) is None:
s3.create_bucket(config.get('S3', 'Bucket'))
from flask import Flask
from flask.ext.restful import Api
app = Flask(__name__)
api = Api(app)
api.add_resource(UsersResource, '/users/')
|
Set local threads to true for peewee
|
Set local threads to true for peewee
|
Python
|
unlicense
|
karousel/karousel
|
import ConfigParser
from peewee import *
config = ConfigParser.RawConfigParser()
config.read('server.conf')
- database = SqliteDatabase('gallery.db')
+ database = SqliteDatabase('gallery.db', threadlocals=True)
from collection import CollectionModel
from album import AlbumModel
- from user import UserModel, UserResource
+ from user import UserModel, UsersResource
from photo import PhotoModel
database.create_tables([PhotoModel, AlbumModel, UserModel, CollectionModel], True)
if UserModel.select().count() == 0:
UserModel.create(
admin = True,
name = 'Admin',
username = 'Admin',
password = '$2a$12$pMtKl1b7h1sFKbMdBvPqbuza1tJN2ZNNAFMEs1RQmwqYTbBwrrKpy'
)
from boto.s3.connection import S3Connection
s3 = S3Connection(config.get('S3', 'AccessKey'), config.get('S3', 'SecretKey'))
if s3.lookup(config.get('S3', 'Bucket')) is None:
s3.create_bucket(config.get('S3', 'Bucket'))
from flask import Flask
from flask.ext.restful import Api
app = Flask(__name__)
api = Api(app)
+ api.add_resource(UsersResource, '/users/')
+
|
Set local threads to true for peewee
|
## Code Before:
import ConfigParser
from peewee import *
config = ConfigParser.RawConfigParser()
config.read('server.conf')
database = SqliteDatabase('gallery.db')
from collection import CollectionModel
from album import AlbumModel
from user import UserModel, UserResource
from photo import PhotoModel
database.create_tables([PhotoModel, AlbumModel, UserModel, CollectionModel], True)
if UserModel.select().count() == 0:
UserModel.create(
admin = True,
name = 'Admin',
username = 'Admin',
password = '$2a$12$pMtKl1b7h1sFKbMdBvPqbuza1tJN2ZNNAFMEs1RQmwqYTbBwrrKpy'
)
from boto.s3.connection import S3Connection
s3 = S3Connection(config.get('S3', 'AccessKey'), config.get('S3', 'SecretKey'))
if s3.lookup(config.get('S3', 'Bucket')) is None:
s3.create_bucket(config.get('S3', 'Bucket'))
from flask import Flask
from flask.ext.restful import Api
app = Flask(__name__)
api = Api(app)
## Instruction:
Set local threads to true for peewee
## Code After:
import ConfigParser
from peewee import *
config = ConfigParser.RawConfigParser()
config.read('server.conf')
database = SqliteDatabase('gallery.db', threadlocals=True)
from collection import CollectionModel
from album import AlbumModel
from user import UserModel, UsersResource
from photo import PhotoModel
database.create_tables([PhotoModel, AlbumModel, UserModel, CollectionModel], True)
if UserModel.select().count() == 0:
UserModel.create(
admin = True,
name = 'Admin',
username = 'Admin',
password = '$2a$12$pMtKl1b7h1sFKbMdBvPqbuza1tJN2ZNNAFMEs1RQmwqYTbBwrrKpy'
)
from boto.s3.connection import S3Connection
s3 = S3Connection(config.get('S3', 'AccessKey'), config.get('S3', 'SecretKey'))
if s3.lookup(config.get('S3', 'Bucket')) is None:
s3.create_bucket(config.get('S3', 'Bucket'))
from flask import Flask
from flask.ext.restful import Api
app = Flask(__name__)
api = Api(app)
api.add_resource(UsersResource, '/users/')
|
# ... existing code ...
database = SqliteDatabase('gallery.db', threadlocals=True)
# ... modified code ...
from album import AlbumModel
from user import UserModel, UsersResource
from photo import PhotoModel
...
api = Api(app)
api.add_resource(UsersResource, '/users/')
# ... rest of the code ...
|
47a7770bd3c5552d61f69b7df62bf4c36de56dc8
|
wysteria/__init__.py
|
wysteria/__init__.py
|
from wysteria.client import Client, TlsConfig
from wysteria import errors
from wysteria.constants import FACET_COLLECTION
from wysteria.constants import FACET_ITEM_TYPE
from wysteria.constants import FACET_ITEM_VARIANT
__all__ = [
"Client",
"TlsConfig",
"errors",
"FACET_COLLECTION",
"FACET_ITEM_TYPE",
"FACET_ITEM_VARIANT",
]
|
from wysteria.client import Client
from wysteria import errors
from wysteria.constants import FACET_COLLECTION
from wysteria.constants import FACET_ITEM_TYPE
from wysteria.constants import FACET_ITEM_VARIANT
from wysteria.constants import FACET_LINK_TYPE
from wysteria.constants import VALUE_LINK_TYPE_VERSION
from wysteria.constants import VALUE_LINK_TYPE_ITEM
from wysteria.utils import default_client
from wysteria.utils import from_config
__all__ = [
"Client",
"errors",
"default_client",
"from_config",
"FACET_COLLECTION",
"FACET_ITEM_TYPE",
"FACET_ITEM_VARIANT",
"FACET_LINK_TYPE",
"VALUE_LINK_TYPE_VERSION",
"VALUE_LINK_TYPE_ITEM",
]
|
Add module level imports and doc strings
|
Add module level imports and doc strings
|
Python
|
bsd-3-clause
|
voidshard/pywysteria,voidshard/pywysteria
|
- from wysteria.client import Client, TlsConfig
+ from wysteria.client import Client
from wysteria import errors
from wysteria.constants import FACET_COLLECTION
from wysteria.constants import FACET_ITEM_TYPE
from wysteria.constants import FACET_ITEM_VARIANT
+ from wysteria.constants import FACET_LINK_TYPE
+ from wysteria.constants import VALUE_LINK_TYPE_VERSION
+ from wysteria.constants import VALUE_LINK_TYPE_ITEM
+ from wysteria.utils import default_client
+ from wysteria.utils import from_config
__all__ = [
"Client",
- "TlsConfig",
"errors",
+ "default_client",
+ "from_config",
"FACET_COLLECTION",
"FACET_ITEM_TYPE",
"FACET_ITEM_VARIANT",
+ "FACET_LINK_TYPE",
+ "VALUE_LINK_TYPE_VERSION",
+ "VALUE_LINK_TYPE_ITEM",
]
|
Add module level imports and doc strings
|
## Code Before:
from wysteria.client import Client, TlsConfig
from wysteria import errors
from wysteria.constants import FACET_COLLECTION
from wysteria.constants import FACET_ITEM_TYPE
from wysteria.constants import FACET_ITEM_VARIANT
__all__ = [
"Client",
"TlsConfig",
"errors",
"FACET_COLLECTION",
"FACET_ITEM_TYPE",
"FACET_ITEM_VARIANT",
]
## Instruction:
Add module level imports and doc strings
## Code After:
from wysteria.client import Client
from wysteria import errors
from wysteria.constants import FACET_COLLECTION
from wysteria.constants import FACET_ITEM_TYPE
from wysteria.constants import FACET_ITEM_VARIANT
from wysteria.constants import FACET_LINK_TYPE
from wysteria.constants import VALUE_LINK_TYPE_VERSION
from wysteria.constants import VALUE_LINK_TYPE_ITEM
from wysteria.utils import default_client
from wysteria.utils import from_config
__all__ = [
"Client",
"errors",
"default_client",
"from_config",
"FACET_COLLECTION",
"FACET_ITEM_TYPE",
"FACET_ITEM_VARIANT",
"FACET_LINK_TYPE",
"VALUE_LINK_TYPE_VERSION",
"VALUE_LINK_TYPE_ITEM",
]
|
...
from wysteria.client import Client
from wysteria import errors
...
from wysteria.constants import FACET_ITEM_VARIANT
from wysteria.constants import FACET_LINK_TYPE
from wysteria.constants import VALUE_LINK_TYPE_VERSION
from wysteria.constants import VALUE_LINK_TYPE_ITEM
from wysteria.utils import default_client
from wysteria.utils import from_config
...
"Client",
"errors",
"default_client",
"from_config",
"FACET_COLLECTION",
...
"FACET_ITEM_VARIANT",
"FACET_LINK_TYPE",
"VALUE_LINK_TYPE_VERSION",
"VALUE_LINK_TYPE_ITEM",
]
...
|
cf58ebf492cd0dfaf640d2fd8d3cf4e5b2706424
|
alembic/versions/47dd43c1491_create_category_tabl.py
|
alembic/versions/47dd43c1491_create_category_tabl.py
|
# revision identifiers, used by Alembic.
revision = '47dd43c1491'
down_revision = '27bf0aefa49d'
from alembic import op
import sqlalchemy as sa
import datetime
def make_timestamp():
now = datetime.datetime.utcnow()
return now.isoformat()
def upgrade():
op.create_table(
'category',
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('name', sa.Text, nullable=False, unique=True),
sa.Column('short_name', sa.Text, nullable=False, unique=True),
sa.Column('created', sa.Text, default=make_timestamp),
)
def downgrade():
op.drop_table('category')
|
# revision identifiers, used by Alembic.
revision = '47dd43c1491'
down_revision = '27bf0aefa49d'
from alembic import op
import sqlalchemy as sa
import datetime
def make_timestamp():
now = datetime.datetime.utcnow()
return now.isoformat()
def upgrade():
op.create_table(
'category',
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('name', sa.Text, nullable=False, unique=True),
sa.Column('short_name', sa.Text, nullable=False, unique=True),
sa.Column('description', sa.Text, nullable=False),
sa.Column('created', sa.Text, default=make_timestamp),
)
# Add two categories
query = 'INSERT INTO category (name, short_name, description) VALUES (\'Thinking\', \'thinking\', \'Applications where you can help using your skills\')'
op.execute(query)
query = 'INSERT INTO category (name, short_name, description) VALUES (\'Sensing\', \'sensing\', \'Applications where you can help gathering data\')'
op.execute(query)
def downgrade():
op.drop_table('category')
|
Add description to the table and populate it with two categories
|
Add description to the table and populate it with two categories
|
Python
|
agpl-3.0
|
geotagx/geotagx-pybossa-archive,OpenNewsLabs/pybossa,PyBossa/pybossa,proyectos-analizo-info/pybossa-analizo-info,Scifabric/pybossa,CulturePlex/pybossa,geotagx/pybossa,proyectos-analizo-info/pybossa-analizo-info,CulturePlex/pybossa,OpenNewsLabs/pybossa,geotagx/geotagx-pybossa-archive,harihpr/tweetclickers,geotagx/geotagx-pybossa-archive,PyBossa/pybossa,geotagx/pybossa,harihpr/tweetclickers,stefanhahmann/pybossa,stefanhahmann/pybossa,inteligencia-coletiva-lsd/pybossa,proyectos-analizo-info/pybossa-analizo-info,Scifabric/pybossa,jean/pybossa,inteligencia-coletiva-lsd/pybossa,jean/pybossa,CulturePlex/pybossa,geotagx/geotagx-pybossa-archive,geotagx/geotagx-pybossa-archive
|
# revision identifiers, used by Alembic.
revision = '47dd43c1491'
down_revision = '27bf0aefa49d'
from alembic import op
import sqlalchemy as sa
import datetime
def make_timestamp():
now = datetime.datetime.utcnow()
return now.isoformat()
def upgrade():
op.create_table(
'category',
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('name', sa.Text, nullable=False, unique=True),
sa.Column('short_name', sa.Text, nullable=False, unique=True),
+ sa.Column('description', sa.Text, nullable=False),
sa.Column('created', sa.Text, default=make_timestamp),
)
+
+ # Add two categories
+ query = 'INSERT INTO category (name, short_name, description) VALUES (\'Thinking\', \'thinking\', \'Applications where you can help using your skills\')'
+ op.execute(query)
+ query = 'INSERT INTO category (name, short_name, description) VALUES (\'Sensing\', \'sensing\', \'Applications where you can help gathering data\')'
+ op.execute(query)
def downgrade():
op.drop_table('category')
|
Add description to the table and populate it with two categories
|
## Code Before:
# revision identifiers, used by Alembic.
revision = '47dd43c1491'
down_revision = '27bf0aefa49d'
from alembic import op
import sqlalchemy as sa
import datetime
def make_timestamp():
now = datetime.datetime.utcnow()
return now.isoformat()
def upgrade():
op.create_table(
'category',
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('name', sa.Text, nullable=False, unique=True),
sa.Column('short_name', sa.Text, nullable=False, unique=True),
sa.Column('created', sa.Text, default=make_timestamp),
)
def downgrade():
op.drop_table('category')
## Instruction:
Add description to the table and populate it with two categories
## Code After:
# revision identifiers, used by Alembic.
revision = '47dd43c1491'
down_revision = '27bf0aefa49d'
from alembic import op
import sqlalchemy as sa
import datetime
def make_timestamp():
now = datetime.datetime.utcnow()
return now.isoformat()
def upgrade():
op.create_table(
'category',
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('name', sa.Text, nullable=False, unique=True),
sa.Column('short_name', sa.Text, nullable=False, unique=True),
sa.Column('description', sa.Text, nullable=False),
sa.Column('created', sa.Text, default=make_timestamp),
)
# Add two categories
query = 'INSERT INTO category (name, short_name, description) VALUES (\'Thinking\', \'thinking\', \'Applications where you can help using your skills\')'
op.execute(query)
query = 'INSERT INTO category (name, short_name, description) VALUES (\'Sensing\', \'sensing\', \'Applications where you can help gathering data\')'
op.execute(query)
def downgrade():
op.drop_table('category')
|
// ... existing code ...
sa.Column('short_name', sa.Text, nullable=False, unique=True),
sa.Column('description', sa.Text, nullable=False),
sa.Column('created', sa.Text, default=make_timestamp),
// ... modified code ...
)
# Add two categories
query = 'INSERT INTO category (name, short_name, description) VALUES (\'Thinking\', \'thinking\', \'Applications where you can help using your skills\')'
op.execute(query)
query = 'INSERT INTO category (name, short_name, description) VALUES (\'Sensing\', \'sensing\', \'Applications where you can help gathering data\')'
op.execute(query)
// ... rest of the code ...
|
5eced1c1cb9253d73e3246dccb4c33e5ba154fd3
|
rcbi/rcbi/spiders/FlyduinoSpider.py
|
rcbi/rcbi/spiders/FlyduinoSpider.py
|
import scrapy
from scrapy import log
from scrapy.contrib.spiders import SitemapSpider, Rule
from scrapy.contrib.linkextractors import LinkExtractor
from rcbi.items import Part
MANUFACTURERS = ["Rctimer", "RCTimer", "BaseCam", "Elgae", "ELGAE", "ArduFlyer", "Boscam", "T-Motor", "HQProp", "Suppo", "Flyduino", "SLS", "Frsky"]
CORRECT = {"Rctimer": "RCTimer", "ELGAE": "Elgae", "Frsky": "FrSky"}
class FlyduinoSpider(SitemapSpider):
name = "flyduino"
allowed_domains = ["flyduino.net"]
sitemap_urls = ["http://flyduino.net/sitemap.xml"]
def parse(self, response):
item = Part()
item["site"] = "flyduino"
item["url"] = response.url
product_name = response.css("div.hproduct")
if not product_name:
return
item["name"] = product_name[0].xpath("//h1/text()").extract()[0]
for m in MANUFACTURERS:
if item["name"].startswith(m):
if m in CORRECT:
m = CORRECT[m]
item["manufacturer"] = m
item["name"] = item["name"][len(m):].strip()
break
return item
|
import scrapy
from scrapy import log
from scrapy.spiders import CrawlSpider, Rule
from scrapy.linkextractors import LinkExtractor
from rcbi.items import Part
MANUFACTURERS = ["Rctimer", "RCTimer", "BaseCam", "Elgae", "ELGAE", "ArduFlyer", "Boscam", "T-Motor", "HQProp", "Suppo", "Flyduino", "SLS", "Frsky"]
CORRECT = {"Rctimer": "RCTimer", "ELGAE": "Elgae", "Frsky": "FrSky"}
class FlyduinoSpider(CrawlSpider):
name = "flyduino"
allowed_domains = ["flyduino.net"]
start_urls = ["http://flyduino.net/"]
rules = (
# Extract links matching 'category.php' (but not matching 'subsection.php')
# and follow links from them (since no callback means follow=True by default).
Rule(LinkExtractor(restrict_css=".categories")),
# Extract links matching 'item.php' and parse them with the spider's method parse_item
Rule(LinkExtractor(restrict_css=".article_wrapper h3"), callback='parse_item'),
)
def parse_item(self, response):
item = Part()
item["site"] = "flyduino"
item["url"] = response.url
product_name = response.css("div.hproduct")
if not product_name:
return
item["name"] = product_name[0].xpath("//h1/text()").extract()[0]
for m in MANUFACTURERS:
if item["name"].startswith(m):
if m in CORRECT:
m = CORRECT[m]
item["manufacturer"] = m
item["name"] = item["name"][len(m):].strip()
break
return item
|
Stop using the Flyduino sitemap.
|
Stop using the Flyduino sitemap.
|
Python
|
apache-2.0
|
rcbuild-info/scrape,rcbuild-info/scrape
|
import scrapy
from scrapy import log
- from scrapy.contrib.spiders import SitemapSpider, Rule
+ from scrapy.spiders import CrawlSpider, Rule
- from scrapy.contrib.linkextractors import LinkExtractor
+ from scrapy.linkextractors import LinkExtractor
from rcbi.items import Part
MANUFACTURERS = ["Rctimer", "RCTimer", "BaseCam", "Elgae", "ELGAE", "ArduFlyer", "Boscam", "T-Motor", "HQProp", "Suppo", "Flyduino", "SLS", "Frsky"]
CORRECT = {"Rctimer": "RCTimer", "ELGAE": "Elgae", "Frsky": "FrSky"}
- class FlyduinoSpider(SitemapSpider):
+ class FlyduinoSpider(CrawlSpider):
name = "flyduino"
allowed_domains = ["flyduino.net"]
- sitemap_urls = ["http://flyduino.net/sitemap.xml"]
+ start_urls = ["http://flyduino.net/"]
+
+ rules = (
+ # Extract links matching 'category.php' (but not matching 'subsection.php')
+ # and follow links from them (since no callback means follow=True by default).
+ Rule(LinkExtractor(restrict_css=".categories")),
+
+ # Extract links matching 'item.php' and parse them with the spider's method parse_item
+ Rule(LinkExtractor(restrict_css=".article_wrapper h3"), callback='parse_item'),
-
+ )
+
- def parse(self, response):
+ def parse_item(self, response):
- item = Part()
+ item = Part()
- item["site"] = "flyduino"
+ item["site"] = "flyduino"
- item["url"] = response.url
+ item["url"] = response.url
- product_name = response.css("div.hproduct")
+ product_name = response.css("div.hproduct")
- if not product_name:
+ if not product_name:
- return
+ return
- item["name"] = product_name[0].xpath("//h1/text()").extract()[0]
+ item["name"] = product_name[0].xpath("//h1/text()").extract()[0]
- for m in MANUFACTURERS:
+ for m in MANUFACTURERS:
- if item["name"].startswith(m):
+ if item["name"].startswith(m):
- if m in CORRECT:
+ if m in CORRECT:
- m = CORRECT[m]
+ m = CORRECT[m]
- item["manufacturer"] = m
+ item["manufacturer"] = m
- item["name"] = item["name"][len(m):].strip()
+ item["name"] = item["name"][len(m):].strip()
- break
+ break
- return item
+ return item
+
|
Stop using the Flyduino sitemap.
|
## Code Before:
import scrapy
from scrapy import log
from scrapy.contrib.spiders import SitemapSpider, Rule
from scrapy.contrib.linkextractors import LinkExtractor
from rcbi.items import Part
MANUFACTURERS = ["Rctimer", "RCTimer", "BaseCam", "Elgae", "ELGAE", "ArduFlyer", "Boscam", "T-Motor", "HQProp", "Suppo", "Flyduino", "SLS", "Frsky"]
CORRECT = {"Rctimer": "RCTimer", "ELGAE": "Elgae", "Frsky": "FrSky"}
class FlyduinoSpider(SitemapSpider):
name = "flyduino"
allowed_domains = ["flyduino.net"]
sitemap_urls = ["http://flyduino.net/sitemap.xml"]
def parse(self, response):
item = Part()
item["site"] = "flyduino"
item["url"] = response.url
product_name = response.css("div.hproduct")
if not product_name:
return
item["name"] = product_name[0].xpath("//h1/text()").extract()[0]
for m in MANUFACTURERS:
if item["name"].startswith(m):
if m in CORRECT:
m = CORRECT[m]
item["manufacturer"] = m
item["name"] = item["name"][len(m):].strip()
break
return item
## Instruction:
Stop using the Flyduino sitemap.
## Code After:
import scrapy
from scrapy import log
from scrapy.spiders import CrawlSpider, Rule
from scrapy.linkextractors import LinkExtractor
from rcbi.items import Part
MANUFACTURERS = ["Rctimer", "RCTimer", "BaseCam", "Elgae", "ELGAE", "ArduFlyer", "Boscam", "T-Motor", "HQProp", "Suppo", "Flyduino", "SLS", "Frsky"]
CORRECT = {"Rctimer": "RCTimer", "ELGAE": "Elgae", "Frsky": "FrSky"}
class FlyduinoSpider(CrawlSpider):
name = "flyduino"
allowed_domains = ["flyduino.net"]
start_urls = ["http://flyduino.net/"]
rules = (
# Extract links matching 'category.php' (but not matching 'subsection.php')
# and follow links from them (since no callback means follow=True by default).
Rule(LinkExtractor(restrict_css=".categories")),
# Extract links matching 'item.php' and parse them with the spider's method parse_item
Rule(LinkExtractor(restrict_css=".article_wrapper h3"), callback='parse_item'),
)
def parse_item(self, response):
item = Part()
item["site"] = "flyduino"
item["url"] = response.url
product_name = response.css("div.hproduct")
if not product_name:
return
item["name"] = product_name[0].xpath("//h1/text()").extract()[0]
for m in MANUFACTURERS:
if item["name"].startswith(m):
if m in CORRECT:
m = CORRECT[m]
item["manufacturer"] = m
item["name"] = item["name"][len(m):].strip()
break
return item
|
# ... existing code ...
from scrapy import log
from scrapy.spiders import CrawlSpider, Rule
from scrapy.linkextractors import LinkExtractor
from rcbi.items import Part
# ... modified code ...
CORRECT = {"Rctimer": "RCTimer", "ELGAE": "Elgae", "Frsky": "FrSky"}
class FlyduinoSpider(CrawlSpider):
name = "flyduino"
...
allowed_domains = ["flyduino.net"]
start_urls = ["http://flyduino.net/"]
rules = (
# Extract links matching 'category.php' (but not matching 'subsection.php')
# and follow links from them (since no callback means follow=True by default).
Rule(LinkExtractor(restrict_css=".categories")),
# Extract links matching 'item.php' and parse them with the spider's method parse_item
Rule(LinkExtractor(restrict_css=".article_wrapper h3"), callback='parse_item'),
)
def parse_item(self, response):
item = Part()
item["site"] = "flyduino"
item["url"] = response.url
product_name = response.css("div.hproduct")
if not product_name:
return
item["name"] = product_name[0].xpath("//h1/text()").extract()[0]
for m in MANUFACTURERS:
if item["name"].startswith(m):
if m in CORRECT:
m = CORRECT[m]
item["manufacturer"] = m
item["name"] = item["name"][len(m):].strip()
break
return item
# ... rest of the code ...
|
c692038646417dfcd2e41f186b5814b3978847b6
|
conf_site/core/context_processors.py
|
conf_site/core/context_processors.py
|
from django.conf import settings
from django.utils import timezone
import pytz
def core_context(self):
"""Context processor for elements appearing on every page."""
context = {}
context["google_analytics_id"] = settings.GOOGLE_ANALYTICS_PROPERTY_ID
context["sentry_public_dsn"] = settings.SENTRY_PUBLIC_DSN
return context
def time_zone_context(self):
context = {}
# Duplicate the functionality of django.template.context_processors.tz.
context["TIME_ZONE"] = timezone.get_current_timezone_name()
# Add a list of time zones to the context.
context["TIME_ZONES"] = pytz.common_timezones
return context
|
from django.conf import settings
from django.contrib.sites.models import Site
from django.utils import timezone
import pytz
def core_context(self):
"""Context processor for elements appearing on every page."""
context = {}
context["conference_title"] = Site.objects.get_current().name
context["google_analytics_id"] = settings.GOOGLE_ANALYTICS_PROPERTY_ID
context["sentry_public_dsn"] = settings.SENTRY_PUBLIC_DSN
return context
def time_zone_context(self):
context = {}
# Duplicate the functionality of django.template.context_processors.tz.
context["TIME_ZONE"] = timezone.get_current_timezone_name()
# Add a list of time zones to the context.
context["TIME_ZONES"] = pytz.common_timezones
return context
|
Fix conference title context processor.
|
Fix conference title context processor.
|
Python
|
mit
|
pydata/conf_site,pydata/conf_site,pydata/conf_site
|
from django.conf import settings
+ from django.contrib.sites.models import Site
from django.utils import timezone
import pytz
def core_context(self):
"""Context processor for elements appearing on every page."""
context = {}
+ context["conference_title"] = Site.objects.get_current().name
context["google_analytics_id"] = settings.GOOGLE_ANALYTICS_PROPERTY_ID
context["sentry_public_dsn"] = settings.SENTRY_PUBLIC_DSN
return context
def time_zone_context(self):
context = {}
# Duplicate the functionality of django.template.context_processors.tz.
context["TIME_ZONE"] = timezone.get_current_timezone_name()
# Add a list of time zones to the context.
context["TIME_ZONES"] = pytz.common_timezones
return context
|
Fix conference title context processor.
|
## Code Before:
from django.conf import settings
from django.utils import timezone
import pytz
def core_context(self):
"""Context processor for elements appearing on every page."""
context = {}
context["google_analytics_id"] = settings.GOOGLE_ANALYTICS_PROPERTY_ID
context["sentry_public_dsn"] = settings.SENTRY_PUBLIC_DSN
return context
def time_zone_context(self):
context = {}
# Duplicate the functionality of django.template.context_processors.tz.
context["TIME_ZONE"] = timezone.get_current_timezone_name()
# Add a list of time zones to the context.
context["TIME_ZONES"] = pytz.common_timezones
return context
## Instruction:
Fix conference title context processor.
## Code After:
from django.conf import settings
from django.contrib.sites.models import Site
from django.utils import timezone
import pytz
def core_context(self):
"""Context processor for elements appearing on every page."""
context = {}
context["conference_title"] = Site.objects.get_current().name
context["google_analytics_id"] = settings.GOOGLE_ANALYTICS_PROPERTY_ID
context["sentry_public_dsn"] = settings.SENTRY_PUBLIC_DSN
return context
def time_zone_context(self):
context = {}
# Duplicate the functionality of django.template.context_processors.tz.
context["TIME_ZONE"] = timezone.get_current_timezone_name()
# Add a list of time zones to the context.
context["TIME_ZONES"] = pytz.common_timezones
return context
|
...
from django.conf import settings
from django.contrib.sites.models import Site
from django.utils import timezone
...
context = {}
context["conference_title"] = Site.objects.get_current().name
context["google_analytics_id"] = settings.GOOGLE_ANALYTICS_PROPERTY_ID
...
|
d4412f8573dbfc1b06f2a298cc5c3042c6c468e6
|
tests/test_api.py
|
tests/test_api.py
|
from django.test import TestCase
from django_snooze import apis
class APITestCase(TestCase):
def setUp(self):
"""Sets up an API object to play with.
:returns: None
"""
self.api = apis.api
self.api.discover_models()
def test_apps(self):
"""Test if the right apps are present.
:returns: None
"""
self.assertIn('tests', self.api._resources.keys())
self.assertIn('auth', self.api._resources.keys())
|
from django.test import TestCase
from django_snooze import apis
class APITestCase(TestCase):
def setUp(self):
"""Sets up an API object to play with.
:returns: None
"""
self.api = apis.api
self.api.discover_models()
def test_apps(self):
"""Test if the right apps are present.
:returns: None
"""
self.assertIn('tests', self.api._resources.keys())
self.assertIn('auth', self.api._resources.keys())
tests_models = [x.model_name for x in self.api._resources['tests']]
self.assertNotIn('abstract', tests_models)
|
Test to see if abstract classes sneak in.
|
Test to see if abstract classes sneak in.
Now that get_models has been found to skip abstract classes, we want to test
for this in case this behaviour ever changes.
|
Python
|
bsd-3-clause
|
ainmosni/django-snooze,ainmosni/django-snooze
|
from django.test import TestCase
from django_snooze import apis
class APITestCase(TestCase):
def setUp(self):
"""Sets up an API object to play with.
:returns: None
"""
self.api = apis.api
self.api.discover_models()
def test_apps(self):
"""Test if the right apps are present.
:returns: None
"""
self.assertIn('tests', self.api._resources.keys())
self.assertIn('auth', self.api._resources.keys())
+ tests_models = [x.model_name for x in self.api._resources['tests']]
+ self.assertNotIn('abstract', tests_models)
|
Test to see if abstract classes sneak in.
|
## Code Before:
from django.test import TestCase
from django_snooze import apis
class APITestCase(TestCase):
def setUp(self):
"""Sets up an API object to play with.
:returns: None
"""
self.api = apis.api
self.api.discover_models()
def test_apps(self):
"""Test if the right apps are present.
:returns: None
"""
self.assertIn('tests', self.api._resources.keys())
self.assertIn('auth', self.api._resources.keys())
## Instruction:
Test to see if abstract classes sneak in.
## Code After:
from django.test import TestCase
from django_snooze import apis
class APITestCase(TestCase):
def setUp(self):
"""Sets up an API object to play with.
:returns: None
"""
self.api = apis.api
self.api.discover_models()
def test_apps(self):
"""Test if the right apps are present.
:returns: None
"""
self.assertIn('tests', self.api._resources.keys())
self.assertIn('auth', self.api._resources.keys())
tests_models = [x.model_name for x in self.api._resources['tests']]
self.assertNotIn('abstract', tests_models)
|
...
self.assertIn('auth', self.api._resources.keys())
tests_models = [x.model_name for x in self.api._resources['tests']]
self.assertNotIn('abstract', tests_models)
...
|
37c1d6ae1345fbab7aea4404933d78d4b939bbc2
|
hoomd/filters.py
|
hoomd/filters.py
|
import hoomd._hoomd as _hoomd
class ParticleFilterID:
def __init__(self, *args, **kwargs):
args_str = ''.join([str(arg) for arg in args])
kwargs_str = ''.join([str(value)for value in kwargs.values()])
self.args_str = args_str
self.kwargs_str = kwargs_str
_id = hash(self.__class__.__name__ + args_str + kwargs_str)
self._id = _id
def __hash__(self):
return self._id
def __eq__(self, other):
return self._id == other._id
class All(ParticleFilterID, _hoomd.ParticleFilterAll):
def __init__(self):
ParticleFilterID.__init__(self)
_hoomd.ParticleFilterAll(self)
|
import hoomd._hoomd as _hoomd
import numpy as np
class ParticleFilter:
def __init__(self, *args, **kwargs):
args_str = ''.join([repr(arg) if not isinstance(arg, np.ndarray)
else repr(list(arg)) for arg in args])
kwargs_str = ''.join([repr(value) if not isinstance(value, np.ndarray)
else repr(list(value))
for value in kwargs.values()])
self.args_str = args_str
self.kwargs_str = kwargs_str
_id = hash(self.__class__.__name__ + args_str + kwargs_str)
self._id = _id
def __hash__(self):
return self._id
def __eq__(self, other):
return self._id == other._id
class All(ParticleFilterID, _hoomd.ParticleFilterAll):
def __init__(self):
ParticleFilterID.__init__(self)
_hoomd.ParticleFilterAll(self)
|
Change hashing for ParticleFilter python class
|
Change hashing for ParticleFilter python class
|
Python
|
bsd-3-clause
|
joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue
|
import hoomd._hoomd as _hoomd
+ import numpy as np
- class ParticleFilterID:
+ class ParticleFilter:
def __init__(self, *args, **kwargs):
- args_str = ''.join([str(arg) for arg in args])
- kwargs_str = ''.join([str(value)for value in kwargs.values()])
+ args_str = ''.join([repr(arg) if not isinstance(arg, np.ndarray)
+ else repr(list(arg)) for arg in args])
+ kwargs_str = ''.join([repr(value) if not isinstance(value, np.ndarray)
+ else repr(list(value))
+ for value in kwargs.values()])
self.args_str = args_str
self.kwargs_str = kwargs_str
_id = hash(self.__class__.__name__ + args_str + kwargs_str)
self._id = _id
def __hash__(self):
return self._id
def __eq__(self, other):
return self._id == other._id
class All(ParticleFilterID, _hoomd.ParticleFilterAll):
def __init__(self):
ParticleFilterID.__init__(self)
_hoomd.ParticleFilterAll(self)
|
Change hashing for ParticleFilter python class
|
## Code Before:
import hoomd._hoomd as _hoomd
class ParticleFilterID:
def __init__(self, *args, **kwargs):
args_str = ''.join([str(arg) for arg in args])
kwargs_str = ''.join([str(value)for value in kwargs.values()])
self.args_str = args_str
self.kwargs_str = kwargs_str
_id = hash(self.__class__.__name__ + args_str + kwargs_str)
self._id = _id
def __hash__(self):
return self._id
def __eq__(self, other):
return self._id == other._id
class All(ParticleFilterID, _hoomd.ParticleFilterAll):
def __init__(self):
ParticleFilterID.__init__(self)
_hoomd.ParticleFilterAll(self)
## Instruction:
Change hashing for ParticleFilter python class
## Code After:
import hoomd._hoomd as _hoomd
import numpy as np
class ParticleFilter:
def __init__(self, *args, **kwargs):
args_str = ''.join([repr(arg) if not isinstance(arg, np.ndarray)
else repr(list(arg)) for arg in args])
kwargs_str = ''.join([repr(value) if not isinstance(value, np.ndarray)
else repr(list(value))
for value in kwargs.values()])
self.args_str = args_str
self.kwargs_str = kwargs_str
_id = hash(self.__class__.__name__ + args_str + kwargs_str)
self._id = _id
def __hash__(self):
return self._id
def __eq__(self, other):
return self._id == other._id
class All(ParticleFilterID, _hoomd.ParticleFilterAll):
def __init__(self):
ParticleFilterID.__init__(self)
_hoomd.ParticleFilterAll(self)
|
// ... existing code ...
import hoomd._hoomd as _hoomd
import numpy as np
// ... modified code ...
class ParticleFilter:
...
def __init__(self, *args, **kwargs):
args_str = ''.join([repr(arg) if not isinstance(arg, np.ndarray)
else repr(list(arg)) for arg in args])
kwargs_str = ''.join([repr(value) if not isinstance(value, np.ndarray)
else repr(list(value))
for value in kwargs.values()])
self.args_str = args_str
// ... rest of the code ...
|
ea8cbcaf41f01a46390882fbc99e6e14d70a49d1
|
src/mmw/apps/user/models.py
|
src/mmw/apps/user/models.py
|
from django.contrib.auth.models import User
from django.db import models
class ItsiUserManager(models.Manager):
def create_itsi_user(self, user, itsi_id):
itsi_user = self.create(user=user, itsi_id=itsi_id)
return itsi_user
class ItsiUser(models.Model):
user = models.OneToOneField(User, primary_key=True)
itsi_id = models.IntegerField()
objects = ItsiUserManager()
def __unicode__(self):
return unicode(self.user.username)
|
from django.conf import settings
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from rest_framework.authtoken.models import Token
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def create_auth_token(sender, instance=None, created=False, **kwargs):
"""
Create an auth token for every newly created user.
"""
if created:
Token.objects.create(user=instance)
class ItsiUserManager(models.Manager):
def create_itsi_user(self, user, itsi_id):
itsi_user = self.create(user=user, itsi_id=itsi_id)
return itsi_user
class ItsiUser(models.Model):
user = models.OneToOneField(User, primary_key=True)
itsi_id = models.IntegerField()
objects = ItsiUserManager()
def __unicode__(self):
return unicode(self.user.username)
|
Create an API auth token for every newly created user
|
Create an API auth token for every newly created user
* Add a post_save signal to add a new authtoken for every new user. For use with
the Geoprocessing API
|
Python
|
apache-2.0
|
WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed
|
+
+ from django.conf import settings
from django.contrib.auth.models import User
from django.db import models
+ from django.db.models.signals import post_save
+ from django.dispatch import receiver
+
+ from rest_framework.authtoken.models import Token
+
+
+ @receiver(post_save, sender=settings.AUTH_USER_MODEL)
+ def create_auth_token(sender, instance=None, created=False, **kwargs):
+ """
+ Create an auth token for every newly created user.
+ """
+ if created:
+ Token.objects.create(user=instance)
class ItsiUserManager(models.Manager):
def create_itsi_user(self, user, itsi_id):
itsi_user = self.create(user=user, itsi_id=itsi_id)
return itsi_user
class ItsiUser(models.Model):
user = models.OneToOneField(User, primary_key=True)
itsi_id = models.IntegerField()
objects = ItsiUserManager()
def __unicode__(self):
return unicode(self.user.username)
|
Create an API auth token for every newly created user
|
## Code Before:
from django.contrib.auth.models import User
from django.db import models
class ItsiUserManager(models.Manager):
def create_itsi_user(self, user, itsi_id):
itsi_user = self.create(user=user, itsi_id=itsi_id)
return itsi_user
class ItsiUser(models.Model):
user = models.OneToOneField(User, primary_key=True)
itsi_id = models.IntegerField()
objects = ItsiUserManager()
def __unicode__(self):
return unicode(self.user.username)
## Instruction:
Create an API auth token for every newly created user
## Code After:
from django.conf import settings
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from rest_framework.authtoken.models import Token
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def create_auth_token(sender, instance=None, created=False, **kwargs):
"""
Create an auth token for every newly created user.
"""
if created:
Token.objects.create(user=instance)
class ItsiUserManager(models.Manager):
def create_itsi_user(self, user, itsi_id):
itsi_user = self.create(user=user, itsi_id=itsi_id)
return itsi_user
class ItsiUser(models.Model):
user = models.OneToOneField(User, primary_key=True)
itsi_id = models.IntegerField()
objects = ItsiUserManager()
def __unicode__(self):
return unicode(self.user.username)
|
// ... existing code ...
from django.conf import settings
from django.contrib.auth.models import User
// ... modified code ...
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from rest_framework.authtoken.models import Token
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def create_auth_token(sender, instance=None, created=False, **kwargs):
"""
Create an auth token for every newly created user.
"""
if created:
Token.objects.create(user=instance)
// ... rest of the code ...
|
1f2a30c4316c6da714b7cbda1d6052e6e5040312
|
rasterio/tool.py
|
rasterio/tool.py
|
import code
import collections
import logging
import sys
import numpy
import rasterio
logger = logging.getLogger('rasterio')
Stats = collections.namedtuple('Stats', ['min', 'max', 'mean'])
def main(banner, dataset):
def show(source, cmap='gray'):
"""Show a raster using matplotlib.
The raster may be either an ndarray or a (dataset, bidx)
tuple.
"""
import matplotlib.pyplot as plt
if isinstance(source, tuple):
arr = source[0].read_band(source[1])
else:
arr = source
plt.imshow(arr, cmap=cmap)
plt.show()
def stats(source):
"""Return a tuple with raster min, max, and mean.
"""
if isinstance(source, tuple):
arr = source[0].read_band(source[1])
else:
arr = source
return Stats(numpy.min(arr), numpy.max(arr), numpy.mean(arr))
code.interact(
banner, local=dict(locals(), src=dataset, np=numpy, rio=rasterio))
return 0
|
import code
import collections
import logging
import sys
import matplotlib.pyplot as plt
import numpy
import rasterio
logger = logging.getLogger('rasterio')
Stats = collections.namedtuple('Stats', ['min', 'max', 'mean'])
def main(banner, dataset):
def show(source, cmap='gray'):
"""Show a raster using matplotlib.
The raster may be either an ndarray or a (dataset, bidx)
tuple.
"""
if isinstance(source, tuple):
arr = source[0].read_band(source[1])
else:
arr = source
plt.imshow(arr, cmap=cmap)
plt.show()
def stats(source):
"""Return a tuple with raster min, max, and mean.
"""
if isinstance(source, tuple):
arr = source[0].read_band(source[1])
else:
arr = source
return Stats(numpy.min(arr), numpy.max(arr), numpy.mean(arr))
code.interact(
banner,
local=dict(
locals(), src=dataset, np=numpy, rio=rasterio, plt=plt))
return 0
|
Add plt to rio_insp locals.
|
Add plt to rio_insp locals.
|
Python
|
bsd-3-clause
|
johanvdw/rasterio,perrygeo/rasterio,youngpm/rasterio,youngpm/rasterio,clembou/rasterio,sgillies/rasterio,youngpm/rasterio,kapadia/rasterio,clembou/rasterio,kapadia/rasterio,kapadia/rasterio,brendan-ward/rasterio,brendan-ward/rasterio,perrygeo/rasterio,johanvdw/rasterio,njwilson23/rasterio,snorfalorpagus/rasterio,njwilson23/rasterio,perrygeo/rasterio,brendan-ward/rasterio,njwilson23/rasterio,johanvdw/rasterio,clembou/rasterio
|
import code
import collections
import logging
import sys
+ import matplotlib.pyplot as plt
import numpy
import rasterio
logger = logging.getLogger('rasterio')
Stats = collections.namedtuple('Stats', ['min', 'max', 'mean'])
def main(banner, dataset):
def show(source, cmap='gray'):
"""Show a raster using matplotlib.
The raster may be either an ndarray or a (dataset, bidx)
tuple.
"""
- import matplotlib.pyplot as plt
if isinstance(source, tuple):
arr = source[0].read_band(source[1])
else:
arr = source
plt.imshow(arr, cmap=cmap)
plt.show()
def stats(source):
"""Return a tuple with raster min, max, and mean.
"""
if isinstance(source, tuple):
arr = source[0].read_band(source[1])
else:
arr = source
return Stats(numpy.min(arr), numpy.max(arr), numpy.mean(arr))
code.interact(
+ banner,
+ local=dict(
- banner, local=dict(locals(), src=dataset, np=numpy, rio=rasterio))
+ locals(), src=dataset, np=numpy, rio=rasterio, plt=plt))
return 0
|
Add plt to rio_insp locals.
|
## Code Before:
import code
import collections
import logging
import sys
import numpy
import rasterio
logger = logging.getLogger('rasterio')
Stats = collections.namedtuple('Stats', ['min', 'max', 'mean'])
def main(banner, dataset):
def show(source, cmap='gray'):
"""Show a raster using matplotlib.
The raster may be either an ndarray or a (dataset, bidx)
tuple.
"""
import matplotlib.pyplot as plt
if isinstance(source, tuple):
arr = source[0].read_band(source[1])
else:
arr = source
plt.imshow(arr, cmap=cmap)
plt.show()
def stats(source):
"""Return a tuple with raster min, max, and mean.
"""
if isinstance(source, tuple):
arr = source[0].read_band(source[1])
else:
arr = source
return Stats(numpy.min(arr), numpy.max(arr), numpy.mean(arr))
code.interact(
banner, local=dict(locals(), src=dataset, np=numpy, rio=rasterio))
return 0
## Instruction:
Add plt to rio_insp locals.
## Code After:
import code
import collections
import logging
import sys
import matplotlib.pyplot as plt
import numpy
import rasterio
logger = logging.getLogger('rasterio')
Stats = collections.namedtuple('Stats', ['min', 'max', 'mean'])
def main(banner, dataset):
def show(source, cmap='gray'):
"""Show a raster using matplotlib.
The raster may be either an ndarray or a (dataset, bidx)
tuple.
"""
if isinstance(source, tuple):
arr = source[0].read_band(source[1])
else:
arr = source
plt.imshow(arr, cmap=cmap)
plt.show()
def stats(source):
"""Return a tuple with raster min, max, and mean.
"""
if isinstance(source, tuple):
arr = source[0].read_band(source[1])
else:
arr = source
return Stats(numpy.min(arr), numpy.max(arr), numpy.mean(arr))
code.interact(
banner,
local=dict(
locals(), src=dataset, np=numpy, rio=rasterio, plt=plt))
return 0
|
# ... existing code ...
import matplotlib.pyplot as plt
import numpy
# ... modified code ...
"""
if isinstance(source, tuple):
...
code.interact(
banner,
local=dict(
locals(), src=dataset, np=numpy, rio=rasterio, plt=plt))
# ... rest of the code ...
|
02f5db5fdb46684b60a9b5e9125da228a927c2c3
|
mrbelvedereci/build/cumulusci/config.py
|
mrbelvedereci/build/cumulusci/config.py
|
from cumulusci.core.config import YamlGlobalConfig
from cumulusci.core.config import YamlProjectConfig
class MrbelvedereProjectConfig(YamlProjectConfig):
def __init__(self, global_config_obj, build_flow):
super(MrbelvedereProjectConfig, self).__init__(global_config_obj)
self.build_flow = build_flow
@property
def config_project_local_path(self):
""" mrbelvedere never uses the local path """
return
@property
def repo_root(self):
return self.build_flow.build_dir
@property
def repo_name(self):
return self.build_flow.build.repo.name
@property
def repo_url(self):
return self.build_flow.build.repo.url
@property
def repo_owner(self):
return self.build_flow.build.repo.url.split('/')[-2]
@property
def repo_branch(self):
return self.build_flow.build.branch.name
@property
def repo_commit(self):
return self.build_flow.build.commit
class MrbelvedereGlobalConfig(YamlGlobalConfig):
project_config_class = MrbelvedereProjectConfig
def get_project_config(self, build_flow):
return self.project_config_class(self, build_flow)
|
from cumulusci.core.config import YamlGlobalConfig
from cumulusci.core.config import YamlProjectConfig
class MrbelvedereProjectConfig(YamlProjectConfig):
def __init__(self, global_config_obj, build_flow):
self.build_flow = build_flow
super(MrbelvedereProjectConfig, self).__init__(global_config_obj)
@property
def config_project_local_path(self):
""" mrbelvedere never uses the local path """
return
@property
def repo_root(self):
return self.build_flow.build_dir
@property
def repo_name(self):
return self.build_flow.build.repo.name
@property
def repo_url(self):
return self.build_flow.build.repo.url
@property
def repo_owner(self):
return self.build_flow.build.repo.url.split('/')[-2]
@property
def repo_branch(self):
return self.build_flow.build.branch.name
@property
def repo_commit(self):
return self.build_flow.build.commit
class MrbelvedereGlobalConfig(YamlGlobalConfig):
project_config_class = MrbelvedereProjectConfig
def get_project_config(self, build_flow):
return self.project_config_class(self, build_flow)
|
Set self.build_flow before calling the super __init__ method
|
Set self.build_flow before calling the super __init__ method
|
Python
|
bsd-3-clause
|
SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci
|
from cumulusci.core.config import YamlGlobalConfig
from cumulusci.core.config import YamlProjectConfig
class MrbelvedereProjectConfig(YamlProjectConfig):
def __init__(self, global_config_obj, build_flow):
+ self.build_flow = build_flow
super(MrbelvedereProjectConfig, self).__init__(global_config_obj)
- self.build_flow = build_flow
@property
def config_project_local_path(self):
""" mrbelvedere never uses the local path """
return
@property
def repo_root(self):
return self.build_flow.build_dir
@property
def repo_name(self):
return self.build_flow.build.repo.name
@property
def repo_url(self):
return self.build_flow.build.repo.url
@property
def repo_owner(self):
return self.build_flow.build.repo.url.split('/')[-2]
@property
def repo_branch(self):
return self.build_flow.build.branch.name
@property
def repo_commit(self):
return self.build_flow.build.commit
class MrbelvedereGlobalConfig(YamlGlobalConfig):
project_config_class = MrbelvedereProjectConfig
def get_project_config(self, build_flow):
return self.project_config_class(self, build_flow)
|
Set self.build_flow before calling the super __init__ method
|
## Code Before:
from cumulusci.core.config import YamlGlobalConfig
from cumulusci.core.config import YamlProjectConfig
class MrbelvedereProjectConfig(YamlProjectConfig):
def __init__(self, global_config_obj, build_flow):
super(MrbelvedereProjectConfig, self).__init__(global_config_obj)
self.build_flow = build_flow
@property
def config_project_local_path(self):
""" mrbelvedere never uses the local path """
return
@property
def repo_root(self):
return self.build_flow.build_dir
@property
def repo_name(self):
return self.build_flow.build.repo.name
@property
def repo_url(self):
return self.build_flow.build.repo.url
@property
def repo_owner(self):
return self.build_flow.build.repo.url.split('/')[-2]
@property
def repo_branch(self):
return self.build_flow.build.branch.name
@property
def repo_commit(self):
return self.build_flow.build.commit
class MrbelvedereGlobalConfig(YamlGlobalConfig):
project_config_class = MrbelvedereProjectConfig
def get_project_config(self, build_flow):
return self.project_config_class(self, build_flow)
## Instruction:
Set self.build_flow before calling the super __init__ method
## Code After:
from cumulusci.core.config import YamlGlobalConfig
from cumulusci.core.config import YamlProjectConfig
class MrbelvedereProjectConfig(YamlProjectConfig):
def __init__(self, global_config_obj, build_flow):
self.build_flow = build_flow
super(MrbelvedereProjectConfig, self).__init__(global_config_obj)
@property
def config_project_local_path(self):
""" mrbelvedere never uses the local path """
return
@property
def repo_root(self):
return self.build_flow.build_dir
@property
def repo_name(self):
return self.build_flow.build.repo.name
@property
def repo_url(self):
return self.build_flow.build.repo.url
@property
def repo_owner(self):
return self.build_flow.build.repo.url.split('/')[-2]
@property
def repo_branch(self):
return self.build_flow.build.branch.name
@property
def repo_commit(self):
return self.build_flow.build.commit
class MrbelvedereGlobalConfig(YamlGlobalConfig):
project_config_class = MrbelvedereProjectConfig
def get_project_config(self, build_flow):
return self.project_config_class(self, build_flow)
|
# ... existing code ...
def __init__(self, global_config_obj, build_flow):
self.build_flow = build_flow
super(MrbelvedereProjectConfig, self).__init__(global_config_obj)
# ... rest of the code ...
|
e6c43333c3939247534ddee4c419dcdcff5eda5f
|
spyder_terminal/server/rest/term_rest.py
|
spyder_terminal/server/rest/term_rest.py
|
"""Main HTTP routes request handlers."""
import tornado.web
import tornado.escape
from os import getcwd
class MainHandler(tornado.web.RequestHandler):
"""Handles creation of new terminals."""
@tornado.gen.coroutine
def post(self):
"""POST verb: Create a new terminal."""
rows = int(self.get_argument('rows', default=23))
cols = int(self.get_argument('cols', default=73))
cwd = self.get_cookie('cwd', default=getcwd())
self.application.logger.info('CWD: {0}'.format(cwd))
self.application.logger.info('Size: ({0}, {1})'.format(cols, rows))
pid = yield self.application.term_manager.create_term(rows, cols, cwd)
self.write(pid)
class ResizeHandler(tornado.web.RequestHandler):
"""Handles resizing of terminals."""
@tornado.gen.coroutine
def post(self, pid):
"""POST verb: Resize a terminal."""
rows = int(self.get_argument('rows', None, 23))
cols = int(self.get_argument('cols', None, 73))
self.application.term_manager.resize_term(pid, rows, cols)
|
"""Main HTTP routes request handlers."""
import tornado.web
import tornado.escape
from os import getcwd
class MainHandler(tornado.web.RequestHandler):
"""Handles creation of new terminals."""
@tornado.gen.coroutine
def post(self):
"""POST verb: Create a new terminal."""
rows = int(self.get_argument('rows', default=23))
cols = int(self.get_argument('cols', default=73))
cwd = self.get_cookie('cwd', default=getcwd())
self.application.logger.info('CWD: {0}'.format(cwd))
self.application.logger.info('Size: ({0}, {1})'.format(cols, rows))
pid = yield self.application.term_manager.create_term(rows, cols, cwd)
self.write(pid)
class ResizeHandler(tornado.web.RequestHandler):
"""Handles resizing of terminals."""
@tornado.gen.coroutine
def post(self, pid):
"""POST verb: Resize a terminal."""
rows = int(self.get_argument('rows', default=23))
cols = int(self.get_argument('cols', default=73))
self.application.term_manager.resize_term(pid, rows, cols)
|
Change default terminal resize arguments
|
Change default terminal resize arguments
|
Python
|
mit
|
andfoy/spyder-terminal,spyder-ide/spyder-terminal,spyder-ide/spyder-terminal,andfoy/spyder-terminal,andfoy/spyder-terminal,spyder-ide/spyder-terminal,spyder-ide/spyder-terminal
|
"""Main HTTP routes request handlers."""
import tornado.web
import tornado.escape
from os import getcwd
class MainHandler(tornado.web.RequestHandler):
"""Handles creation of new terminals."""
@tornado.gen.coroutine
def post(self):
"""POST verb: Create a new terminal."""
rows = int(self.get_argument('rows', default=23))
cols = int(self.get_argument('cols', default=73))
cwd = self.get_cookie('cwd', default=getcwd())
self.application.logger.info('CWD: {0}'.format(cwd))
self.application.logger.info('Size: ({0}, {1})'.format(cols, rows))
pid = yield self.application.term_manager.create_term(rows, cols, cwd)
self.write(pid)
class ResizeHandler(tornado.web.RequestHandler):
"""Handles resizing of terminals."""
@tornado.gen.coroutine
def post(self, pid):
"""POST verb: Resize a terminal."""
- rows = int(self.get_argument('rows', None, 23))
+ rows = int(self.get_argument('rows', default=23))
- cols = int(self.get_argument('cols', None, 73))
+ cols = int(self.get_argument('cols', default=73))
self.application.term_manager.resize_term(pid, rows, cols)
|
Change default terminal resize arguments
|
## Code Before:
"""Main HTTP routes request handlers."""
import tornado.web
import tornado.escape
from os import getcwd
class MainHandler(tornado.web.RequestHandler):
"""Handles creation of new terminals."""
@tornado.gen.coroutine
def post(self):
"""POST verb: Create a new terminal."""
rows = int(self.get_argument('rows', default=23))
cols = int(self.get_argument('cols', default=73))
cwd = self.get_cookie('cwd', default=getcwd())
self.application.logger.info('CWD: {0}'.format(cwd))
self.application.logger.info('Size: ({0}, {1})'.format(cols, rows))
pid = yield self.application.term_manager.create_term(rows, cols, cwd)
self.write(pid)
class ResizeHandler(tornado.web.RequestHandler):
"""Handles resizing of terminals."""
@tornado.gen.coroutine
def post(self, pid):
"""POST verb: Resize a terminal."""
rows = int(self.get_argument('rows', None, 23))
cols = int(self.get_argument('cols', None, 73))
self.application.term_manager.resize_term(pid, rows, cols)
## Instruction:
Change default terminal resize arguments
## Code After:
"""Main HTTP routes request handlers."""
import tornado.web
import tornado.escape
from os import getcwd
class MainHandler(tornado.web.RequestHandler):
"""Handles creation of new terminals."""
@tornado.gen.coroutine
def post(self):
"""POST verb: Create a new terminal."""
rows = int(self.get_argument('rows', default=23))
cols = int(self.get_argument('cols', default=73))
cwd = self.get_cookie('cwd', default=getcwd())
self.application.logger.info('CWD: {0}'.format(cwd))
self.application.logger.info('Size: ({0}, {1})'.format(cols, rows))
pid = yield self.application.term_manager.create_term(rows, cols, cwd)
self.write(pid)
class ResizeHandler(tornado.web.RequestHandler):
"""Handles resizing of terminals."""
@tornado.gen.coroutine
def post(self, pid):
"""POST verb: Resize a terminal."""
rows = int(self.get_argument('rows', default=23))
cols = int(self.get_argument('cols', default=73))
self.application.term_manager.resize_term(pid, rows, cols)
|
// ... existing code ...
"""POST verb: Resize a terminal."""
rows = int(self.get_argument('rows', default=23))
cols = int(self.get_argument('cols', default=73))
self.application.term_manager.resize_term(pid, rows, cols)
// ... rest of the code ...
|
42bfa6b69697c0c093a961df5708f477288a6efa
|
icekit/plugins/twitter_embed/forms.py
|
icekit/plugins/twitter_embed/forms.py
|
import re
from django import forms
from fluent_contents.forms import ContentItemForm
class TwitterEmbedAdminForm(ContentItemForm):
def clean_twitter_url(self):
"""
Make sure the URL provided matches the twitter URL format.
"""
url = self.cleaned_data['twitter_url']
if url:
pattern = re.compile(r'https?://(www\.)?twitter.com/\S+/status(es)?/\S+')
if not pattern.match(url):
raise forms.ValidationError('Please provide a valid twitter link.')
return url
|
import re
from django import forms
from fluent_contents.forms import ContentItemForm
from icekit.plugins.twitter_embed.models import TwitterEmbedItem
class TwitterEmbedAdminForm(ContentItemForm):
class Meta:
model = TwitterEmbedItem
fields = '__all__'
def clean_twitter_url(self):
"""
Make sure the URL provided matches the twitter URL format.
"""
url = self.cleaned_data['twitter_url']
if url:
pattern = re.compile(r'https?://(www\.)?twitter.com/\S+/status(es)?/\S+')
if not pattern.match(url):
raise forms.ValidationError('Please provide a valid twitter link.')
return url
|
Add model and firld information to form.
|
Add model and firld information to form.
|
Python
|
mit
|
ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit
|
import re
from django import forms
from fluent_contents.forms import ContentItemForm
+ from icekit.plugins.twitter_embed.models import TwitterEmbedItem
class TwitterEmbedAdminForm(ContentItemForm):
+ class Meta:
+ model = TwitterEmbedItem
+ fields = '__all__'
+
def clean_twitter_url(self):
"""
Make sure the URL provided matches the twitter URL format.
"""
url = self.cleaned_data['twitter_url']
if url:
pattern = re.compile(r'https?://(www\.)?twitter.com/\S+/status(es)?/\S+')
if not pattern.match(url):
raise forms.ValidationError('Please provide a valid twitter link.')
return url
-
|
Add model and firld information to form.
|
## Code Before:
import re
from django import forms
from fluent_contents.forms import ContentItemForm
class TwitterEmbedAdminForm(ContentItemForm):
def clean_twitter_url(self):
"""
Make sure the URL provided matches the twitter URL format.
"""
url = self.cleaned_data['twitter_url']
if url:
pattern = re.compile(r'https?://(www\.)?twitter.com/\S+/status(es)?/\S+')
if not pattern.match(url):
raise forms.ValidationError('Please provide a valid twitter link.')
return url
## Instruction:
Add model and firld information to form.
## Code After:
import re
from django import forms
from fluent_contents.forms import ContentItemForm
from icekit.plugins.twitter_embed.models import TwitterEmbedItem
class TwitterEmbedAdminForm(ContentItemForm):
class Meta:
model = TwitterEmbedItem
fields = '__all__'
def clean_twitter_url(self):
"""
Make sure the URL provided matches the twitter URL format.
"""
url = self.cleaned_data['twitter_url']
if url:
pattern = re.compile(r'https?://(www\.)?twitter.com/\S+/status(es)?/\S+')
if not pattern.match(url):
raise forms.ValidationError('Please provide a valid twitter link.')
return url
|
...
from fluent_contents.forms import ContentItemForm
from icekit.plugins.twitter_embed.models import TwitterEmbedItem
...
class TwitterEmbedAdminForm(ContentItemForm):
class Meta:
model = TwitterEmbedItem
fields = '__all__'
def clean_twitter_url(self):
...
return url
...
|
d4cfe4c9d5ff680a85c25c144b077d928386811c
|
onetime/backends.py
|
onetime/backends.py
|
from django.contrib.auth.models import User
from onetime.models import Key
class OneTimeBackend:
def authenticate(self, key):
data = Key.objects.get(key=key)
if data is None:
None
if not data.is_valid():
return None
return data.user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
from django.contrib.auth.models import User
from onetime.models import Key
class OneTimeBackend:
def authenticate(self, key):
data = Key.objects.filter(key=key)
if len(data) == 0:
return None
data = data[0]
if not data.is_valid():
return None
return data.user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
Use filter() instead of get() since the key might be invalid
|
Use filter() instead of get() since the key might be invalid
|
Python
|
agpl-3.0
|
ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,vanschelven/cmsplugin-journal,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,uploadcare/django-loginurl,ISIFoundation/influenzanet-website,fajran/django-loginurl,ISIFoundation/influenzanet-website
|
from django.contrib.auth.models import User
from onetime.models import Key
class OneTimeBackend:
def authenticate(self, key):
- data = Key.objects.get(key=key)
+ data = Key.objects.filter(key=key)
- if data is None:
+ if len(data) == 0:
- None
+ return None
+ data = data[0]
if not data.is_valid():
return None
return data.user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
Use filter() instead of get() since the key might be invalid
|
## Code Before:
from django.contrib.auth.models import User
from onetime.models import Key
class OneTimeBackend:
def authenticate(self, key):
data = Key.objects.get(key=key)
if data is None:
None
if not data.is_valid():
return None
return data.user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
## Instruction:
Use filter() instead of get() since the key might be invalid
## Code After:
from django.contrib.auth.models import User
from onetime.models import Key
class OneTimeBackend:
def authenticate(self, key):
data = Key.objects.filter(key=key)
if len(data) == 0:
return None
data = data[0]
if not data.is_valid():
return None
return data.user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
...
def authenticate(self, key):
data = Key.objects.filter(key=key)
if len(data) == 0:
return None
data = data[0]
if not data.is_valid():
...
|
10360cc4d956faac194c58eb3b52fae2b348b356
|
links/functions.py
|
links/functions.py
|
from uuid import uuid4
from random import randint
from basic_http import BasicHttp
def random_pk(length=8):
s = uuid4().hex
f = randint(0, (len(s) - length))
t = f + length
return s[f:t]
def get_url_info(url):
print url
req = BasicHttp(url)
print req
res = req.HEAD()
data = {
'file_name': url.split('/')[-1],
'content_type': res['headers']['Content-Type'],
'file_size': res['headers']['Content-Length']
}
return data
|
from uuid import uuid4
from random import randint
from basic_http import BasicHttp
def random_pk(length=8):
s = uuid4().hex
f = randint(0, (len(s) - length))
t = f + length
return s[f:t]
def get_url_info(url):
req = BasicHttp(url)
res = req.HEAD()
data = {
'file_name': url.split('/')[-1],
'content_type': res['header']['Content-Type'],
'file_size': res['header']['Content-Length']
}
return data
|
Remove of debug prints. Fix of invalid key name.
|
Remove of debug prints.
Fix of invalid key name.
|
Python
|
bsd-3-clause
|
nachopro/followlink,nachopro/followlink
|
from uuid import uuid4
from random import randint
from basic_http import BasicHttp
def random_pk(length=8):
s = uuid4().hex
f = randint(0, (len(s) - length))
t = f + length
return s[f:t]
def get_url_info(url):
- print url
req = BasicHttp(url)
- print req
res = req.HEAD()
data = {
'file_name': url.split('/')[-1],
- 'content_type': res['headers']['Content-Type'],
+ 'content_type': res['header']['Content-Type'],
- 'file_size': res['headers']['Content-Length']
+ 'file_size': res['header']['Content-Length']
}
return data
|
Remove of debug prints. Fix of invalid key name.
|
## Code Before:
from uuid import uuid4
from random import randint
from basic_http import BasicHttp
def random_pk(length=8):
s = uuid4().hex
f = randint(0, (len(s) - length))
t = f + length
return s[f:t]
def get_url_info(url):
print url
req = BasicHttp(url)
print req
res = req.HEAD()
data = {
'file_name': url.split('/')[-1],
'content_type': res['headers']['Content-Type'],
'file_size': res['headers']['Content-Length']
}
return data
## Instruction:
Remove of debug prints. Fix of invalid key name.
## Code After:
from uuid import uuid4
from random import randint
from basic_http import BasicHttp
def random_pk(length=8):
s = uuid4().hex
f = randint(0, (len(s) - length))
t = f + length
return s[f:t]
def get_url_info(url):
req = BasicHttp(url)
res = req.HEAD()
data = {
'file_name': url.split('/')[-1],
'content_type': res['header']['Content-Type'],
'file_size': res['header']['Content-Length']
}
return data
|
# ... existing code ...
def get_url_info(url):
req = BasicHttp(url)
res = req.HEAD()
# ... modified code ...
'file_name': url.split('/')[-1],
'content_type': res['header']['Content-Type'],
'file_size': res['header']['Content-Length']
}
# ... rest of the code ...
|
7947d474da8bb086493890d81a6788d76e00b108
|
numba/cuda/tests/__init__.py
|
numba/cuda/tests/__init__.py
|
from numba.testing import SerialSuite
from numba.testing import load_testsuite
from numba import cuda
from os.path import dirname, join
def load_tests(loader, tests, pattern):
suite = SerialSuite()
this_dir = dirname(__file__)
suite.addTests(load_testsuite(loader, join(this_dir, 'nocuda')))
suite.addTests(load_testsuite(loader, join(this_dir, 'cudasim')))
if cuda.is_available():
gpus = cuda.list_devices()
if gpus and gpus[0].compute_capability >= (2, 0):
suite.addTests(load_testsuite(loader, join(this_dir, 'cudadrv')))
suite.addTests(load_testsuite(loader, join(this_dir, 'cudapy')))
else:
print("skipped CUDA tests because GPU CC < 2.0")
else:
print("skipped CUDA tests")
return suite
|
from numba.testing import SerialSuite
from numba.testing import load_testsuite
from numba import cuda
from os.path import dirname, join
def load_tests(loader, tests, pattern):
suite = SerialSuite()
this_dir = dirname(__file__)
suite.addTests(load_testsuite(loader, join(this_dir, 'nocuda')))
if cuda.is_available():
suite.addTests(load_testsuite(loader, join(this_dir, 'cudasim')))
gpus = cuda.list_devices()
if gpus and gpus[0].compute_capability >= (2, 0):
suite.addTests(load_testsuite(loader, join(this_dir, 'cudadrv')))
suite.addTests(load_testsuite(loader, join(this_dir, 'cudapy')))
else:
print("skipped CUDA tests because GPU CC < 2.0")
else:
print("skipped CUDA tests")
return suite
|
Fix tests on machine without CUDA
|
Fix tests on machine without CUDA
|
Python
|
bsd-2-clause
|
sklam/numba,numba/numba,seibert/numba,IntelLabs/numba,jriehl/numba,stonebig/numba,gmarkall/numba,cpcloud/numba,IntelLabs/numba,gmarkall/numba,jriehl/numba,cpcloud/numba,sklam/numba,cpcloud/numba,numba/numba,stonebig/numba,stefanseefeld/numba,sklam/numba,cpcloud/numba,seibert/numba,sklam/numba,gmarkall/numba,stefanseefeld/numba,jriehl/numba,numba/numba,cpcloud/numba,stefanseefeld/numba,IntelLabs/numba,numba/numba,IntelLabs/numba,stuartarchibald/numba,jriehl/numba,sklam/numba,IntelLabs/numba,numba/numba,stonebig/numba,stuartarchibald/numba,stonebig/numba,jriehl/numba,gmarkall/numba,stefanseefeld/numba,stuartarchibald/numba,stuartarchibald/numba,stonebig/numba,stefanseefeld/numba,seibert/numba,seibert/numba,gmarkall/numba,stuartarchibald/numba,seibert/numba
|
from numba.testing import SerialSuite
from numba.testing import load_testsuite
from numba import cuda
from os.path import dirname, join
def load_tests(loader, tests, pattern):
suite = SerialSuite()
this_dir = dirname(__file__)
suite.addTests(load_testsuite(loader, join(this_dir, 'nocuda')))
- suite.addTests(load_testsuite(loader, join(this_dir, 'cudasim')))
if cuda.is_available():
+ suite.addTests(load_testsuite(loader, join(this_dir, 'cudasim')))
gpus = cuda.list_devices()
if gpus and gpus[0].compute_capability >= (2, 0):
suite.addTests(load_testsuite(loader, join(this_dir, 'cudadrv')))
suite.addTests(load_testsuite(loader, join(this_dir, 'cudapy')))
else:
print("skipped CUDA tests because GPU CC < 2.0")
else:
print("skipped CUDA tests")
return suite
|
Fix tests on machine without CUDA
|
## Code Before:
from numba.testing import SerialSuite
from numba.testing import load_testsuite
from numba import cuda
from os.path import dirname, join
def load_tests(loader, tests, pattern):
suite = SerialSuite()
this_dir = dirname(__file__)
suite.addTests(load_testsuite(loader, join(this_dir, 'nocuda')))
suite.addTests(load_testsuite(loader, join(this_dir, 'cudasim')))
if cuda.is_available():
gpus = cuda.list_devices()
if gpus and gpus[0].compute_capability >= (2, 0):
suite.addTests(load_testsuite(loader, join(this_dir, 'cudadrv')))
suite.addTests(load_testsuite(loader, join(this_dir, 'cudapy')))
else:
print("skipped CUDA tests because GPU CC < 2.0")
else:
print("skipped CUDA tests")
return suite
## Instruction:
Fix tests on machine without CUDA
## Code After:
from numba.testing import SerialSuite
from numba.testing import load_testsuite
from numba import cuda
from os.path import dirname, join
def load_tests(loader, tests, pattern):
suite = SerialSuite()
this_dir = dirname(__file__)
suite.addTests(load_testsuite(loader, join(this_dir, 'nocuda')))
if cuda.is_available():
suite.addTests(load_testsuite(loader, join(this_dir, 'cudasim')))
gpus = cuda.list_devices()
if gpus and gpus[0].compute_capability >= (2, 0):
suite.addTests(load_testsuite(loader, join(this_dir, 'cudadrv')))
suite.addTests(load_testsuite(loader, join(this_dir, 'cudapy')))
else:
print("skipped CUDA tests because GPU CC < 2.0")
else:
print("skipped CUDA tests")
return suite
|
# ... existing code ...
suite.addTests(load_testsuite(loader, join(this_dir, 'nocuda')))
if cuda.is_available():
suite.addTests(load_testsuite(loader, join(this_dir, 'cudasim')))
gpus = cuda.list_devices()
# ... rest of the code ...
|
dd50adfa567f7be04b3c000508f3f649147be387
|
scripts/update_vv.py
|
scripts/update_vv.py
|
import mica.vv
mica.vv.update()
|
import matplotlib
matplotlib.use('Agg')
import mica.vv
mica.vv.update()
|
Set vv cron script to use Agg backend
|
Set vv cron script to use Agg backend
|
Python
|
bsd-3-clause
|
sot/mica,sot/mica
|
+ import matplotlib
+ matplotlib.use('Agg')
import mica.vv
mica.vv.update()
|
Set vv cron script to use Agg backend
|
## Code Before:
import mica.vv
mica.vv.update()
## Instruction:
Set vv cron script to use Agg backend
## Code After:
import matplotlib
matplotlib.use('Agg')
import mica.vv
mica.vv.update()
|
# ... existing code ...
import matplotlib
matplotlib.use('Agg')
import mica.vv
# ... rest of the code ...
|
ccc98ced56ee8dda02332720c7146e1548a3b53c
|
project/project/urls.py
|
project/project/urls.py
|
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^admin_tools/', include('admin_tools.urls')),
url(r'^accounts/logout/$', 'allauth.account.views.logout', name='account_logout'),
url('^accounts/social/', include('allauth.socialaccount.urls')),
url('^accounts/', include('allauth.socialaccount.providers.google.urls')),
url(r'^', include("project.teams.urls")),
url(r'^', include("project.profiles.urls")),
]
|
from django.conf.urls import include, url
from django.conf import settings
from django.contrib import admin
from django.views.generic.base import RedirectView
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^admin_tools/', include('admin_tools.urls')),
url(r'^accounts/login/$', RedirectView.as_view(url=settings.LOGIN_URL),
name='account_login'),
url(r'^accounts/logout/$', 'allauth.account.views.logout', name='account_logout'),
url(r'^accounts/social/', include('allauth.socialaccount.urls')),
url(r'^accounts/', include('allauth.socialaccount.providers.google.urls')),
url(r'^', include("project.teams.urls")),
url(r'^', include("project.profiles.urls")),
]
|
Set up redirect to login view
|
Set up redirect to login view
|
Python
|
mit
|
jonsimington/app,compsci-hfh/app,compsci-hfh/app,jonsimington/app
|
from django.conf.urls import include, url
+ from django.conf import settings
from django.contrib import admin
+ from django.views.generic.base import RedirectView
+
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^admin_tools/', include('admin_tools.urls')),
+ url(r'^accounts/login/$', RedirectView.as_view(url=settings.LOGIN_URL),
+ name='account_login'),
url(r'^accounts/logout/$', 'allauth.account.views.logout', name='account_logout'),
- url('^accounts/social/', include('allauth.socialaccount.urls')),
+ url(r'^accounts/social/', include('allauth.socialaccount.urls')),
- url('^accounts/', include('allauth.socialaccount.providers.google.urls')),
+ url(r'^accounts/', include('allauth.socialaccount.providers.google.urls')),
url(r'^', include("project.teams.urls")),
url(r'^', include("project.profiles.urls")),
]
|
Set up redirect to login view
|
## Code Before:
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^admin_tools/', include('admin_tools.urls')),
url(r'^accounts/logout/$', 'allauth.account.views.logout', name='account_logout'),
url('^accounts/social/', include('allauth.socialaccount.urls')),
url('^accounts/', include('allauth.socialaccount.providers.google.urls')),
url(r'^', include("project.teams.urls")),
url(r'^', include("project.profiles.urls")),
]
## Instruction:
Set up redirect to login view
## Code After:
from django.conf.urls import include, url
from django.conf import settings
from django.contrib import admin
from django.views.generic.base import RedirectView
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^admin_tools/', include('admin_tools.urls')),
url(r'^accounts/login/$', RedirectView.as_view(url=settings.LOGIN_URL),
name='account_login'),
url(r'^accounts/logout/$', 'allauth.account.views.logout', name='account_logout'),
url(r'^accounts/social/', include('allauth.socialaccount.urls')),
url(r'^accounts/', include('allauth.socialaccount.providers.google.urls')),
url(r'^', include("project.teams.urls")),
url(r'^', include("project.profiles.urls")),
]
|
...
from django.conf.urls import include, url
from django.conf import settings
from django.contrib import admin
from django.views.generic.base import RedirectView
...
url(r'^accounts/login/$', RedirectView.as_view(url=settings.LOGIN_URL),
name='account_login'),
url(r'^accounts/logout/$', 'allauth.account.views.logout', name='account_logout'),
url(r'^accounts/social/', include('allauth.socialaccount.urls')),
url(r'^accounts/', include('allauth.socialaccount.providers.google.urls')),
...
|
90cd7a194ce1294d6b14b819b10ca62e3d058cb9
|
auslib/test/web/test_dockerflow.py
|
auslib/test/web/test_dockerflow.py
|
import mock
from auslib.test.web.test_client import ClientTestBase
class TestDockerflowEndpoints(ClientTestBase):
def testVersion(self):
ret = self.client.get("/__version__")
self.assertEquals(ret.data, """
{
"source":"https://github.com/mozilla/balrog",
"version":"1.0",
"commit":"abcdef123456"
}
""")
def testHeartbeat(self):
with mock.patch("auslib.global_state.dbo.rules.countRules") as cr:
ret = self.client.get("/__heartbeat__")
self.assertEqual(ret.status_code, 200)
self.assertEqual(cr.call_count, 1)
def testLbHeartbeat(self):
ret = self.client.get("/__lbheartbeat__")
self.assertEqual(ret.status_code, 200)
|
import mock
from auslib.test.web.test_client import ClientTestBase
class TestDockerflowEndpoints(ClientTestBase):
def testVersion(self):
ret = self.client.get("/__version__")
self.assertEquals(ret.data, """
{
"source":"https://github.com/mozilla/balrog",
"version":"1.0",
"commit":"abcdef123456"
}
""")
def testHeartbeat(self):
with mock.patch("auslib.global_state.dbo.rules.countRules") as cr:
ret = self.client.get("/__heartbeat__")
self.assertEqual(ret.status_code, 200)
self.assertEqual(cr.call_count, 1)
def testHeartbeatWithException(self):
with mock.patch("auslib.global_state.dbo.rules.countRules") as cr:
cr.side_effect = Exception("kabom!")
# Because there's no web server between us and the endpoint, we recieve
# the Exception directly instead of a 500 error
self.assertRaises(Exception, self.client.get, "/__heartbeat__")
self.assertEqual(cr.call_count, 1)
def testLbHeartbeat(self):
ret = self.client.get("/__lbheartbeat__")
self.assertEqual(ret.status_code, 200)
|
Add test to make sure public facing app raises exception when it hits an error.
|
Add test to make sure public facing app raises exception when it hits an error.
|
Python
|
mpl-2.0
|
aksareen/balrog,nurav/balrog,nurav/balrog,mozbhearsum/balrog,tieu/balrog,mozbhearsum/balrog,aksareen/balrog,testbhearsum/balrog,testbhearsum/balrog,nurav/balrog,aksareen/balrog,tieu/balrog,nurav/balrog,tieu/balrog,mozbhearsum/balrog,tieu/balrog,testbhearsum/balrog,mozbhearsum/balrog,aksareen/balrog,testbhearsum/balrog
|
import mock
from auslib.test.web.test_client import ClientTestBase
class TestDockerflowEndpoints(ClientTestBase):
def testVersion(self):
ret = self.client.get("/__version__")
self.assertEquals(ret.data, """
{
"source":"https://github.com/mozilla/balrog",
"version":"1.0",
"commit":"abcdef123456"
}
""")
def testHeartbeat(self):
with mock.patch("auslib.global_state.dbo.rules.countRules") as cr:
ret = self.client.get("/__heartbeat__")
self.assertEqual(ret.status_code, 200)
self.assertEqual(cr.call_count, 1)
+ def testHeartbeatWithException(self):
+ with mock.patch("auslib.global_state.dbo.rules.countRules") as cr:
+ cr.side_effect = Exception("kabom!")
+ # Because there's no web server between us and the endpoint, we recieve
+ # the Exception directly instead of a 500 error
+ self.assertRaises(Exception, self.client.get, "/__heartbeat__")
+ self.assertEqual(cr.call_count, 1)
+
def testLbHeartbeat(self):
ret = self.client.get("/__lbheartbeat__")
self.assertEqual(ret.status_code, 200)
|
Add test to make sure public facing app raises exception when it hits an error.
|
## Code Before:
import mock
from auslib.test.web.test_client import ClientTestBase
class TestDockerflowEndpoints(ClientTestBase):
def testVersion(self):
ret = self.client.get("/__version__")
self.assertEquals(ret.data, """
{
"source":"https://github.com/mozilla/balrog",
"version":"1.0",
"commit":"abcdef123456"
}
""")
def testHeartbeat(self):
with mock.patch("auslib.global_state.dbo.rules.countRules") as cr:
ret = self.client.get("/__heartbeat__")
self.assertEqual(ret.status_code, 200)
self.assertEqual(cr.call_count, 1)
def testLbHeartbeat(self):
ret = self.client.get("/__lbheartbeat__")
self.assertEqual(ret.status_code, 200)
## Instruction:
Add test to make sure public facing app raises exception when it hits an error.
## Code After:
import mock
from auslib.test.web.test_client import ClientTestBase
class TestDockerflowEndpoints(ClientTestBase):
def testVersion(self):
ret = self.client.get("/__version__")
self.assertEquals(ret.data, """
{
"source":"https://github.com/mozilla/balrog",
"version":"1.0",
"commit":"abcdef123456"
}
""")
def testHeartbeat(self):
with mock.patch("auslib.global_state.dbo.rules.countRules") as cr:
ret = self.client.get("/__heartbeat__")
self.assertEqual(ret.status_code, 200)
self.assertEqual(cr.call_count, 1)
def testHeartbeatWithException(self):
with mock.patch("auslib.global_state.dbo.rules.countRules") as cr:
cr.side_effect = Exception("kabom!")
# Because there's no web server between us and the endpoint, we recieve
# the Exception directly instead of a 500 error
self.assertRaises(Exception, self.client.get, "/__heartbeat__")
self.assertEqual(cr.call_count, 1)
def testLbHeartbeat(self):
ret = self.client.get("/__lbheartbeat__")
self.assertEqual(ret.status_code, 200)
|
// ... existing code ...
def testHeartbeatWithException(self):
with mock.patch("auslib.global_state.dbo.rules.countRules") as cr:
cr.side_effect = Exception("kabom!")
# Because there's no web server between us and the endpoint, we recieve
# the Exception directly instead of a 500 error
self.assertRaises(Exception, self.client.get, "/__heartbeat__")
self.assertEqual(cr.call_count, 1)
def testLbHeartbeat(self):
// ... rest of the code ...
|
739018911befdb6804f26bc1a99dba6faa1313b7
|
mezzanine/core/auth_backends.py
|
mezzanine/core/auth_backends.py
|
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from django.contrib.auth.tokens import default_token_generator
from django.db.models import Q
from django.utils.http import base36_to_int
class MezzanineBackend(ModelBackend):
"""
Extends Django's ``ModelBackend`` to allow login via username,
email, or verification token.
Args are either ``username`` and ``password``, or ``uidb36``
and ``token``. In either case, ``is_active`` can also be given.
For login, is_active is not given, so that the login form can
raise a specific error for inactive users.
For password reset, True is given for is_active.
For signup verficiation, False is given for is_active.
"""
def authenticate(self, **kwargs):
if kwargs:
username = kwargs.pop("username", None)
if username:
username_or_email = Q(username=username) | Q(email=username)
password = kwargs.pop("password")
try:
user = User.objects.get(username_or_email, **kwargs)
except User.DoesNotExist:
pass
else:
if user.check_password(password):
return user
else:
kwargs["id"] = base36_to_int(kwargs.pop("uidb36"))
token = kwargs.pop("token")
try:
user = User.objects.get(**kwargs)
except User.DoesNotExist:
pass
else:
if default_token_generator.check_token(user, token):
return user
|
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from django.contrib.auth.tokens import default_token_generator
from django.db.models import Q
from django.utils.http import base36_to_int
class MezzanineBackend(ModelBackend):
"""
Extends Django's ``ModelBackend`` to allow login via username,
email, or verification token.
Args are either ``username`` and ``password``, or ``uidb36``
and ``token``. In either case, ``is_active`` can also be given.
For login, is_active is not given, so that the login form can
raise a specific error for inactive users.
For password reset, True is given for is_active.
For signup verficiation, False is given for is_active.
"""
def authenticate(self, **kwargs):
if kwargs:
username = kwargs.pop("username", None)
if username:
username_or_email = Q(username=username) | Q(email=username)
password = kwargs.pop("password")
try:
user = User.objects.get(username_or_email, **kwargs)
except User.DoesNotExist:
pass
else:
if user.check_password(password):
return user
else:
if 'uidb36' not in kwargs:
return
kwargs["id"] = base36_to_int(kwargs.pop("uidb36"))
token = kwargs.pop("token")
try:
user = User.objects.get(**kwargs)
except User.DoesNotExist:
pass
else:
if default_token_generator.check_token(user, token):
return user
|
Fix kwargs usage to work with other auth backends.
|
Fix kwargs usage to work with other auth backends.
|
Python
|
bsd-2-clause
|
ryneeverett/mezzanine,nikolas/mezzanine,tuxinhang1989/mezzanine,dustinrb/mezzanine,sjuxax/mezzanine,emile2016/mezzanine,orlenko/sfpirg,Kniyl/mezzanine,webounty/mezzanine,molokov/mezzanine,spookylukey/mezzanine,Skytorn86/mezzanine,christianwgd/mezzanine,jjz/mezzanine,agepoly/mezzanine,jjz/mezzanine,tuxinhang1989/mezzanine,PegasusWang/mezzanine,theclanks/mezzanine,adrian-the-git/mezzanine,SoLoHiC/mezzanine,adrian-the-git/mezzanine,agepoly/mezzanine,stephenmcd/mezzanine,SoLoHiC/mezzanine,Kniyl/mezzanine,dsanders11/mezzanine,theclanks/mezzanine,orlenko/plei,stbarnabas/mezzanine,damnfine/mezzanine,gbosh/mezzanine,dovydas/mezzanine,scarcry/snm-mezzanine,promil23/mezzanine,christianwgd/mezzanine,frankchin/mezzanine,industrydive/mezzanine,emile2016/mezzanine,mush42/mezzanine,Skytorn86/mezzanine,sjdines/mezzanine,joshcartme/mezzanine,batpad/mezzanine,saintbird/mezzanine,dovydas/mezzanine,saintbird/mezzanine,ZeroXn/mezzanine,promil23/mezzanine,biomassives/mezzanine,molokov/mezzanine,joshcartme/mezzanine,jerivas/mezzanine,Cicero-Zhao/mezzanine,tuxinhang1989/mezzanine,stephenmcd/mezzanine,Kniyl/mezzanine,jjz/mezzanine,readevalprint/mezzanine,wyzex/mezzanine,nikolas/mezzanine,AlexHill/mezzanine,adrian-the-git/mezzanine,gradel/mezzanine,orlenko/plei,SoLoHiC/mezzanine,gradel/mezzanine,vladir/mezzanine,douglaskastle/mezzanine,orlenko/sfpirg,stbarnabas/mezzanine,frankier/mezzanine,fusionbox/mezzanine,sjdines/mezzanine,molokov/mezzanine,wrwrwr/mezzanine,promil23/mezzanine,Skytorn86/mezzanine,ryneeverett/mezzanine,Cajoline/mezzanine,mush42/mezzanine,jerivas/mezzanine,dustinrb/mezzanine,saintbird/mezzanine,viaregio/mezzanine,theclanks/mezzanine,sjdines/mezzanine,cccs-web/mezzanine,wbtuomela/mezzanine,industrydive/mezzanine,gradel/mezzanine,agepoly/mezzanine,vladir/mezzanine,frankier/mezzanine,viaregio/mezzanine,webounty/mezzanine,jerivas/mezzanine,douglaskastle/mezzanine,sjuxax/mezzanine,fusionbox/mezzanine,eino-makitalo/mezzanine,ZeroXn/mezzanine,industrydive/mezzanine,dovydas/mezzani
ne,dekomote/mezzanine-modeltranslation-backport,joshcartme/mezzanine,frankchin/mezzanine,orlenko/plei,scarcry/snm-mezzanine,PegasusWang/mezzanine,PegasusWang/mezzanine,Cajoline/mezzanine,ZeroXn/mezzanine,orlenko/sfpirg,geodesign/mezzanine,dekomote/mezzanine-modeltranslation-backport,dsanders11/mezzanine,spookylukey/mezzanine,dsanders11/mezzanine,eino-makitalo/mezzanine,viaregio/mezzanine,dustinrb/mezzanine,frankier/mezzanine,ryneeverett/mezzanine,damnfine/mezzanine,wrwrwr/mezzanine,sjuxax/mezzanine,geodesign/mezzanine,wbtuomela/mezzanine,biomassives/mezzanine,biomassives/mezzanine,nikolas/mezzanine,Cicero-Zhao/mezzanine,geodesign/mezzanine,wyzex/mezzanine,AlexHill/mezzanine,wbtuomela/mezzanine,christianwgd/mezzanine,mush42/mezzanine,readevalprint/mezzanine,webounty/mezzanine,batpad/mezzanine,stephenmcd/mezzanine,guibernardino/mezzanine,douglaskastle/mezzanine,spookylukey/mezzanine,emile2016/mezzanine,vladir/mezzanine,eino-makitalo/mezzanine,Cajoline/mezzanine,dekomote/mezzanine-modeltranslation-backport,damnfine/mezzanine,wyzex/mezzanine,gbosh/mezzanine,cccs-web/mezzanine,readevalprint/mezzanine,frankchin/mezzanine,gbosh/mezzanine,guibernardino/mezzanine,scarcry/snm-mezzanine
|
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from django.contrib.auth.tokens import default_token_generator
from django.db.models import Q
from django.utils.http import base36_to_int
class MezzanineBackend(ModelBackend):
"""
Extends Django's ``ModelBackend`` to allow login via username,
email, or verification token.
Args are either ``username`` and ``password``, or ``uidb36``
and ``token``. In either case, ``is_active`` can also be given.
For login, is_active is not given, so that the login form can
raise a specific error for inactive users.
For password reset, True is given for is_active.
For signup verficiation, False is given for is_active.
"""
def authenticate(self, **kwargs):
if kwargs:
username = kwargs.pop("username", None)
if username:
username_or_email = Q(username=username) | Q(email=username)
password = kwargs.pop("password")
try:
user = User.objects.get(username_or_email, **kwargs)
except User.DoesNotExist:
pass
else:
if user.check_password(password):
return user
else:
+ if 'uidb36' not in kwargs:
+ return
kwargs["id"] = base36_to_int(kwargs.pop("uidb36"))
token = kwargs.pop("token")
try:
user = User.objects.get(**kwargs)
except User.DoesNotExist:
pass
else:
if default_token_generator.check_token(user, token):
return user
|
Fix kwargs usage to work with other auth backends.
|
## Code Before:
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from django.contrib.auth.tokens import default_token_generator
from django.db.models import Q
from django.utils.http import base36_to_int
class MezzanineBackend(ModelBackend):
"""
Extends Django's ``ModelBackend`` to allow login via username,
email, or verification token.
Args are either ``username`` and ``password``, or ``uidb36``
and ``token``. In either case, ``is_active`` can also be given.
For login, is_active is not given, so that the login form can
raise a specific error for inactive users.
For password reset, True is given for is_active.
For signup verficiation, False is given for is_active.
"""
def authenticate(self, **kwargs):
if kwargs:
username = kwargs.pop("username", None)
if username:
username_or_email = Q(username=username) | Q(email=username)
password = kwargs.pop("password")
try:
user = User.objects.get(username_or_email, **kwargs)
except User.DoesNotExist:
pass
else:
if user.check_password(password):
return user
else:
kwargs["id"] = base36_to_int(kwargs.pop("uidb36"))
token = kwargs.pop("token")
try:
user = User.objects.get(**kwargs)
except User.DoesNotExist:
pass
else:
if default_token_generator.check_token(user, token):
return user
## Instruction:
Fix kwargs usage to work with other auth backends.
## Code After:
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from django.contrib.auth.tokens import default_token_generator
from django.db.models import Q
from django.utils.http import base36_to_int
class MezzanineBackend(ModelBackend):
"""
Extends Django's ``ModelBackend`` to allow login via username,
email, or verification token.
Args are either ``username`` and ``password``, or ``uidb36``
and ``token``. In either case, ``is_active`` can also be given.
For login, is_active is not given, so that the login form can
raise a specific error for inactive users.
For password reset, True is given for is_active.
For signup verficiation, False is given for is_active.
"""
def authenticate(self, **kwargs):
if kwargs:
username = kwargs.pop("username", None)
if username:
username_or_email = Q(username=username) | Q(email=username)
password = kwargs.pop("password")
try:
user = User.objects.get(username_or_email, **kwargs)
except User.DoesNotExist:
pass
else:
if user.check_password(password):
return user
else:
if 'uidb36' not in kwargs:
return
kwargs["id"] = base36_to_int(kwargs.pop("uidb36"))
token = kwargs.pop("token")
try:
user = User.objects.get(**kwargs)
except User.DoesNotExist:
pass
else:
if default_token_generator.check_token(user, token):
return user
|
# ... existing code ...
else:
if 'uidb36' not in kwargs:
return
kwargs["id"] = base36_to_int(kwargs.pop("uidb36"))
# ... rest of the code ...
|
dc884cfd49133a9a25cc5ba6276b94dd44d18729
|
test/test_general.py
|
test/test_general.py
|
import threading
import time
import sys
from busybees import worker
from busybees import hive
import pash
class ErrWorker(worker.Worker):
def work(self, command):
proc = pash.ShellProc()
proc.run(command)
return "Exit code: %s" % proc.get_val('exit_code')
def test_hive():
apiary = hive.Hive()
apiary.create_queen('A1')
apiary.create_queen('A2')
apiary.start_queen('A1')
apiary.start_queen('A2')
jobs = ["iscsiadm -m discovery -t st -p 192.168.88.110",
"iscsiadm -m discovery -t st -p 192.168.90.110",
"iscsiadm -m discovery -t st -p 192.168.88.110"]
apiary.instruct_queen('A1', jobs, ErrWorker)
apiary.kill_queen('A1')
time.sleep(3)
this = apiary.die()
for key in this.keys():
for i in this[key]:
assert i == "Exit code: 6"
print i
sys.exit(0)
|
import threading
import time
import sys
from busybees import worker
from busybees import hive
import pash
class ErrWorker(worker.Worker):
def work(self, command):
proc = pash.ShellProc()
proc.run(command)
return "Exit code: %s" % proc.get_val('exit_code')
def test_hive():
apiary = hive.Hive()
apiary.create_queen('A1')
apiary.create_queen('A2')
apiary.start_queen('A1')
apiary.start_queen('A2')
job1 = ["iscsiadm -m discovery -t st -p 192.168.88.110",
"iscsiadm -m discovery -t st -p 192.168.90.110",
"iscsiadm -m discovery -t st -p 192.168.88.110"]
apiary.instruct_queen('A1', job1, ErrWorker)
job2 = ["ls -l ~", "date", "cal"]
apiary.instruct_queen('A2', job2)
apiary.kill_queen('A1')
time.sleep(3)
this = apiary.die()
for key in this.keys():
for i in this[key]:
assert i != '' and i != None
|
Add jobs to second test queen, add assertions
|
Add jobs to second test queen, add assertions
|
Python
|
bsd-3-clause
|
iansmcf/busybees
|
import threading
import time
import sys
from busybees import worker
from busybees import hive
import pash
class ErrWorker(worker.Worker):
def work(self, command):
proc = pash.ShellProc()
proc.run(command)
return "Exit code: %s" % proc.get_val('exit_code')
def test_hive():
apiary = hive.Hive()
apiary.create_queen('A1')
apiary.create_queen('A2')
apiary.start_queen('A1')
apiary.start_queen('A2')
- jobs = ["iscsiadm -m discovery -t st -p 192.168.88.110",
+ job1 = ["iscsiadm -m discovery -t st -p 192.168.88.110",
"iscsiadm -m discovery -t st -p 192.168.90.110",
"iscsiadm -m discovery -t st -p 192.168.88.110"]
- apiary.instruct_queen('A1', jobs, ErrWorker)
+ apiary.instruct_queen('A1', job1, ErrWorker)
+
+ job2 = ["ls -l ~", "date", "cal"]
+ apiary.instruct_queen('A2', job2)
apiary.kill_queen('A1')
time.sleep(3)
this = apiary.die()
for key in this.keys():
for i in this[key]:
+ assert i != '' and i != None
- assert i == "Exit code: 6"
- print i
- sys.exit(0)
|
Add jobs to second test queen, add assertions
|
## Code Before:
import threading
import time
import sys
from busybees import worker
from busybees import hive
import pash
class ErrWorker(worker.Worker):
def work(self, command):
proc = pash.ShellProc()
proc.run(command)
return "Exit code: %s" % proc.get_val('exit_code')
def test_hive():
apiary = hive.Hive()
apiary.create_queen('A1')
apiary.create_queen('A2')
apiary.start_queen('A1')
apiary.start_queen('A2')
jobs = ["iscsiadm -m discovery -t st -p 192.168.88.110",
"iscsiadm -m discovery -t st -p 192.168.90.110",
"iscsiadm -m discovery -t st -p 192.168.88.110"]
apiary.instruct_queen('A1', jobs, ErrWorker)
apiary.kill_queen('A1')
time.sleep(3)
this = apiary.die()
for key in this.keys():
for i in this[key]:
assert i == "Exit code: 6"
print i
sys.exit(0)
## Instruction:
Add jobs to second test queen, add assertions
## Code After:
import threading
import time
import sys
from busybees import worker
from busybees import hive
import pash
class ErrWorker(worker.Worker):
def work(self, command):
proc = pash.ShellProc()
proc.run(command)
return "Exit code: %s" % proc.get_val('exit_code')
def test_hive():
apiary = hive.Hive()
apiary.create_queen('A1')
apiary.create_queen('A2')
apiary.start_queen('A1')
apiary.start_queen('A2')
job1 = ["iscsiadm -m discovery -t st -p 192.168.88.110",
"iscsiadm -m discovery -t st -p 192.168.90.110",
"iscsiadm -m discovery -t st -p 192.168.88.110"]
apiary.instruct_queen('A1', job1, ErrWorker)
job2 = ["ls -l ~", "date", "cal"]
apiary.instruct_queen('A2', job2)
apiary.kill_queen('A1')
time.sleep(3)
this = apiary.die()
for key in this.keys():
for i in this[key]:
assert i != '' and i != None
|
// ... existing code ...
job1 = ["iscsiadm -m discovery -t st -p 192.168.88.110",
"iscsiadm -m discovery -t st -p 192.168.90.110",
// ... modified code ...
"iscsiadm -m discovery -t st -p 192.168.88.110"]
apiary.instruct_queen('A1', job1, ErrWorker)
job2 = ["ls -l ~", "date", "cal"]
apiary.instruct_queen('A2', job2)
...
for i in this[key]:
assert i != '' and i != None
// ... rest of the code ...
|
d410a5295b67b17ca1cdc4d53ed8f776159278bc
|
json2parquet/__init__.py
|
json2parquet/__init__.py
|
from __future__ import unicode_literals
from .client import load_json, ingest_data, write_parquet, convert_json
__title__ = 'json2parquet'
__version__ = '0.0.24'
__all__ = ['load_json', 'ingest_data', 'write_parquet', 'convert_json', 'write_parquet_dataset']
|
from __future__ import unicode_literals
from .client import load_json, ingest_data, write_parquet, convert_json, write_parquet_dataset
__title__ = 'json2parquet'
__version__ = '0.0.24'
__all__ = ['load_json', 'ingest_data', 'write_parquet', 'convert_json', 'write_parquet_dataset']
|
Make client.write_parquet_dataset available for export
|
Make client.write_parquet_dataset available for export
This commit adds write_parquet_dataset to the imports from .client in
__init__.py
Previously, `from json2parquet import write_parquet_dataset` would
result in an error: `ImportError: cannot import name
'write_parquet_dataset' from 'json2parquet' `
|
Python
|
mit
|
andrewgross/json2parquet
|
from __future__ import unicode_literals
- from .client import load_json, ingest_data, write_parquet, convert_json
+ from .client import load_json, ingest_data, write_parquet, convert_json, write_parquet_dataset
__title__ = 'json2parquet'
__version__ = '0.0.24'
__all__ = ['load_json', 'ingest_data', 'write_parquet', 'convert_json', 'write_parquet_dataset']
|
Make client.write_parquet_dataset available for export
|
## Code Before:
from __future__ import unicode_literals
from .client import load_json, ingest_data, write_parquet, convert_json
__title__ = 'json2parquet'
__version__ = '0.0.24'
__all__ = ['load_json', 'ingest_data', 'write_parquet', 'convert_json', 'write_parquet_dataset']
## Instruction:
Make client.write_parquet_dataset available for export
## Code After:
from __future__ import unicode_literals
from .client import load_json, ingest_data, write_parquet, convert_json, write_parquet_dataset
__title__ = 'json2parquet'
__version__ = '0.0.24'
__all__ = ['load_json', 'ingest_data', 'write_parquet', 'convert_json', 'write_parquet_dataset']
|
// ... existing code ...
from .client import load_json, ingest_data, write_parquet, convert_json, write_parquet_dataset
// ... rest of the code ...
|
3ca3f9473d7031ef9536f56c253ba0a4b7e1ee6e
|
test/unit/ggrc/converters/test_query_helper.py
|
test/unit/ggrc/converters/test_query_helper.py
|
import unittest
import mock
from ggrc.converters import query_helper
class TestQueryHelper(unittest.TestCase):
def test_expression_keys(self):
""" test expression keys function
Make sure it works with:
empty query
simple query
complex query
invalid complex query
"""
query = mock.MagicMock()
helper = query_helper.QueryHelper(query)
expressions = [
(set(), {}),
(set(["key_1"]), {
"left": "key_1",
"op": {"name": "="},
"right": "",
}),
(set(["key_1", "key_2"]), {
"left": {
"left": "key_2",
"op": {"name": "="},
"right": "",
},
"op": {"name": "AND"},
"right": {
"left": "key_1",
"op": {"name": "!="},
"right": "",
},
}),
(set(), {
"left": {
"left": "5",
"op": {"name": "="},
"right": "",
},
"op": {"name": "="},
"right": {
"left": "key_1",
"op": {"name": "!="},
"right": "",
},
}),
]
for expected_result, expression in expressions:
self.assertEqual(expected_result, helper.expression_keys(expression))
|
import unittest
import mock
from ggrc.converters import query_helper
class TestQueryHelper(unittest.TestCase):
def test_expression_keys(self):
""" test expression keys function
Make sure it works with:
empty query
simple query
complex query
invalid complex query
"""
# pylint: disable=protected-access
# needed for testing protected function inside the query helper
query = mock.MagicMock()
helper = query_helper.QueryHelper(query)
expressions = [
(set(), {}),
(set(["key_1"]), {
"left": "key_1",
"op": {"name": "="},
"right": "",
}),
(set(["key_1", "key_2"]), {
"left": {
"left": "key_2",
"op": {"name": "="},
"right": "",
},
"op": {"name": "AND"},
"right": {
"left": "key_1",
"op": {"name": "!="},
"right": "",
},
}),
(set(), {
"left": {
"left": "5",
"op": {"name": "="},
"right": "",
},
"op": {"name": "="},
"right": {
"left": "key_1",
"op": {"name": "!="},
"right": "",
},
}),
]
for expected_result, expression in expressions:
self.assertEqual(expected_result, helper._expression_keys(expression))
|
Update unit tests with new query helper names
|
Update unit tests with new query helper names
|
Python
|
apache-2.0
|
j0gurt/ggrc-core,AleksNeStu/ggrc-core,plamut/ggrc-core,j0gurt/ggrc-core,edofic/ggrc-core,andrei-karalionak/ggrc-core,josthkko/ggrc-core,andrei-karalionak/ggrc-core,AleksNeStu/ggrc-core,VinnieJohns/ggrc-core,AleksNeStu/ggrc-core,selahssea/ggrc-core,andrei-karalionak/ggrc-core,plamut/ggrc-core,plamut/ggrc-core,josthkko/ggrc-core,AleksNeStu/ggrc-core,j0gurt/ggrc-core,kr41/ggrc-core,andrei-karalionak/ggrc-core,kr41/ggrc-core,j0gurt/ggrc-core,selahssea/ggrc-core,kr41/ggrc-core,edofic/ggrc-core,edofic/ggrc-core,VinnieJohns/ggrc-core,josthkko/ggrc-core,josthkko/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,edofic/ggrc-core,VinnieJohns/ggrc-core,selahssea/ggrc-core,VinnieJohns/ggrc-core,kr41/ggrc-core
|
import unittest
import mock
from ggrc.converters import query_helper
class TestQueryHelper(unittest.TestCase):
def test_expression_keys(self):
""" test expression keys function
Make sure it works with:
empty query
simple query
complex query
invalid complex query
"""
+ # pylint: disable=protected-access
+ # needed for testing protected function inside the query helper
query = mock.MagicMock()
helper = query_helper.QueryHelper(query)
expressions = [
(set(), {}),
(set(["key_1"]), {
"left": "key_1",
"op": {"name": "="},
"right": "",
}),
(set(["key_1", "key_2"]), {
"left": {
"left": "key_2",
"op": {"name": "="},
"right": "",
},
"op": {"name": "AND"},
"right": {
"left": "key_1",
"op": {"name": "!="},
"right": "",
},
}),
(set(), {
"left": {
"left": "5",
"op": {"name": "="},
"right": "",
},
"op": {"name": "="},
"right": {
"left": "key_1",
"op": {"name": "!="},
"right": "",
},
}),
]
for expected_result, expression in expressions:
- self.assertEqual(expected_result, helper.expression_keys(expression))
+ self.assertEqual(expected_result, helper._expression_keys(expression))
|
Update unit tests with new query helper names
|
## Code Before:
import unittest
import mock
from ggrc.converters import query_helper
class TestQueryHelper(unittest.TestCase):
def test_expression_keys(self):
""" test expression keys function
Make sure it works with:
empty query
simple query
complex query
invalid complex query
"""
query = mock.MagicMock()
helper = query_helper.QueryHelper(query)
expressions = [
(set(), {}),
(set(["key_1"]), {
"left": "key_1",
"op": {"name": "="},
"right": "",
}),
(set(["key_1", "key_2"]), {
"left": {
"left": "key_2",
"op": {"name": "="},
"right": "",
},
"op": {"name": "AND"},
"right": {
"left": "key_1",
"op": {"name": "!="},
"right": "",
},
}),
(set(), {
"left": {
"left": "5",
"op": {"name": "="},
"right": "",
},
"op": {"name": "="},
"right": {
"left": "key_1",
"op": {"name": "!="},
"right": "",
},
}),
]
for expected_result, expression in expressions:
self.assertEqual(expected_result, helper.expression_keys(expression))
## Instruction:
Update unit tests with new query helper names
## Code After:
import unittest
import mock
from ggrc.converters import query_helper
class TestQueryHelper(unittest.TestCase):
def test_expression_keys(self):
""" test expression keys function
Make sure it works with:
empty query
simple query
complex query
invalid complex query
"""
# pylint: disable=protected-access
# needed for testing protected function inside the query helper
query = mock.MagicMock()
helper = query_helper.QueryHelper(query)
expressions = [
(set(), {}),
(set(["key_1"]), {
"left": "key_1",
"op": {"name": "="},
"right": "",
}),
(set(["key_1", "key_2"]), {
"left": {
"left": "key_2",
"op": {"name": "="},
"right": "",
},
"op": {"name": "AND"},
"right": {
"left": "key_1",
"op": {"name": "!="},
"right": "",
},
}),
(set(), {
"left": {
"left": "5",
"op": {"name": "="},
"right": "",
},
"op": {"name": "="},
"right": {
"left": "key_1",
"op": {"name": "!="},
"right": "",
},
}),
]
for expected_result, expression in expressions:
self.assertEqual(expected_result, helper._expression_keys(expression))
|
# ... existing code ...
"""
# pylint: disable=protected-access
# needed for testing protected function inside the query helper
query = mock.MagicMock()
# ... modified code ...
for expected_result, expression in expressions:
self.assertEqual(expected_result, helper._expression_keys(expression))
# ... rest of the code ...
|
69b33f8f87b6dfc0fbaf96eca25c02535c9e09e7
|
src/test/almost_equal.py
|
src/test/almost_equal.py
|
def datetime_almost_equal(datetime1, datetime2, seconds=60):
dd = datetime1 - datetime2
sd = (dd.days * 24 * 60 * 60) + dd.seconds
return abs(sd) <= seconds
|
from datetime import datetime
import pytz
def datetime_almost_equal(datetime1:datetime, datetime2:datetime, seconds:int=60):
if not(datetime1.tzinfo):
datetime1 = pytz.utc.localize(datetime1)
datetime1 = datetime1.astimezone(pytz.utc)
if not(datetime2.tzinfo):
datetime2 = pytz.utc.localize(datetime2)
datetime2 = datetime2.astimezone(pytz.utc)
dd = datetime1 - datetime2
sd = (dd.days * 24 * 60 * 60) + dd.seconds
return abs(sd) <= seconds
|
Make sure all datetimes are UTC
|
Make sure all datetimes are UTC
|
Python
|
apache-2.0
|
sffjunkie/astral,sffjunkie/astral
|
+ from datetime import datetime
+
+ import pytz
+
- def datetime_almost_equal(datetime1, datetime2, seconds=60):
+ def datetime_almost_equal(datetime1:datetime, datetime2:datetime, seconds:int=60):
+ if not(datetime1.tzinfo):
+ datetime1 = pytz.utc.localize(datetime1)
+ datetime1 = datetime1.astimezone(pytz.utc)
+
+ if not(datetime2.tzinfo):
+ datetime2 = pytz.utc.localize(datetime2)
+ datetime2 = datetime2.astimezone(pytz.utc)
+
dd = datetime1 - datetime2
sd = (dd.days * 24 * 60 * 60) + dd.seconds
return abs(sd) <= seconds
|
Make sure all datetimes are UTC
|
## Code Before:
def datetime_almost_equal(datetime1, datetime2, seconds=60):
dd = datetime1 - datetime2
sd = (dd.days * 24 * 60 * 60) + dd.seconds
return abs(sd) <= seconds
## Instruction:
Make sure all datetimes are UTC
## Code After:
from datetime import datetime
import pytz
def datetime_almost_equal(datetime1:datetime, datetime2:datetime, seconds:int=60):
if not(datetime1.tzinfo):
datetime1 = pytz.utc.localize(datetime1)
datetime1 = datetime1.astimezone(pytz.utc)
if not(datetime2.tzinfo):
datetime2 = pytz.utc.localize(datetime2)
datetime2 = datetime2.astimezone(pytz.utc)
dd = datetime1 - datetime2
sd = (dd.days * 24 * 60 * 60) + dd.seconds
return abs(sd) <= seconds
|
...
from datetime import datetime
import pytz
def datetime_almost_equal(datetime1:datetime, datetime2:datetime, seconds:int=60):
if not(datetime1.tzinfo):
datetime1 = pytz.utc.localize(datetime1)
datetime1 = datetime1.astimezone(pytz.utc)
if not(datetime2.tzinfo):
datetime2 = pytz.utc.localize(datetime2)
datetime2 = datetime2.astimezone(pytz.utc)
dd = datetime1 - datetime2
...
|
6d663d1d0172b716e0dccc1f617b5a09b2905b67
|
script/upload-windows-pdb.py
|
script/upload-windows-pdb.py
|
import os
import glob
from lib.util import execute, rm_rf, safe_mkdir, s3put, s3_config
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
SYMBOLS_DIR = 'dist\\symbols'
PDB_LIST = [
'out\\Release\\atom.exe.pdb',
'vendor\\brightray\\vendor\\download\\libchromiumcontent\\Release\\chromiumcontent.dll.pdb',
]
def main():
os.chdir(SOURCE_ROOT)
rm_rf(SYMBOLS_DIR)
safe_mkdir(SYMBOLS_DIR)
for pdb in PDB_LIST:
run_symstore(pdb, SYMBOLS_DIR, 'AtomShell')
bucket, access_key, secret_key = s3_config()
files = glob.glob(SYMBOLS_DIR + '/*.pdb/*/*.pdb')
upload_symbols(bucket, access_key, secret_key, files)
def run_symstore(pdb, dest, product):
execute(['symstore', 'add', '/r', '/f', pdb, '/s', dest, '/t', product])
def upload_symbols(bucket, access_key, secret_key, files):
s3put(bucket, access_key, secret_key, SYMBOLS_DIR, 'atom-shell/symbols', files)
if __name__ == '__main__':
import sys
sys.exit(main())
|
import os
import glob
from lib.util import execute, rm_rf, safe_mkdir, s3put, s3_config
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
SYMBOLS_DIR = 'dist\\symbols'
PDB_LIST = [
'out\\Release\\atom.exe.pdb',
'vendor\\brightray\\vendor\\download\\libchromiumcontent\\Release\\chromiumcontent.dll.pdb',
]
def main():
os.chdir(SOURCE_ROOT)
rm_rf(SYMBOLS_DIR)
safe_mkdir(SYMBOLS_DIR)
for pdb in PDB_LIST:
run_symstore(pdb, SYMBOLS_DIR, 'AtomShell')
bucket, access_key, secret_key = s3_config()
files = glob.glob(SYMBOLS_DIR + '/*.pdb/*/*.pdb')
files = [f.lower() for f in files]
upload_symbols(bucket, access_key, secret_key, files)
def run_symstore(pdb, dest, product):
execute(['symstore', 'add', '/r', '/f', pdb, '/s', dest, '/t', product])
def upload_symbols(bucket, access_key, secret_key, files):
s3put(bucket, access_key, secret_key, SYMBOLS_DIR, 'atom-shell/symbols', files)
if __name__ == '__main__':
import sys
sys.exit(main())
|
Use lowercase for symbol paths
|
Use lowercase for symbol paths
|
Python
|
mit
|
wolfflow/electron,shockone/electron,ianscrivener/electron,oiledCode/electron,christian-bromann/electron,fffej/electron,darwin/electron,digideskio/electron,jannishuebl/electron,darwin/electron,lrlna/electron,faizalpribadi/electron,lzpfmh/electron,rsvip/electron,mubassirhayat/electron,bwiggs/electron,jiaz/electron,gstack/infinium-shell,bobwol/electron,meowlab/electron,egoist/electron,simongregory/electron,vHanda/electron,felixrieseberg/electron,nekuz0r/electron,wolfflow/electron,rreimann/electron,fritx/electron,destan/electron,vaginessa/electron,michaelchiche/electron,stevemao/electron,John-Lin/electron,JussMee15/electron,jlhbaseball15/electron,rhencke/electron,stevemao/electron,brave/electron,fomojola/electron,thompsonemerson/electron,gamedevsam/electron,shiftkey/electron,Neron-X5/electron,fritx/electron,bruce/electron,voidbridge/electron,BionicClick/electron,lzpfmh/electron,howmuchcomputer/electron,RIAEvangelist/electron,Gerhut/electron,howmuchcomputer/electron,tinydew4/electron,maxogden/atom-shell,seanchas116/electron,joaomoreno/atom-shell,destan/electron,mattdesl/electron,iftekeriba/electron,brave/muon,the-ress/electron,bright-sparks/electron,christian-bromann/electron,rprichard/electron,darwin/electron,jonatasfreitasv/electron,adamjgray/electron,ankitaggarwal011/electron,wan-qy/electron,medixdev/electron,rsvip/electron,bbondy/electron,nagyistoce/electron-atom-shell,medixdev/electron,mhkeller/electron,eriser/electron,deed02392/electron,rhencke/electron,seanchas116/electron,dkfiresky/electron,jsutcodes/electron,pombredanne/electron,astoilkov/electron,MaxGraey/electron,deepak1556/atom-shell,simonfork/electron,gbn972/electron,greyhwndz/electron,edulan/electron,roadev/electron,robinvandernoord/electron,kazupon/electron,rajatsingla28/electron,howmuchcomputer/electron,meowlab/electron,tomashanacek/electron,chriskdon/electron,evgenyzinoviev/electron,joneit/electron,gabriel/electron,jiaz/electron,arturts/electron,greyhwndz/electron,edulan/electron,micalan/electron,kazupon
/electron,d-salas/electron,eric-seekas/electron,thompsonemerson/electron,RobertJGabriel/electron,vaginessa/electron,electron/electron,iftekeriba/electron,natgolov/electron,xfstudio/electron,ervinb/electron,xfstudio/electron,xiruibing/electron,arturts/electron,renaesop/electron,roadev/electron,fffej/electron,thomsonreuters/electron,coderhaoxin/electron,twolfson/electron,evgenyzinoviev/electron,RobertJGabriel/electron,fomojola/electron,jsutcodes/electron,nagyistoce/electron-atom-shell,leftstick/electron,noikiy/electron,BionicClick/electron,LadyNaggaga/electron,gstack/infinium-shell,yalexx/electron,webmechanicx/electron,rajatsingla28/electron,greyhwndz/electron,webmechanicx/electron,carsonmcdonald/electron,bwiggs/electron,vaginessa/electron,jtburke/electron,beni55/electron,eriser/electron,JesselJohn/electron,jtburke/electron,jaanus/electron,Andrey-Pavlov/electron,robinvandernoord/electron,kokdemo/electron,tincan24/electron,miniak/electron,rajatsingla28/electron,noikiy/electron,nicobot/electron,setzer777/electron,gabriel/electron,zhakui/electron,jonatasfreitasv/electron,synaptek/electron,subblue/electron,kokdemo/electron,Evercoder/electron,aecca/electron,neutrous/electron,ianscrivener/electron,stevekinney/electron,adcentury/electron,Floato/electron,deepak1556/atom-shell,bbondy/electron,nicobot/electron,bitemyapp/electron,jacksondc/electron,jannishuebl/electron,mhkeller/electron,anko/electron,RIAEvangelist/electron,SufianHassan/electron,eric-seekas/electron,mattotodd/electron,LadyNaggaga/electron,shaundunne/electron,vHanda/electron,pombredanne/electron,chrisswk/electron,DivyaKMenon/electron,stevekinney/electron,nicholasess/electron,Evercoder/electron,bright-sparks/electron,GoooIce/electron,zhakui/electron,trankmichael/electron,yan-foto/electron,jjz/electron,kikong/electron,gabrielPeart/electron,renaesop/electron,leolujuyi/electron,eric-seekas/electron,farmisen/electron,sircharleswatson/electron,tinydew4/electron,natgolov/electron,kikong/electron,pirafrank/electron,carson
mcdonald/electron,jjz/electron,fffej/electron,adamjgray/electron,leethomas/electron,leolujuyi/electron,chriskdon/electron,meowlab/electron,Evercoder/electron,maxogden/atom-shell,yan-foto/electron,jlord/electron,stevekinney/electron,jiaz/electron,cos2004/electron,pandoraui/electron,GoooIce/electron,thingsinjars/electron,shockone/electron,electron/electron,digideskio/electron,sky7sea/electron,farmisen/electron,bpasero/electron,gbn972/electron,nekuz0r/electron,kenmozi/electron,voidbridge/electron,aaron-goshine/electron,aliib/electron,faizalpribadi/electron,beni55/electron,adamjgray/electron,wan-qy/electron,neutrous/electron,the-ress/electron,shockone/electron,shockone/electron,pandoraui/electron,bbondy/electron,miniak/electron,shockone/electron,bitemyapp/electron,systembugtj/electron,neutrous/electron,yalexx/electron,tylergibson/electron,gabriel/electron,felixrieseberg/electron,egoist/electron,shaundunne/electron,Rokt33r/electron,gamedevsam/electron,mhkeller/electron,preco21/electron,soulteary/electron,robinvandernoord/electron,DivyaKMenon/electron,gabrielPeart/electron,bright-sparks/electron,mrwizard82d1/electron,yan-foto/electron,renaesop/electron,the-ress/electron,subblue/electron,tinydew4/electron,bwiggs/electron,chrisswk/electron,jhen0409/electron,lzpfmh/electron,brave/muon,trankmichael/electron,RobertJGabriel/electron,renaesop/electron,roadev/electron,tonyganch/electron,baiwyc119/electron,stevemao/electron,jonatasfreitasv/electron,mubassirhayat/electron,arturts/electron,pandoraui/electron,nekuz0r/electron,vipulroxx/electron,jhen0409/electron,jhen0409/electron,xiruibing/electron,adcentury/electron,fomojola/electron,coderhaoxin/electron,joneit/electron,dahal/electron,nicobot/electron,bwiggs/electron,aaron-goshine/electron,howmuchcomputer/electron,bpasero/electron,cos2004/electron,deed02392/electron,fireball-x/atom-shell,dahal/electron,jtburke/electron,tomashanacek/electron,bobwol/electron,farmisen/electron,evgenyzinoviev/electron,Zagorakiss/electron,greyhwndz/elect
ron,shaundunne/electron,Faiz7412/electron,howmuchcomputer/electron,RIAEvangelist/electron,takashi/electron,davazp/electron,jsutcodes/electron,bobwol/electron,aaron-goshine/electron,aaron-goshine/electron,mirrh/electron,michaelchiche/electron,BionicClick/electron,baiwyc119/electron,arturts/electron,medixdev/electron,matiasinsaurralde/electron,bpasero/electron,MaxWhere/electron,shennushi/electron,aliib/electron,natgolov/electron,kcrt/electron,jiaz/electron,adcentury/electron,xiruibing/electron,chriskdon/electron,Ivshti/electron,mrwizard82d1/electron,jjz/electron,leftstick/electron,twolfson/electron,brave/muon,micalan/electron,jaanus/electron,nekuz0r/electron,micalan/electron,tincan24/electron,shockone/electron,fabien-d/electron,twolfson/electron,kcrt/electron,trankmichael/electron,LadyNaggaga/electron,LadyNaggaga/electron,Zagorakiss/electron,egoist/electron,hokein/atom-shell,JesselJohn/electron,ianscrivener/electron,voidbridge/electron,davazp/electron,saronwei/electron,MaxWhere/electron,rreimann/electron,maxogden/atom-shell,shennushi/electron,tincan24/electron,jsutcodes/electron,takashi/electron,adamjgray/electron,vHanda/electron,sshiting/electron,LadyNaggaga/electron,zhakui/electron,MaxGraey/electron,ianscrivener/electron,GoooIce/electron,jsutcodes/electron,jaanus/electron,chriskdon/electron,Gerhut/electron,wolfflow/electron,MaxWhere/electron,John-Lin/electron,deed02392/electron,mrwizard82d1/electron,abhishekgahlot/electron,etiktin/electron,brave/electron,thingsinjars/electron,mjaniszew/electron,rreimann/electron,John-Lin/electron,kenmozi/electron,arturts/electron,cos2004/electron,d-salas/electron,BionicClick/electron,biblerule/UMCTelnetHub,jonatasfreitasv/electron,Neron-X5/electron,Evercoder/electron,kikong/electron,thompsonemerson/electron,jonatasfreitasv/electron,jannishuebl/electron,shennushi/electron,eric-seekas/electron,bwiggs/electron,dkfiresky/electron,jlord/electron,Neron-X5/electron,icattlecoder/electron,Zagorakiss/electron,tincan24/electron,joneit/electron
,kostia/electron,pombredanne/electron,Rokt33r/electron,matiasinsaurralde/electron,jlhbaseball15/electron,dahal/electron,systembugtj/electron,tinydew4/electron,gstack/infinium-shell,bruce/electron,BionicClick/electron,gamedevsam/electron,bpasero/electron,bitemyapp/electron,micalan/electron,dongjoon-hyun/electron,twolfson/electron,aecca/electron,timruffles/electron,subblue/electron,nekuz0r/electron,minggo/electron,jtburke/electron,rsvip/electron,zhakui/electron,bbondy/electron,preco21/electron,brenca/electron,vaginessa/electron,leolujuyi/electron,cqqccqc/electron,jannishuebl/electron,jlord/electron,jcblw/electron,zhakui/electron,rhencke/electron,meowlab/electron,MaxGraey/electron,trigrass2/electron,thompsonemerson/electron,RIAEvangelist/electron,Andrey-Pavlov/electron,mattdesl/electron,bpasero/electron,soulteary/electron,shiftkey/electron,rreimann/electron,jlhbaseball15/electron,benweissmann/electron,trigrass2/electron,chrisswk/electron,tonyganch/electron,LadyNaggaga/electron,meowlab/electron,roadev/electron,tomashanacek/electron,pombredanne/electron,seanchas116/electron,rprichard/electron,thingsinjars/electron,Rokt33r/electron,MaxGraey/electron,takashi/electron,jtburke/electron,cqqccqc/electron,Jonekee/electron,Jacobichou/electron,sky7sea/electron,wan-qy/electron,voidbridge/electron,maxogden/atom-shell,dkfiresky/electron,DivyaKMenon/electron,simongregory/electron,Jacobichou/electron,lrlna/electron,sshiting/electron,gerhardberger/electron,yalexx/electron,gerhardberger/electron,coderhaoxin/electron,vipulroxx/electron,Ivshti/electron,micalan/electron,trigrass2/electron,evgenyzinoviev/electron,leethomas/electron,astoilkov/electron,cqqccqc/electron,systembugtj/electron,Floato/electron,gerhardberger/electron,tylergibson/electron,cqqccqc/electron,synaptek/electron,tincan24/electron,rreimann/electron,iftekeriba/electron,preco21/electron,gbn972/electron,mjaniszew/electron,joaomoreno/atom-shell,gabriel/electron,vipulroxx/electron,kazupon/electron,Zagorakiss/electron,gamedevsam
/electron,tincan24/electron,mubassirhayat/electron,electron/electron,deed02392/electron,bwiggs/electron,carsonmcdonald/electron,systembugtj/electron,ervinb/electron,posix4e/electron,synaptek/electron,aecca/electron,Jacobichou/electron,preco21/electron,yalexx/electron,bruce/electron,etiktin/electron,Andrey-Pavlov/electron,sshiting/electron,felixrieseberg/electron,rprichard/electron,fabien-d/electron,GoooIce/electron,adamjgray/electron,maxogden/atom-shell,vaginessa/electron,wan-qy/electron,nicholasess/electron,dkfiresky/electron,fabien-d/electron,beni55/electron,takashi/electron,deed02392/electron,stevekinney/electron,gabriel/electron,michaelchiche/electron,saronwei/electron,mjaniszew/electron,voidbridge/electron,Faiz7412/electron,carsonmcdonald/electron,Faiz7412/electron,baiwyc119/electron,posix4e/electron,adamjgray/electron,vipulroxx/electron,the-ress/electron,anko/electron,etiktin/electron,seanchas116/electron,MaxWhere/electron,aichingm/electron,RIAEvangelist/electron,Jacobichou/electron,jaanus/electron,nicobot/electron,xfstudio/electron,hokein/atom-shell,coderhaoxin/electron,neutrous/electron,SufianHassan/electron,Rokt33r/electron,RobertJGabriel/electron,tylergibson/electron,lzpfmh/electron,ankitaggarwal011/electron,joaomoreno/atom-shell,medixdev/electron,Floato/electron,mjaniszew/electron,jonatasfreitasv/electron,gamedevsam/electron,stevekinney/electron,gerhardberger/electron,jcblw/electron,timruffles/electron,shiftkey/electron,noikiy/electron,yalexx/electron,cqqccqc/electron,Gerhut/electron,tonyganch/electron,IonicaBizauKitchen/electron,joaomoreno/atom-shell,ervinb/electron,bitemyapp/electron,noikiy/electron,yan-foto/electron,smczk/electron,ankitaggarwal011/electron,trankmichael/electron,stevekinney/electron,Gerhut/electron,mirrh/electron,John-Lin/electron,mirrh/electron,xfstudio/electron,pandoraui/electron,sircharleswatson/electron,mattotodd/electron,JussMee15/electron,gabrielPeart/electron,natgolov/electron,tomashanacek/electron,cos2004/electron,abhishekgahlot
/electron,hokein/atom-shell,astoilkov/electron,jannishuebl/electron,baiwyc119/electron,webmechanicx/electron,baiwyc119/electron,setzer777/electron,jacksondc/electron,farmisen/electron,posix4e/electron,soulteary/electron,natgolov/electron,anko/electron,the-ress/electron,nagyistoce/electron-atom-shell,joneit/electron,ianscrivener/electron,eric-seekas/electron,ervinb/electron,fabien-d/electron,aichingm/electron,bbondy/electron,michaelchiche/electron,Gerhut/electron,mhkeller/electron,preco21/electron,vaginessa/electron,dkfiresky/electron,simonfork/electron,setzer777/electron,systembugtj/electron,thomsonreuters/electron,d-salas/electron,trigrass2/electron,xiruibing/electron,kostia/electron,felixrieseberg/electron,cos2004/electron,tinydew4/electron,biblerule/UMCTelnetHub,minggo/electron,Zagorakiss/electron,carsonmcdonald/electron,stevemao/electron,JesselJohn/electron,timruffles/electron,synaptek/electron,jlhbaseball15/electron,Jonekee/electron,bruce/electron,fomojola/electron,eric-seekas/electron,fireball-x/atom-shell,greyhwndz/electron,jacksondc/electron,matiasinsaurralde/electron,saronwei/electron,kazupon/electron,rajatsingla28/electron,brenca/electron,dongjoon-hyun/electron,IonicaBizauKitchen/electron,arusakov/electron,nekuz0r/electron,kazupon/electron,thomsonreuters/electron,mattdesl/electron,simonfork/electron,smczk/electron,nicholasess/electron,Neron-X5/electron,eriser/electron,GoooIce/electron,bright-sparks/electron,faizalpribadi/electron,bitemyapp/electron,mjaniszew/electron,eriser/electron,soulteary/electron,synaptek/electron,gabrielPeart/electron,simonfork/electron,mirrh/electron,mhkeller/electron,nicholasess/electron,fritx/electron,howmuchcomputer/electron,tonyganch/electron,posix4e/electron,kenmozi/electron,xfstudio/electron,JussMee15/electron,Jonekee/electron,aecca/electron,sircharleswatson/electron,icattlecoder/electron,jiaz/electron,renaesop/electron,rreimann/electron,Andrey-Pavlov/electron,thomsonreuters/electron,rhencke/electron,simongregory/electron,Robe
rtJGabriel/electron,simonfork/electron,Andrey-Pavlov/electron,nagyistoce/electron-atom-shell,wan-qy/electron,pombredanne/electron,thingsinjars/electron,evgenyzinoviev/electron,lrlna/electron,setzer777/electron,benweissmann/electron,robinvandernoord/electron,aliib/electron,Andrey-Pavlov/electron,darwin/electron,shiftkey/electron,twolfson/electron,benweissmann/electron,shiftkey/electron,DivyaKMenon/electron,smczk/electron,wan-qy/electron,egoist/electron,pirafrank/electron,jcblw/electron,tinydew4/electron,leethomas/electron,evgenyzinoviev/electron,Jonekee/electron,jjz/electron,vipulroxx/electron,chrisswk/electron,minggo/electron,miniak/electron,RobertJGabriel/electron,fabien-d/electron,biblerule/UMCTelnetHub,kikong/electron,neutrous/electron,mirrh/electron,JussMee15/electron,dongjoon-hyun/electron,seanchas116/electron,minggo/electron,wolfflow/electron,aliib/electron,xiruibing/electron,jcblw/electron,adcentury/electron,rsvip/electron,oiledCode/electron,MaxWhere/electron,Neron-X5/electron,rajatsingla28/electron,faizalpribadi/electron,digideskio/electron,xiruibing/electron,DivyaKMenon/electron,Rokt33r/electron,bright-sparks/electron,GoooIce/electron,Neron-X5/electron,electron/electron,brave/electron,deed02392/electron,vHanda/electron,destan/electron,DivyaKMenon/electron,jlhbaseball15/electron,sky7sea/electron,fffej/electron,oiledCode/electron,jiaz/electron,jlord/electron,brave/muon,kostia/electron,bruce/electron,setzer777/electron,mjaniszew/electron,sircharleswatson/electron,saronwei/electron,zhakui/electron,deepak1556/atom-shell,dahal/electron,ervinb/electron,John-Lin/electron,chrisswk/electron,jjz/electron,jacksondc/electron,d-salas/electron,destan/electron,soulteary/electron,Ivshti/electron,nicholasess/electron,baiwyc119/electron,edulan/electron,natgolov/electron,posix4e/electron,leftstick/electron,bright-sparks/electron,farmisen/electron,beni55/electron,jjz/electron,yalexx/electron,Jacobichou/electron,fffej/electron,leethomas/electron,gbn972/electron,matiasinsaurralde
/electron,Ivshti/electron,bobwol/electron,Floato/electron,IonicaBizauKitchen/electron,stevemao/electron,fireball-x/atom-shell,pirafrank/electron,SufianHassan/electron,etiktin/electron,JesselJohn/electron,vHanda/electron,icattlecoder/electron,gamedevsam/electron,stevemao/electron,biblerule/UMCTelnetHub,destan/electron,shaundunne/electron,anko/electron,shennushi/electron,fritx/electron,kostia/electron,faizalpribadi/electron,anko/electron,meowlab/electron,biblerule/UMCTelnetHub,seanchas116/electron,jacksondc/electron,leolujuyi/electron,robinvandernoord/electron,MaxWhere/electron,IonicaBizauKitchen/electron,shennushi/electron,simongregory/electron,Faiz7412/electron,rhencke/electron,miniak/electron,mrwizard82d1/electron,kokdemo/electron,michaelchiche/electron,faizalpribadi/electron,greyhwndz/electron,electron/electron,mattotodd/electron,fomojola/electron,arusakov/electron,arusakov/electron,JesselJohn/electron,nicobot/electron,bpasero/electron,kokdemo/electron,SufianHassan/electron,jlord/electron,roadev/electron,christian-bromann/electron,sky7sea/electron,IonicaBizauKitchen/electron,trigrass2/electron,adcentury/electron,adcentury/electron,minggo/electron,rajatsingla28/electron,bitemyapp/electron,sshiting/electron,thomsonreuters/electron,iftekeriba/electron,mattdesl/electron,gerhardberger/electron,the-ress/electron,ankitaggarwal011/electron,rprichard/electron,SufianHassan/electron,Zagorakiss/electron,leolujuyi/electron,benweissmann/electron,Ivshti/electron,thompsonemerson/electron,wolfflow/electron,gerhardberger/electron,hokein/atom-shell,fffej/electron,BionicClick/electron,coderhaoxin/electron,chriskdon/electron,destan/electron,abhishekgahlot/electron,lrlna/electron,smczk/electron,kenmozi/electron,micalan/electron,kcrt/electron,kcrt/electron,trankmichael/electron,simongregory/electron,pirafrank/electron,jcblw/electron,medixdev/electron,aliib/electron,digideskio/electron,kcrt/electron,oiledCode/electron,pombredanne/electron,jaanus/electron,aaron-goshine/electron,christian-
bromann/electron,jlhbaseball15/electron,icattlecoder/electron,jannishuebl/electron,abhishekgahlot/electron,gabrielPeart/electron,bobwol/electron,davazp/electron,mrwizard82d1/electron,joneit/electron,miniak/electron,brenca/electron,darwin/electron,icattlecoder/electron,brave/electron,sky7sea/electron,smczk/electron,synaptek/electron,eriser/electron,Evercoder/electron,abhishekgahlot/electron,shennushi/electron,davazp/electron,tonyganch/electron,shiftkey/electron,RIAEvangelist/electron,coderhaoxin/electron,fritx/electron,aecca/electron,MaxGraey/electron,kostia/electron,lrlna/electron,tomashanacek/electron,oiledCode/electron,jaanus/electron,lzpfmh/electron,gabrielPeart/electron,yan-foto/electron,dongjoon-hyun/electron,medixdev/electron,arturts/electron,felixrieseberg/electron,nagyistoce/electron-atom-shell,beni55/electron,jacksondc/electron,kenmozi/electron,miniak/electron,tylergibson/electron,kikong/electron,gbn972/electron,fritx/electron,Jonekee/electron,felixrieseberg/electron,simonfork/electron,thingsinjars/electron,smczk/electron,deepak1556/atom-shell,kokdemo/electron,dongjoon-hyun/electron,neutrous/electron,davazp/electron,Jacobichou/electron,d-salas/electron,matiasinsaurralde/electron,systembugtj/electron,JesselJohn/electron,mrwizard82d1/electron,Gerhut/electron,dongjoon-hyun/electron,IonicaBizauKitchen/electron,lrlna/electron,brenca/electron,biblerule/UMCTelnetHub,saronwei/electron,dahal/electron,rsvip/electron,iftekeriba/electron,trankmichael/electron,benweissmann/electron,michaelchiche/electron,bruce/electron,setzer777/electron,SufianHassan/electron,etiktin/electron,the-ress/electron,mattdesl/electron,electron/electron,noikiy/electron,lzpfmh/electron,Faiz7412/electron,brave/electron,Jonekee/electron,gerhardberger/electron,mattotodd/electron,fomojola/electron,cqqccqc/electron,mattdesl/electron,arusakov/electron,deepak1556/atom-shell,xfstudio/electron,kcrt/electron,soulteary/electron,farmisen/electron,brenca/electron,robinvandernoord/electron,fireball-x/atom-she
ll,gstack/infinium-shell,mirrh/electron,ankitaggarwal011/electron,roadev/electron,christian-bromann/electron,bpasero/electron,egoist/electron,egoist/electron,timruffles/electron,tomashanacek/electron,webmechanicx/electron,gbn972/electron,subblue/electron,bbondy/electron,joneit/electron,icattlecoder/electron,jcblw/electron,tylergibson/electron,aaron-goshine/electron,webmechanicx/electron,sshiting/electron,anko/electron,thomsonreuters/electron,gstack/infinium-shell,d-salas/electron,brenca/electron,Floato/electron,aecca/electron,pandoraui/electron,thompsonemerson/electron,iftekeriba/electron,mubassirhayat/electron,subblue/electron,simongregory/electron,tonyganch/electron,leethomas/electron,leftstick/electron,sshiting/electron,takashi/electron,noikiy/electron,joaomoreno/atom-shell,thingsinjars/electron,pirafrank/electron,JussMee15/electron,vHanda/electron,JussMee15/electron,yan-foto/electron,kokdemo/electron,jhen0409/electron,arusakov/electron,christian-bromann/electron,edulan/electron,ianscrivener/electron,mhkeller/electron,tylergibson/electron,pandoraui/electron,arusakov/electron,beni55/electron,shaundunne/electron,dkfiresky/electron,carsonmcdonald/electron,bobwol/electron,renaesop/electron,timruffles/electron,Floato/electron,ankitaggarwal011/electron,wolfflow/electron,oiledCode/electron,aichingm/electron,posix4e/electron,twolfson/electron,kenmozi/electron,mattotodd/electron,etiktin/electron,jtburke/electron,sky7sea/electron,davazp/electron,chriskdon/electron,leolujuyi/electron,eriser/electron,nicobot/electron,brave/muon,Rokt33r/electron,leftstick/electron,brave/electron,digideskio/electron,edulan/electron,mattotodd/electron,gabriel/electron,webmechanicx/electron,kostia/electron,dahal/electron,joaomoreno/atom-shell,edulan/electron,benweissmann/electron,pirafrank/electron,mubassirhayat/electron,aliib/electron,sircharleswatson/electron,vipulroxx/electron,rhencke/electron,aichingm/electron,shaundunne/electron,jhen0409/electron,Evercoder/electron,John-Lin/electron,astoilk
ov/electron,brave/muon,cos2004/electron,abhishekgahlot/electron,preco21/electron,digideskio/electron,matiasinsaurralde/electron,trigrass2/electron,nicholasess/electron,astoilkov/electron,jhen0409/electron,hokein/atom-shell,leethomas/electron,minggo/electron,fireball-x/atom-shell,astoilkov/electron,leftstick/electron,voidbridge/electron,jsutcodes/electron,subblue/electron,kazupon/electron,aichingm/electron,sircharleswatson/electron,ervinb/electron,takashi/electron,saronwei/electron,electron/electron,aichingm/electron
|
import os
import glob
from lib.util import execute, rm_rf, safe_mkdir, s3put, s3_config
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
SYMBOLS_DIR = 'dist\\symbols'
PDB_LIST = [
'out\\Release\\atom.exe.pdb',
'vendor\\brightray\\vendor\\download\\libchromiumcontent\\Release\\chromiumcontent.dll.pdb',
]
def main():
os.chdir(SOURCE_ROOT)
rm_rf(SYMBOLS_DIR)
safe_mkdir(SYMBOLS_DIR)
for pdb in PDB_LIST:
run_symstore(pdb, SYMBOLS_DIR, 'AtomShell')
bucket, access_key, secret_key = s3_config()
files = glob.glob(SYMBOLS_DIR + '/*.pdb/*/*.pdb')
+ files = [f.lower() for f in files]
upload_symbols(bucket, access_key, secret_key, files)
def run_symstore(pdb, dest, product):
execute(['symstore', 'add', '/r', '/f', pdb, '/s', dest, '/t', product])
def upload_symbols(bucket, access_key, secret_key, files):
s3put(bucket, access_key, secret_key, SYMBOLS_DIR, 'atom-shell/symbols', files)
if __name__ == '__main__':
import sys
sys.exit(main())
|
Use lowercase for symbol paths
|
## Code Before:
import os
import glob
from lib.util import execute, rm_rf, safe_mkdir, s3put, s3_config
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
SYMBOLS_DIR = 'dist\\symbols'
PDB_LIST = [
'out\\Release\\atom.exe.pdb',
'vendor\\brightray\\vendor\\download\\libchromiumcontent\\Release\\chromiumcontent.dll.pdb',
]
def main():
os.chdir(SOURCE_ROOT)
rm_rf(SYMBOLS_DIR)
safe_mkdir(SYMBOLS_DIR)
for pdb in PDB_LIST:
run_symstore(pdb, SYMBOLS_DIR, 'AtomShell')
bucket, access_key, secret_key = s3_config()
files = glob.glob(SYMBOLS_DIR + '/*.pdb/*/*.pdb')
upload_symbols(bucket, access_key, secret_key, files)
def run_symstore(pdb, dest, product):
execute(['symstore', 'add', '/r', '/f', pdb, '/s', dest, '/t', product])
def upload_symbols(bucket, access_key, secret_key, files):
s3put(bucket, access_key, secret_key, SYMBOLS_DIR, 'atom-shell/symbols', files)
if __name__ == '__main__':
import sys
sys.exit(main())
## Instruction:
Use lowercase for symbol paths
## Code After:
import os
import glob
from lib.util import execute, rm_rf, safe_mkdir, s3put, s3_config
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
SYMBOLS_DIR = 'dist\\symbols'
PDB_LIST = [
'out\\Release\\atom.exe.pdb',
'vendor\\brightray\\vendor\\download\\libchromiumcontent\\Release\\chromiumcontent.dll.pdb',
]
def main():
os.chdir(SOURCE_ROOT)
rm_rf(SYMBOLS_DIR)
safe_mkdir(SYMBOLS_DIR)
for pdb in PDB_LIST:
run_symstore(pdb, SYMBOLS_DIR, 'AtomShell')
bucket, access_key, secret_key = s3_config()
files = glob.glob(SYMBOLS_DIR + '/*.pdb/*/*.pdb')
files = [f.lower() for f in files]
upload_symbols(bucket, access_key, secret_key, files)
def run_symstore(pdb, dest, product):
execute(['symstore', 'add', '/r', '/f', pdb, '/s', dest, '/t', product])
def upload_symbols(bucket, access_key, secret_key, files):
s3put(bucket, access_key, secret_key, SYMBOLS_DIR, 'atom-shell/symbols', files)
if __name__ == '__main__':
import sys
sys.exit(main())
|
# ... existing code ...
files = glob.glob(SYMBOLS_DIR + '/*.pdb/*/*.pdb')
files = [f.lower() for f in files]
upload_symbols(bucket, access_key, secret_key, files)
# ... rest of the code ...
|
7845e017b264a38472d0dc94988a0afe6938132f
|
tests/acceptance/conftest.py
|
tests/acceptance/conftest.py
|
import mock
import pytest
@pytest.fixture
def default_trace_id_generator(dummy_request):
return lambda dummy_request: '17133d482ba4f605'
@pytest.fixture
def settings():
return {
'zipkin.tracing_percent': 100,
'zipkin.trace_id_generator': default_trace_id_generator,
}
@pytest.fixture
def get_span():
return {
'id': '1',
'tags': {
'http.uri': '/sample',
'http.uri.qs': '/sample',
'http.route': '/sample',
'response_status_code': '200',
},
'name': 'GET /sample',
'traceId': '17133d482ba4f605',
'localEndpoint': {
'ipv4': '127.0.0.1',
'port': 80,
'serviceName': 'acceptance_service',
},
'kind': 'SERVER',
'timestamp': mock.ANY,
'duration': mock.ANY,
}
|
import mock
import pytest
@pytest.fixture
def default_trace_id_generator(dummy_request):
return lambda dummy_request: '17133d482ba4f605'
@pytest.fixture
def settings():
return {
'zipkin.tracing_percent': 100,
'zipkin.trace_id_generator': default_trace_id_generator,
}
@pytest.fixture
def get_span():
return {
'id': '1',
'tags': {
'http.uri': '/sample',
'http.uri.qs': '/sample',
'http.route': '/sample',
'response_status_code': '200',
},
'name': 'GET /sample',
'traceId': '17133d482ba4f605',
'localEndpoint': {
'ipv4': mock.ANY,
'port': 80,
'serviceName': 'acceptance_service',
},
'kind': 'SERVER',
'timestamp': mock.ANY,
'duration': mock.ANY,
}
|
Allow any ip in the get_span expected span since it's not deterministic
|
Allow any ip in the get_span expected span since it's not deterministic
|
Python
|
apache-2.0
|
Yelp/pyramid_zipkin
|
import mock
import pytest
@pytest.fixture
def default_trace_id_generator(dummy_request):
return lambda dummy_request: '17133d482ba4f605'
@pytest.fixture
def settings():
return {
'zipkin.tracing_percent': 100,
'zipkin.trace_id_generator': default_trace_id_generator,
}
@pytest.fixture
def get_span():
return {
'id': '1',
'tags': {
'http.uri': '/sample',
'http.uri.qs': '/sample',
'http.route': '/sample',
'response_status_code': '200',
},
'name': 'GET /sample',
'traceId': '17133d482ba4f605',
'localEndpoint': {
- 'ipv4': '127.0.0.1',
+ 'ipv4': mock.ANY,
'port': 80,
'serviceName': 'acceptance_service',
},
'kind': 'SERVER',
'timestamp': mock.ANY,
'duration': mock.ANY,
}
|
Allow any ip in the get_span expected span since it's not deterministic
|
## Code Before:
import mock
import pytest
@pytest.fixture
def default_trace_id_generator(dummy_request):
return lambda dummy_request: '17133d482ba4f605'
@pytest.fixture
def settings():
return {
'zipkin.tracing_percent': 100,
'zipkin.trace_id_generator': default_trace_id_generator,
}
@pytest.fixture
def get_span():
return {
'id': '1',
'tags': {
'http.uri': '/sample',
'http.uri.qs': '/sample',
'http.route': '/sample',
'response_status_code': '200',
},
'name': 'GET /sample',
'traceId': '17133d482ba4f605',
'localEndpoint': {
'ipv4': '127.0.0.1',
'port': 80,
'serviceName': 'acceptance_service',
},
'kind': 'SERVER',
'timestamp': mock.ANY,
'duration': mock.ANY,
}
## Instruction:
Allow any ip in the get_span expected span since it's not deterministic
## Code After:
import mock
import pytest
@pytest.fixture
def default_trace_id_generator(dummy_request):
return lambda dummy_request: '17133d482ba4f605'
@pytest.fixture
def settings():
return {
'zipkin.tracing_percent': 100,
'zipkin.trace_id_generator': default_trace_id_generator,
}
@pytest.fixture
def get_span():
return {
'id': '1',
'tags': {
'http.uri': '/sample',
'http.uri.qs': '/sample',
'http.route': '/sample',
'response_status_code': '200',
},
'name': 'GET /sample',
'traceId': '17133d482ba4f605',
'localEndpoint': {
'ipv4': mock.ANY,
'port': 80,
'serviceName': 'acceptance_service',
},
'kind': 'SERVER',
'timestamp': mock.ANY,
'duration': mock.ANY,
}
|
# ... existing code ...
'localEndpoint': {
'ipv4': mock.ANY,
'port': 80,
# ... rest of the code ...
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.