| column | type | min | max |
|---|---|---|---|
| commit | stringlengths | 40 | 40 |
| old_file | stringlengths | 4 | 118 |
| new_file | stringlengths | 4 | 118 |
| old_contents | stringlengths | 10 | 2.94k |
| new_contents | stringlengths | 21 | 3.18k |
| subject | stringlengths | 16 | 444 |
| message | stringlengths | 17 | 2.63k |
| lang | stringclasses | 1 value | |
| license | stringclasses | 13 values | |
| repos | stringlengths | 5 | 43k |
| ndiff | stringlengths | 52 | 3.32k |
| instruction | stringlengths | 16 | 444 |
| content | stringlengths | 133 | 4.32k |
| fuzzy_diff | stringlengths | 33 | 3.23k |
de7219dd9d40f316dc0dd6f6c2cad68e66898762
|
tests/test_live.py
|
tests/test_live.py
|
import os
import pytest
from aiosmtplib import SMTP, SMTPAuthenticationError, SMTPStatus
pytestmark = [
pytest.mark.skipif(
os.environ.get("CI") == "true",
reason="No tests against real servers on CI servers",
),
pytest.mark.asyncio(),
]
async def test_starttls_gmail():
client = SMTP(hostname="smtp.gmail.com", port=587, use_tls=False)
await client.connect(timeout=1.0)
await client.ehlo()
await client.starttls(validate_certs=False)
response = await client.ehlo()
assert response.code == SMTPStatus.completed
assert "smtp.gmail.com at your service" in response.message
with pytest.raises(SMTPAuthenticationError):
await client.login("test", "test")
@pytest.mark.asyncio()
async def test_qq_login():
client = SMTP(hostname="smtp.qq.com", port=587, use_tls=False)
await client.connect(timeout=2.0)
await client.ehlo()
await client.starttls(validate_certs=False)
with pytest.raises(SMTPAuthenticationError):
await client.login("test", "test")
|
import os
import pytest
from aiosmtplib import SMTP, SMTPAuthenticationError, SMTPStatus
pytestmark = [
pytest.mark.skipif(
os.environ.get("AIOSMTPLIB_LIVE_TESTS") != "true",
reason="No tests against real servers unless requested",
),
pytest.mark.asyncio(),
]
async def test_starttls_gmail():
client = SMTP(hostname="smtp.gmail.com", port=587, use_tls=False)
await client.connect(timeout=1.0)
await client.ehlo()
await client.starttls(validate_certs=False)
response = await client.ehlo()
assert response.code == SMTPStatus.completed
assert "smtp.gmail.com at your service" in response.message
with pytest.raises(SMTPAuthenticationError):
await client.login("test", "test")
@pytest.mark.asyncio()
async def test_qq_login():
client = SMTP(hostname="smtp.qq.com", port=587, use_tls=False)
await client.connect(timeout=2.0)
await client.ehlo()
await client.starttls(validate_certs=False)
with pytest.raises(SMTPAuthenticationError):
await client.login("test", "test")
|
Disable live tests by default
|
Disable live tests by default
|
Python
|
mit
|
cole/aiosmtplib
|
import os
import pytest
from aiosmtplib import SMTP, SMTPAuthenticationError, SMTPStatus
pytestmark = [
pytest.mark.skipif(
- os.environ.get("CI") == "true",
+ os.environ.get("AIOSMTPLIB_LIVE_TESTS") != "true",
- reason="No tests against real servers on CI servers",
+ reason="No tests against real servers unless requested",
),
pytest.mark.asyncio(),
]
async def test_starttls_gmail():
client = SMTP(hostname="smtp.gmail.com", port=587, use_tls=False)
await client.connect(timeout=1.0)
await client.ehlo()
await client.starttls(validate_certs=False)
response = await client.ehlo()
assert response.code == SMTPStatus.completed
assert "smtp.gmail.com at your service" in response.message
with pytest.raises(SMTPAuthenticationError):
await client.login("test", "test")
@pytest.mark.asyncio()
async def test_qq_login():
client = SMTP(hostname="smtp.qq.com", port=587, use_tls=False)
await client.connect(timeout=2.0)
await client.ehlo()
await client.starttls(validate_certs=False)
with pytest.raises(SMTPAuthenticationError):
await client.login("test", "test")
|
Disable live tests by default
|
## Code Before:
import os
import pytest
from aiosmtplib import SMTP, SMTPAuthenticationError, SMTPStatus
pytestmark = [
pytest.mark.skipif(
os.environ.get("CI") == "true",
reason="No tests against real servers on CI servers",
),
pytest.mark.asyncio(),
]
async def test_starttls_gmail():
client = SMTP(hostname="smtp.gmail.com", port=587, use_tls=False)
await client.connect(timeout=1.0)
await client.ehlo()
await client.starttls(validate_certs=False)
response = await client.ehlo()
assert response.code == SMTPStatus.completed
assert "smtp.gmail.com at your service" in response.message
with pytest.raises(SMTPAuthenticationError):
await client.login("test", "test")
@pytest.mark.asyncio()
async def test_qq_login():
client = SMTP(hostname="smtp.qq.com", port=587, use_tls=False)
await client.connect(timeout=2.0)
await client.ehlo()
await client.starttls(validate_certs=False)
with pytest.raises(SMTPAuthenticationError):
await client.login("test", "test")
## Instruction:
Disable live tests by default
## Code After:
import os
import pytest
from aiosmtplib import SMTP, SMTPAuthenticationError, SMTPStatus
pytestmark = [
pytest.mark.skipif(
os.environ.get("AIOSMTPLIB_LIVE_TESTS") != "true",
reason="No tests against real servers unless requested",
),
pytest.mark.asyncio(),
]
async def test_starttls_gmail():
client = SMTP(hostname="smtp.gmail.com", port=587, use_tls=False)
await client.connect(timeout=1.0)
await client.ehlo()
await client.starttls(validate_certs=False)
response = await client.ehlo()
assert response.code == SMTPStatus.completed
assert "smtp.gmail.com at your service" in response.message
with pytest.raises(SMTPAuthenticationError):
await client.login("test", "test")
@pytest.mark.asyncio()
async def test_qq_login():
client = SMTP(hostname="smtp.qq.com", port=587, use_tls=False)
await client.connect(timeout=2.0)
await client.ehlo()
await client.starttls(validate_certs=False)
with pytest.raises(SMTPAuthenticationError):
await client.login("test", "test")
|
// ... existing code ...
pytestmark = [
pytest.mark.skipif(
os.environ.get("AIOSMTPLIB_LIVE_TESTS") != "true",
reason="No tests against real servers unless requested",
),
pytest.mark.asyncio(),
]
// ... rest of the code ...
|
67e47e0179352e9d5206fe7196762481d0bcaba4
|
aspen/server.py
|
aspen/server.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from algorithm import Algorithm
def main():
Server().main()
class Server(object):
def __init__(self, argv=None):
self.argv = argv
def get_algorithm(self):
return Algorithm('aspen.algorithms.server')
def get_website(self):
"""Return a website object. Useful in testing.
"""
algorithm = self.get_algorithm()
algorithm.run(argv=self.argv, _through='get_website_from_argv')
return algorithm.state['website']
def main(self, argv=None):
"""http://aspen.io/cli/
"""
try:
argv = argv if argv is not None else self.argv
algorithm = self.get_algorithm()
algorithm.run(argv=argv)
except (SystemExit, KeyboardInterrupt):
# Under some (most?) network engines, a SIGINT will be trapped by the
# SIGINT signal handler above. However, gevent does "something" with
# signals and our signal handler never fires. However, we *do* get a
# KeyboardInterrupt here in that case. *shrug*
#
# See: https://github.com/gittip/aspen-python/issues/196
pass
except:
import aspen, traceback
aspen.log_dammit("Oh no! Aspen crashed!")
aspen.log_dammit(traceback.format_exc())
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from algorithm import Algorithm
def main():
Server().main()
class Server(object):
def __init__(self, argv=None):
self.argv = argv
def get_algorithm(self):
return Algorithm('aspen.algorithms.server')
def get_website(self):
"""Return a website object. Useful in testing.
"""
algorithm = self.get_algorithm()
state = algorithm.run(argv=self.argv, _through='get_website_from_argv')
return state['website']
def main(self, argv=None):
"""http://aspen.io/cli/
"""
try:
argv = argv if argv is not None else self.argv
algorithm = self.get_algorithm()
algorithm.run(argv=argv)
except (SystemExit, KeyboardInterrupt):
# Under some (most?) network engines, a SIGINT will be trapped by the
# SIGINT signal handler above. However, gevent does "something" with
# signals and our signal handler never fires. However, we *do* get a
# KeyboardInterrupt here in that case. *shrug*
#
# See: https://github.com/gittip/aspen-python/issues/196
pass
except:
import aspen, traceback
aspen.log_dammit("Oh no! Aspen crashed!")
aspen.log_dammit(traceback.format_exc())
if __name__ == '__main__':
main()
|
Drop back to state as return val
|
Drop back to state as return val
If we store state on Algorithm then we're not thread-safe.
|
Python
|
mit
|
gratipay/aspen.py,gratipay/aspen.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from algorithm import Algorithm
def main():
Server().main()
class Server(object):
def __init__(self, argv=None):
self.argv = argv
def get_algorithm(self):
return Algorithm('aspen.algorithms.server')
def get_website(self):
"""Return a website object. Useful in testing.
"""
algorithm = self.get_algorithm()
- algorithm.run(argv=self.argv, _through='get_website_from_argv')
+ state = algorithm.run(argv=self.argv, _through='get_website_from_argv')
- return algorithm.state['website']
+ return state['website']
def main(self, argv=None):
"""http://aspen.io/cli/
"""
try:
argv = argv if argv is not None else self.argv
algorithm = self.get_algorithm()
algorithm.run(argv=argv)
except (SystemExit, KeyboardInterrupt):
# Under some (most?) network engines, a SIGINT will be trapped by the
# SIGINT signal handler above. However, gevent does "something" with
# signals and our signal handler never fires. However, we *do* get a
# KeyboardInterrupt here in that case. *shrug*
#
# See: https://github.com/gittip/aspen-python/issues/196
pass
except:
import aspen, traceback
aspen.log_dammit("Oh no! Aspen crashed!")
aspen.log_dammit(traceback.format_exc())
if __name__ == '__main__':
main()
|
Drop back to state as return val
|
## Code Before:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from algorithm import Algorithm
def main():
Server().main()
class Server(object):
def __init__(self, argv=None):
self.argv = argv
def get_algorithm(self):
return Algorithm('aspen.algorithms.server')
def get_website(self):
"""Return a website object. Useful in testing.
"""
algorithm = self.get_algorithm()
algorithm.run(argv=self.argv, _through='get_website_from_argv')
return algorithm.state['website']
def main(self, argv=None):
"""http://aspen.io/cli/
"""
try:
argv = argv if argv is not None else self.argv
algorithm = self.get_algorithm()
algorithm.run(argv=argv)
except (SystemExit, KeyboardInterrupt):
# Under some (most?) network engines, a SIGINT will be trapped by the
# SIGINT signal handler above. However, gevent does "something" with
# signals and our signal handler never fires. However, we *do* get a
# KeyboardInterrupt here in that case. *shrug*
#
# See: https://github.com/gittip/aspen-python/issues/196
pass
except:
import aspen, traceback
aspen.log_dammit("Oh no! Aspen crashed!")
aspen.log_dammit(traceback.format_exc())
if __name__ == '__main__':
main()
## Instruction:
Drop back to state as return val
## Code After:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from algorithm import Algorithm
def main():
Server().main()
class Server(object):
def __init__(self, argv=None):
self.argv = argv
def get_algorithm(self):
return Algorithm('aspen.algorithms.server')
def get_website(self):
"""Return a website object. Useful in testing.
"""
algorithm = self.get_algorithm()
state = algorithm.run(argv=self.argv, _through='get_website_from_argv')
return state['website']
def main(self, argv=None):
"""http://aspen.io/cli/
"""
try:
argv = argv if argv is not None else self.argv
algorithm = self.get_algorithm()
algorithm.run(argv=argv)
except (SystemExit, KeyboardInterrupt):
# Under some (most?) network engines, a SIGINT will be trapped by the
# SIGINT signal handler above. However, gevent does "something" with
# signals and our signal handler never fires. However, we *do* get a
# KeyboardInterrupt here in that case. *shrug*
#
# See: https://github.com/gittip/aspen-python/issues/196
pass
except:
import aspen, traceback
aspen.log_dammit("Oh no! Aspen crashed!")
aspen.log_dammit(traceback.format_exc())
if __name__ == '__main__':
main()
|
...
"""Return a website object. Useful in testing.
"""
algorithm = self.get_algorithm()
state = algorithm.run(argv=self.argv, _through='get_website_from_argv')
return state['website']
def main(self, argv=None):
"""http://aspen.io/cli/
...
|
f1d9c010b58d69cdcf8f55a3e5937cbdf58c10e6
|
tools/corintick_dump.py
|
tools/corintick_dump.py
|
import argparse
import glob
from pytrthree import TRTHIterator
from corintick import Corintick, ValidationError
def main(args):
db = Corintick(args.config)
files = glob.glob(args.files)
for ric, df in TRTHIterator(files):
cols = args.columns if args.columns else df.columns
try:
db.write(ric, df[cols], collection=args.collection)
except ValidationError as e:
db.logger.error(e)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Parse TRTH files and insert into Corintick.')
parser.add_argument('--config', type=argparse.FileType('r'), required=True,
help='Corintick configuration (YAML file)')
parser.add_argument('--files', type=str, default='*', required=True,
help='Glob of files to download')
parser.add_argument('--columns', nargs='*', type=str,
help='Columns to be inserted (optional)')
parser.add_argument('--collection', type=str, default=None,
help='Collection to insert to (optional)')
args = parser.parse_args()
main(args)
|
import argparse
import glob
import os
from pytrthree import TRTHIterator
from corintick import Corintick, ValidationError
def main(args):
db = Corintick(args.config)
files = glob.glob(os.path.expanduser(args.files))
for ric, df in TRTHIterator(files):
cols = args.columns if args.columns else df.columns
try:
db.write(ric, df[cols], collection=args.collection)
except ValidationError as e:
db.logger.error(e)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Parse TRTH files and insert into Corintick.')
parser.add_argument('--config', type=argparse.FileType('r'), required=True,
help='Corintick configuration (YAML file)')
parser.add_argument('--files', type=str, default='*', required=True,
help='Glob of files to insert')
parser.add_argument('--columns', nargs='*', type=str,
help='Columns to be inserted (optional)')
parser.add_argument('--collection', type=str, default=None,
help='Collection to insert to (optional)')
args = parser.parse_args()
main(args)
|
Fix help docstring and glob parsing
|
Fix help docstring and glob parsing
|
Python
|
mit
|
plugaai/pytrthree
|
import argparse
import glob
+ import os
from pytrthree import TRTHIterator
from corintick import Corintick, ValidationError
def main(args):
db = Corintick(args.config)
- files = glob.glob(args.files)
+ files = glob.glob(os.path.expanduser(args.files))
for ric, df in TRTHIterator(files):
cols = args.columns if args.columns else df.columns
try:
db.write(ric, df[cols], collection=args.collection)
except ValidationError as e:
db.logger.error(e)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Parse TRTH files and insert into Corintick.')
parser.add_argument('--config', type=argparse.FileType('r'), required=True,
help='Corintick configuration (YAML file)')
parser.add_argument('--files', type=str, default='*', required=True,
- help='Glob of files to download')
+ help='Glob of files to insert')
parser.add_argument('--columns', nargs='*', type=str,
help='Columns to be inserted (optional)')
parser.add_argument('--collection', type=str, default=None,
help='Collection to insert to (optional)')
args = parser.parse_args()
main(args)
|
Fix help docstring and glob parsing
|
## Code Before:
import argparse
import glob
from pytrthree import TRTHIterator
from corintick import Corintick, ValidationError
def main(args):
db = Corintick(args.config)
files = glob.glob(args.files)
for ric, df in TRTHIterator(files):
cols = args.columns if args.columns else df.columns
try:
db.write(ric, df[cols], collection=args.collection)
except ValidationError as e:
db.logger.error(e)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Parse TRTH files and insert into Corintick.')
parser.add_argument('--config', type=argparse.FileType('r'), required=True,
help='Corintick configuration (YAML file)')
parser.add_argument('--files', type=str, default='*', required=True,
help='Glob of files to download')
parser.add_argument('--columns', nargs='*', type=str,
help='Columns to be inserted (optional)')
parser.add_argument('--collection', type=str, default=None,
help='Collection to insert to (optional)')
args = parser.parse_args()
main(args)
## Instruction:
Fix help docstring and glob parsing
## Code After:
import argparse
import glob
import os
from pytrthree import TRTHIterator
from corintick import Corintick, ValidationError
def main(args):
db = Corintick(args.config)
files = glob.glob(os.path.expanduser(args.files))
for ric, df in TRTHIterator(files):
cols = args.columns if args.columns else df.columns
try:
db.write(ric, df[cols], collection=args.collection)
except ValidationError as e:
db.logger.error(e)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Parse TRTH files and insert into Corintick.')
parser.add_argument('--config', type=argparse.FileType('r'), required=True,
help='Corintick configuration (YAML file)')
parser.add_argument('--files', type=str, default='*', required=True,
help='Glob of files to insert')
parser.add_argument('--columns', nargs='*', type=str,
help='Columns to be inserted (optional)')
parser.add_argument('--collection', type=str, default=None,
help='Collection to insert to (optional)')
args = parser.parse_args()
main(args)
|
# ... existing code ...
import argparse
import glob
import os
from pytrthree import TRTHIterator
from corintick import Corintick, ValidationError
# ... modified code ...
def main(args):
db = Corintick(args.config)
files = glob.glob(os.path.expanduser(args.files))
for ric, df in TRTHIterator(files):
cols = args.columns if args.columns else df.columns
try:
...
parser.add_argument('--config', type=argparse.FileType('r'), required=True,
help='Corintick configuration (YAML file)')
parser.add_argument('--files', type=str, default='*', required=True,
help='Glob of files to insert')
parser.add_argument('--columns', nargs='*', type=str,
help='Columns to be inserted (optional)')
parser.add_argument('--collection', type=str, default=None,
# ... rest of the code ...
|
aa1c2880dc85228d9a8d534858c1cfe70428cbde
|
src/ggrc/migrations/versions/20160510122526_44ebc240800b_remove_response_relationships.py
|
src/ggrc/migrations/versions/20160510122526_44ebc240800b_remove_response_relationships.py
|
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
from alembic import op
# revision identifiers, used by Alembic.
revision = '44ebc240800b'
down_revision = '3715694bd315'
def upgrade():
"""Upgrade database schema and/or data, creating a new revision."""
op.execute(
"""
DELETE FROM relationships
WHERE source_type IN
("Response", "DocumentationResponse", "InterviewResponse",
"PopulationSampleResponse")
OR destination_type IN
("Response", "DocumentationResponse", "InterviewResponse",
"PopulationSampleResponse")
""")
def downgrade():
"""Downgrade database schema and/or data back to the previous revision."""
pass
|
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
from alembic import op
# revision identifiers, used by Alembic.
revision = '44ebc240800b'
down_revision = '3715694bd315'
def upgrade():
"""Upgrade database schema and/or data, creating a new revision."""
op.execute(
"""
DELETE FROM relationships
WHERE source_type IN
("Response", "DocumentationResponse", "InterviewResponse",
"PopulationSampleResponse")
OR destination_type IN
("Response", "DocumentationResponse", "InterviewResponse",
"PopulationSampleResponse")
""")
op.execute(
"""
DELETE FROM object_documents
WHERE documentable_type IN
("Response", "DocumentationResponse", "InterviewResponse",
"PopulationSampleResponse")
""")
def downgrade():
"""Downgrade database schema and/or data back to the previous revision."""
pass
|
Remove responses references in object_documents
|
Remove responses references in object_documents
|
Python
|
apache-2.0
|
selahssea/ggrc-core,andrei-karalionak/ggrc-core,kr41/ggrc-core,AleksNeStu/ggrc-core,josthkko/ggrc-core,edofic/ggrc-core,plamut/ggrc-core,josthkko/ggrc-core,andrei-karalionak/ggrc-core,j0gurt/ggrc-core,plamut/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core,edofic/ggrc-core,AleksNeStu/ggrc-core,josthkko/ggrc-core,andrei-karalionak/ggrc-core,j0gurt/ggrc-core,plamut/ggrc-core,NejcZupec/ggrc-core,edofic/ggrc-core,NejcZupec/ggrc-core,edofic/ggrc-core,selahssea/ggrc-core,andrei-karalionak/ggrc-core,NejcZupec/ggrc-core,VinnieJohns/ggrc-core,kr41/ggrc-core,kr41/ggrc-core,AleksNeStu/ggrc-core,VinnieJohns/ggrc-core,josthkko/ggrc-core,NejcZupec/ggrc-core,VinnieJohns/ggrc-core,selahssea/ggrc-core,kr41/ggrc-core,selahssea/ggrc-core,j0gurt/ggrc-core,j0gurt/ggrc-core,VinnieJohns/ggrc-core
|
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
from alembic import op
# revision identifiers, used by Alembic.
revision = '44ebc240800b'
down_revision = '3715694bd315'
def upgrade():
"""Upgrade database schema and/or data, creating a new revision."""
op.execute(
"""
DELETE FROM relationships
WHERE source_type IN
("Response", "DocumentationResponse", "InterviewResponse",
"PopulationSampleResponse")
OR destination_type IN
("Response", "DocumentationResponse", "InterviewResponse",
"PopulationSampleResponse")
""")
+ op.execute(
+ """
+ DELETE FROM object_documents
+ WHERE documentable_type IN
+ ("Response", "DocumentationResponse", "InterviewResponse",
+ "PopulationSampleResponse")
+ """)
def downgrade():
"""Downgrade database schema and/or data back to the previous revision."""
pass
|
Remove responses references in object_documents
|
## Code Before:
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
from alembic import op
# revision identifiers, used by Alembic.
revision = '44ebc240800b'
down_revision = '3715694bd315'
def upgrade():
"""Upgrade database schema and/or data, creating a new revision."""
op.execute(
"""
DELETE FROM relationships
WHERE source_type IN
("Response", "DocumentationResponse", "InterviewResponse",
"PopulationSampleResponse")
OR destination_type IN
("Response", "DocumentationResponse", "InterviewResponse",
"PopulationSampleResponse")
""")
def downgrade():
"""Downgrade database schema and/or data back to the previous revision."""
pass
## Instruction:
Remove responses references in object_documents
## Code After:
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
from alembic import op
# revision identifiers, used by Alembic.
revision = '44ebc240800b'
down_revision = '3715694bd315'
def upgrade():
"""Upgrade database schema and/or data, creating a new revision."""
op.execute(
"""
DELETE FROM relationships
WHERE source_type IN
("Response", "DocumentationResponse", "InterviewResponse",
"PopulationSampleResponse")
OR destination_type IN
("Response", "DocumentationResponse", "InterviewResponse",
"PopulationSampleResponse")
""")
op.execute(
"""
DELETE FROM object_documents
WHERE documentable_type IN
("Response", "DocumentationResponse", "InterviewResponse",
"PopulationSampleResponse")
""")
def downgrade():
"""Downgrade database schema and/or data back to the previous revision."""
pass
|
# ... existing code ...
("Response", "DocumentationResponse", "InterviewResponse",
"PopulationSampleResponse")
""")
op.execute(
"""
DELETE FROM object_documents
WHERE documentable_type IN
("Response", "DocumentationResponse", "InterviewResponse",
"PopulationSampleResponse")
""")
def downgrade():
# ... rest of the code ...
|
5bffcac159bc28adaf03b05ca75cd7387aad3240
|
linked-list/linked-list.py
|
linked-list/linked-list.py
|
class Node(object):
def __init__(self, data):
self.data = data
self.next = None
|
class Node(object):
def __init__(self, data):
self.data = data
self.next = None
class LinkedList(object):
def __init__(self, head=None):
self.head = head
|
Set up linked list class
|
Set up linked list class
|
Python
|
mit
|
derekmpham/interview-prep,derekmpham/interview-prep
|
class Node(object):
def __init__(self, data):
self.data = data
self.next = None
+ class LinkedList(object):
+ def __init__(self, head=None):
+ self.head = head
+
+
+
|
Set up linked list class
|
## Code Before:
class Node(object):
def __init__(self, data):
self.data = data
self.next = None
## Instruction:
Set up linked list class
## Code After:
class Node(object):
def __init__(self, data):
self.data = data
self.next = None
class LinkedList(object):
def __init__(self, head=None):
self.head = head
|
# ... existing code ...
def __init__(self, data):
self.data = data
self.next = None
class LinkedList(object):
def __init__(self, head=None):
self.head = head
# ... rest of the code ...
|
1c5af88a0689aadab4069f9f2ad16164791624b3
|
Discord/utilities/errors.py
|
Discord/utilities/errors.py
|
from discord.ext.commands.errors import CommandError
class NotServerOwner(CommandError):
'''Not Server Owner'''
pass
class VoiceNotConnected(CommandError):
'''Voice Not Connected'''
pass
class PermittedVoiceNotConnected(VoiceNotConnected):
'''Permitted, but Voice Not Connected'''
pass
class NotPermittedVoiceNotConnected(VoiceNotConnected):
'''Voice Not Connected, and Not Permitted'''
pass
class MissingPermissions(CommandError):
'''Missing Permissions'''
pass
class MissingCapability(CommandError):
'''Missing Capability'''
def __init__(self, permissions):
self.permissions = permissions
class NotPermitted(CommandError):
'''Not Permitted'''
pass
class AudioError(CommandError):
'''Audio Error'''
pass
|
from discord.ext.commands.errors import CommandError
class NotServerOwner(CommandError):
'''Not Server Owner'''
pass
class VoiceNotConnected(CommandError):
'''Voice Not Connected'''
pass
class PermittedVoiceNotConnected(VoiceNotConnected):
'''Permitted, but Voice Not Connected'''
pass
class NotPermittedVoiceNotConnected(VoiceNotConnected):
'''Voice Not Connected, and Not Permitted'''
pass
class MissingPermissions(CommandError):
'''Missing Permissions'''
pass
class NotPermitted(CommandError):
'''Not Permitted'''
pass
class AudioError(CommandError):
'''Audio Error'''
pass
|
Remove no longer used Missing Capability error
|
[Discord] Remove no longer used Missing Capability error
|
Python
|
mit
|
Harmon758/Harmonbot,Harmon758/Harmonbot
|
from discord.ext.commands.errors import CommandError
class NotServerOwner(CommandError):
'''Not Server Owner'''
pass
class VoiceNotConnected(CommandError):
'''Voice Not Connected'''
pass
class PermittedVoiceNotConnected(VoiceNotConnected):
'''Permitted, but Voice Not Connected'''
pass
class NotPermittedVoiceNotConnected(VoiceNotConnected):
'''Voice Not Connected, and Not Permitted'''
pass
class MissingPermissions(CommandError):
'''Missing Permissions'''
pass
- class MissingCapability(CommandError):
- '''Missing Capability'''
- def __init__(self, permissions):
- self.permissions = permissions
-
class NotPermitted(CommandError):
'''Not Permitted'''
pass
class AudioError(CommandError):
'''Audio Error'''
pass
|
Remove no longer used Missing Capability error
|
## Code Before:
from discord.ext.commands.errors import CommandError
class NotServerOwner(CommandError):
'''Not Server Owner'''
pass
class VoiceNotConnected(CommandError):
'''Voice Not Connected'''
pass
class PermittedVoiceNotConnected(VoiceNotConnected):
'''Permitted, but Voice Not Connected'''
pass
class NotPermittedVoiceNotConnected(VoiceNotConnected):
'''Voice Not Connected, and Not Permitted'''
pass
class MissingPermissions(CommandError):
'''Missing Permissions'''
pass
class MissingCapability(CommandError):
'''Missing Capability'''
def __init__(self, permissions):
self.permissions = permissions
class NotPermitted(CommandError):
'''Not Permitted'''
pass
class AudioError(CommandError):
'''Audio Error'''
pass
## Instruction:
Remove no longer used Missing Capability error
## Code After:
from discord.ext.commands.errors import CommandError
class NotServerOwner(CommandError):
'''Not Server Owner'''
pass
class VoiceNotConnected(CommandError):
'''Voice Not Connected'''
pass
class PermittedVoiceNotConnected(VoiceNotConnected):
'''Permitted, but Voice Not Connected'''
pass
class NotPermittedVoiceNotConnected(VoiceNotConnected):
'''Voice Not Connected, and Not Permitted'''
pass
class MissingPermissions(CommandError):
'''Missing Permissions'''
pass
class NotPermitted(CommandError):
'''Not Permitted'''
pass
class AudioError(CommandError):
'''Audio Error'''
pass
|
# ... existing code ...
'''Missing Permissions'''
pass
class NotPermitted(CommandError):
'''Not Permitted'''
pass
# ... rest of the code ...
|
1c51c772d4b21eba70cd09429e603f1873b2c13c
|
examples/demo.py
|
examples/demo.py
|
import pytaf
taf_str = """
TAF AMD KDEN 291134Z 2912/3018 32006KT 1/4SM FG OVC001
TEMPO 2914/2915 1SM -BR CLR
FM291500 04006KT P6SM SKC
TEMPO 2915/2917 2SM BR OVC008
FM291900 05007KT P6SM SCT050 BKN090 WS010/13040KT
PROB30 2921/3001 VRB20G30KT -TSRA BKN050CB
FM300100 31007KT P6SM SCT070 BKN120 +FC
FM300500 23006KT P6SM SCT120 $
"""
t = pytaf.TAF(taf_str)
d = pytaf.Decoder(t)
print taf_str
print
dec = d.decode_taf()
print dec
|
import pytaf
taf_str = """
TAF AMD KDEN 291134Z 2912/3018 32006KT 1/4SM FG OVC001
TEMPO 2914/2915 1SM -BR CLR
FM291500 04006KT P6SM SKC
TEMPO 2915/2917 2SM BR OVC008
FM291900 05007KT P6SM SCT050 BKN090 WS010/13040KT
PROB30 2921/3001 VRB20G30KT -TSRA BKN050CB
FM300100 31007KT P6SM SCT070 BKN120 +FC
FM300500 23006KT P6SM SCT120 $
"""
# Create a parsed TAF object from string
t = pytaf.TAF(taf_str)
# Create a decoder object from the TAF object
d = pytaf.Decoder(t)
# Print the raw string for the reference
print(taf_str)
# Decode and print the decoded string
dec = d.decode_taf()
print(dec)
|
Update the example script to work with python3.
|
Update the example script to work with python3.
|
Python
|
mit
|
dmbaturin/pytaf
|
import pytaf
taf_str = """
TAF AMD KDEN 291134Z 2912/3018 32006KT 1/4SM FG OVC001
TEMPO 2914/2915 1SM -BR CLR
FM291500 04006KT P6SM SKC
TEMPO 2915/2917 2SM BR OVC008
FM291900 05007KT P6SM SCT050 BKN090 WS010/13040KT
PROB30 2921/3001 VRB20G30KT -TSRA BKN050CB
FM300100 31007KT P6SM SCT070 BKN120 +FC
FM300500 23006KT P6SM SCT120 $
"""
+ # Create a parsed TAF object from string
t = pytaf.TAF(taf_str)
+ # Create a decoder object from the TAF object
d = pytaf.Decoder(t)
+ # Print the raw string for the reference
- print taf_str
+ print(taf_str)
- print
+
+ # Decode and print the decoded string
dec = d.decode_taf()
-
- print dec
+ print(dec)
|
Update the example script to work with python3.
|
## Code Before:
import pytaf
taf_str = """
TAF AMD KDEN 291134Z 2912/3018 32006KT 1/4SM FG OVC001
TEMPO 2914/2915 1SM -BR CLR
FM291500 04006KT P6SM SKC
TEMPO 2915/2917 2SM BR OVC008
FM291900 05007KT P6SM SCT050 BKN090 WS010/13040KT
PROB30 2921/3001 VRB20G30KT -TSRA BKN050CB
FM300100 31007KT P6SM SCT070 BKN120 +FC
FM300500 23006KT P6SM SCT120 $
"""
t = pytaf.TAF(taf_str)
d = pytaf.Decoder(t)
print taf_str
print
dec = d.decode_taf()
print dec
## Instruction:
Update the example script to work with python3.
## Code After:
import pytaf
taf_str = """
TAF AMD KDEN 291134Z 2912/3018 32006KT 1/4SM FG OVC001
TEMPO 2914/2915 1SM -BR CLR
FM291500 04006KT P6SM SKC
TEMPO 2915/2917 2SM BR OVC008
FM291900 05007KT P6SM SCT050 BKN090 WS010/13040KT
PROB30 2921/3001 VRB20G30KT -TSRA BKN050CB
FM300100 31007KT P6SM SCT070 BKN120 +FC
FM300500 23006KT P6SM SCT120 $
"""
# Create a parsed TAF object from string
t = pytaf.TAF(taf_str)
# Create a decoder object from the TAF object
d = pytaf.Decoder(t)
# Print the raw string for the reference
print(taf_str)
# Decode and print the decoded string
dec = d.decode_taf()
print(dec)
|
# ... existing code ...
FM300500 23006KT P6SM SCT120 $
"""
# Create a parsed TAF object from string
t = pytaf.TAF(taf_str)
# Create a decoder object from the TAF object
d = pytaf.Decoder(t)
# Print the raw string for the reference
print(taf_str)
# Decode and print the decoded string
dec = d.decode_taf()
print(dec)
# ... rest of the code ...
|
e67c57128f88b61eac08e488e54343d48f1454c7
|
ddcz/forms/authentication.py
|
ddcz/forms/authentication.py
|
import logging
from django import forms
from django.contrib.auth import forms as authforms
from ..models import UserProfile
logger = logging.getLogger(__name__)
class LoginForm(forms.Form):
nick = forms.CharField(label="Nick", max_length=20)
password = forms.CharField(label="Heslo", max_length=50, widget=forms.PasswordInput)
class PasswordResetForm(authforms.PasswordResetForm):
def get_users(self, email):
"""Given an email, return matching user(s) who should receive a reset.
This is overridem from original form to use UserProfile instead of standard
user model since that is normative for email storage.
"""
user_profiles = UserProfile.objects.filter(email_uzivatele__iexact=email)
users = tuple(
list(
up.user
for up in user_profiles
if up.user.has_usable_password() and up.user.is_active
)
)
logger.info(
"Selected users for password reset: %s"
% ", ".join([str(u.pk) for u in users])
)
return users
|
import logging
from django import forms
from django.contrib.auth import forms as authforms
from ..models import UserProfile
logger = logging.getLogger(__name__)
class LoginForm(forms.Form):
nick = forms.CharField(label="Nick", max_length=25)
password = forms.CharField(
label="Heslo", max_length=100, widget=forms.PasswordInput
)
class PasswordResetForm(authforms.PasswordResetForm):
def get_users(self, email):
"""Given an email, return matching user(s) who should receive a reset.
This is overridem from original form to use UserProfile instead of standard
user model since that is normative for email storage.
"""
user_profiles = UserProfile.objects.filter(email_uzivatele__iexact=email)
users = tuple(
list(
up.user
for up in user_profiles
if up.user.has_usable_password() and up.user.is_active
)
)
logger.info(
"Selected users for password reset: %s"
% ", ".join([str(u.pk) for u in users])
)
return users
|
Update LoginForm to match reality
|
Update LoginForm to match reality
|
Python
|
mit
|
dracidoupe/graveyard,dracidoupe/graveyard,dracidoupe/graveyard,dracidoupe/graveyard
|
import logging
from django import forms
from django.contrib.auth import forms as authforms
from ..models import UserProfile
logger = logging.getLogger(__name__)
class LoginForm(forms.Form):
- nick = forms.CharField(label="Nick", max_length=20)
+ nick = forms.CharField(label="Nick", max_length=25)
+ password = forms.CharField(
- password = forms.CharField(label="Heslo", max_length=50, widget=forms.PasswordInput)
+ label="Heslo", max_length=100, widget=forms.PasswordInput
+ )
class PasswordResetForm(authforms.PasswordResetForm):
def get_users(self, email):
"""Given an email, return matching user(s) who should receive a reset.
This is overridem from original form to use UserProfile instead of standard
user model since that is normative for email storage.
"""
user_profiles = UserProfile.objects.filter(email_uzivatele__iexact=email)
users = tuple(
list(
up.user
for up in user_profiles
if up.user.has_usable_password() and up.user.is_active
)
)
logger.info(
"Selected users for password reset: %s"
% ", ".join([str(u.pk) for u in users])
)
return users
|
Update LoginForm to match reality
|
## Code Before:
import logging
from django import forms
from django.contrib.auth import forms as authforms
from ..models import UserProfile
logger = logging.getLogger(__name__)
class LoginForm(forms.Form):
nick = forms.CharField(label="Nick", max_length=20)
password = forms.CharField(label="Heslo", max_length=50, widget=forms.PasswordInput)
class PasswordResetForm(authforms.PasswordResetForm):
def get_users(self, email):
"""Given an email, return matching user(s) who should receive a reset.
This is overridem from original form to use UserProfile instead of standard
user model since that is normative for email storage.
"""
user_profiles = UserProfile.objects.filter(email_uzivatele__iexact=email)
users = tuple(
list(
up.user
for up in user_profiles
if up.user.has_usable_password() and up.user.is_active
)
)
logger.info(
"Selected users for password reset: %s"
% ", ".join([str(u.pk) for u in users])
)
return users
## Instruction:
Update LoginForm to match reality
## Code After:
import logging
from django import forms
from django.contrib.auth import forms as authforms
from ..models import UserProfile
logger = logging.getLogger(__name__)
class LoginForm(forms.Form):
nick = forms.CharField(label="Nick", max_length=25)
password = forms.CharField(
label="Heslo", max_length=100, widget=forms.PasswordInput
)
class PasswordResetForm(authforms.PasswordResetForm):
def get_users(self, email):
"""Given an email, return matching user(s) who should receive a reset.
This is overridem from original form to use UserProfile instead of standard
user model since that is normative for email storage.
"""
user_profiles = UserProfile.objects.filter(email_uzivatele__iexact=email)
users = tuple(
list(
up.user
for up in user_profiles
if up.user.has_usable_password() and up.user.is_active
)
)
logger.info(
"Selected users for password reset: %s"
% ", ".join([str(u.pk) for u in users])
)
return users
|
// ... existing code ...
class LoginForm(forms.Form):
nick = forms.CharField(label="Nick", max_length=25)
password = forms.CharField(
label="Heslo", max_length=100, widget=forms.PasswordInput
)
class PasswordResetForm(authforms.PasswordResetForm):
// ... rest of the code ...
|
105ac0020dbc60fe57da7db75fb82cf872a0834d
|
crm_switzerland/models/res_partner.py
|
crm_switzerland/models/res_partner.py
|
from odoo import api, models
class ResPartner(models.Model):
_inherit = 'res.partner'
@api.multi
def schedule_meeting(self):
old_action = super(ResPartner, self).schedule_meeting()
new_action = self.env.ref(
'crm_switzerland.action_calendar_event_partner').read()[0]
new_action['domain'] = [('partner_ids', 'in', self.ids)]
new_action['context'] = {
'default_partner_ids': old_action['context'][
'default_partner_ids']
}
return new_action
@api.model
def _notify_prepare_template_context(self, message):
# modification of context for lang
message = message.with_context(lang=self.lang)
return super(ResPartner, self).\
_notify_prepare_template_context(message)
|
from odoo import api, models
class ResPartner(models.Model):
_inherit = 'res.partner'
@api.multi
def schedule_meeting(self):
old_action = super(ResPartner, self).schedule_meeting()
new_action = self.env.ref(
'crm_switzerland.action_calendar_event_partner').read()[0]
new_action['domain'] = [('partner_ids', 'in', self.ids)]
new_action['context'] = {
'default_partner_ids': old_action['context'][
'default_partner_ids']
}
return new_action
@api.model
def _notify_prepare_template_context(self, message):
# modification of context for lang
message = message.with_context(lang=self[:1].lang or self.env.lang)
return super(ResPartner, self).\
_notify_prepare_template_context(message)
|
FIX bug when sending notification to multiple partners
|
FIX bug when sending notification to multiple partners
|
Python
|
agpl-3.0
|
ecino/compassion-switzerland,eicher31/compassion-switzerland,CompassionCH/compassion-switzerland,eicher31/compassion-switzerland,CompassionCH/compassion-switzerland,ecino/compassion-switzerland,eicher31/compassion-switzerland,CompassionCH/compassion-switzerland,ecino/compassion-switzerland
|
from odoo import api, models
class ResPartner(models.Model):
_inherit = 'res.partner'
@api.multi
def schedule_meeting(self):
old_action = super(ResPartner, self).schedule_meeting()
new_action = self.env.ref(
'crm_switzerland.action_calendar_event_partner').read()[0]
new_action['domain'] = [('partner_ids', 'in', self.ids)]
new_action['context'] = {
'default_partner_ids': old_action['context'][
'default_partner_ids']
}
return new_action
@api.model
def _notify_prepare_template_context(self, message):
# modification of context for lang
- message = message.with_context(lang=self.lang)
+ message = message.with_context(lang=self[:1].lang or self.env.lang)
return super(ResPartner, self).\
_notify_prepare_template_context(message)
|
FIX bug when sending notification to multiple partners
|
## Code Before:
from odoo import api, models
class ResPartner(models.Model):
_inherit = 'res.partner'
@api.multi
def schedule_meeting(self):
old_action = super(ResPartner, self).schedule_meeting()
new_action = self.env.ref(
'crm_switzerland.action_calendar_event_partner').read()[0]
new_action['domain'] = [('partner_ids', 'in', self.ids)]
new_action['context'] = {
'default_partner_ids': old_action['context'][
'default_partner_ids']
}
return new_action
@api.model
def _notify_prepare_template_context(self, message):
# modification of context for lang
message = message.with_context(lang=self.lang)
return super(ResPartner, self).\
_notify_prepare_template_context(message)
## Instruction:
FIX bug when sending notification to multiple partners
## Code After:
from odoo import api, models
class ResPartner(models.Model):
_inherit = 'res.partner'
@api.multi
def schedule_meeting(self):
old_action = super(ResPartner, self).schedule_meeting()
new_action = self.env.ref(
'crm_switzerland.action_calendar_event_partner').read()[0]
new_action['domain'] = [('partner_ids', 'in', self.ids)]
new_action['context'] = {
'default_partner_ids': old_action['context'][
'default_partner_ids']
}
return new_action
@api.model
def _notify_prepare_template_context(self, message):
# modification of context for lang
message = message.with_context(lang=self[:1].lang or self.env.lang)
return super(ResPartner, self).\
_notify_prepare_template_context(message)
|
// ... existing code ...
@api.model
def _notify_prepare_template_context(self, message):
# modification of context for lang
message = message.with_context(lang=self[:1].lang or self.env.lang)
return super(ResPartner, self).\
_notify_prepare_template_context(message)
// ... rest of the code ...
|
c6dae4cbd8d8dcbcd323526c2811fea9525bcb74
|
__init__.py
|
__init__.py
|
import spyral.memoize
import spyral.point
import spyral.camera
import spyral.util
import spyral.sprite
import spyral.gui
import spyral.scene
import spyral._lib
import pygame
director = scene.Director()
def init():
pygame.init()
pygame.font.init()
|
import spyral.memoize
import spyral.point
import spyral.camera
import spyral.util
import spyral.sprite
import spyral.gui
import spyral.scene
import spyral._lib
import spyral.event
import pygame
director = scene.Director()
def init():
pygame.init()
pygame.font.init()
|
Add an event module import
|
Add an event module import
|
Python
|
lgpl-2.1
|
platipy/spyral
|
import spyral.memoize
import spyral.point
import spyral.camera
import spyral.util
import spyral.sprite
import spyral.gui
import spyral.scene
import spyral._lib
+ import spyral.event
import pygame
director = scene.Director()
def init():
pygame.init()
pygame.font.init()
|
Add an event module import
|
## Code Before:
import spyral.memoize
import spyral.point
import spyral.camera
import spyral.util
import spyral.sprite
import spyral.gui
import spyral.scene
import spyral._lib
import pygame
director = scene.Director()
def init():
pygame.init()
pygame.font.init()
## Instruction:
Add an event module import
## Code After:
import spyral.memoize
import spyral.point
import spyral.camera
import spyral.util
import spyral.sprite
import spyral.gui
import spyral.scene
import spyral._lib
import spyral.event
import pygame
director = scene.Director()
def init():
pygame.init()
pygame.font.init()
|
...
import spyral.gui
import spyral.scene
import spyral._lib
import spyral.event
import pygame
director = scene.Director()
...
|
a71f79d3966c7b3f491c2dacce721cd974af52c4
|
sale_properties_dynamic_fields/__openerp__.py
|
sale_properties_dynamic_fields/__openerp__.py
|
{
'name': "Sale properties dynamic fields",
'version': '1.0',
'category': '',
'author': "Agile Business Group, Odoo Community Association (OCA)",
'website': 'http://www.agilebg.com',
'license': 'AGPL-3',
"depends": [
'sale_properties_easy_creation',
],
"data": [
'mrp_property_group_view.xml',
],
"demo": [
],
"test": [
'test/properties.yml',
],
"active": False,
"installable": True
}
|
{
'name': "Sale properties dynamic fields",
'version': '1.0',
'category': '',
'author': "Agile Business Group, Odoo Community Association (OCA)",
'website': 'http://www.agilebg.com',
'license': 'AGPL-3',
"depends": [
'sale_properties_easy_creation',
],
"data": [
'mrp_property_group_view.xml',
],
"demo": [
],
"test": [
'test/properties.yml',
],
"installable": True
}
|
Remove active key since is deprecated
|
Remove active key since is deprecated
|
Python
|
agpl-3.0
|
xpansa/sale-workflow,Antiun/sale-workflow,akretion/sale-workflow,acsone/sale-workflow,brain-tec/sale-workflow,brain-tec/sale-workflow,Eficent/sale-workflow,thomaspaulb/sale-workflow,acsone/sale-workflow,open-synergy/sale-workflow,jabibi/sale-workflow,Endika/sale-workflow,fevxie/sale-workflow,factorlibre/sale-workflow,BT-cserra/sale-workflow,diagramsoftware/sale-workflow,ddico/sale-workflow,akretion/sale-workflow
|
{
'name': "Sale properties dynamic fields",
'version': '1.0',
'category': '',
'author': "Agile Business Group, Odoo Community Association (OCA)",
'website': 'http://www.agilebg.com',
'license': 'AGPL-3',
"depends": [
'sale_properties_easy_creation',
],
"data": [
'mrp_property_group_view.xml',
],
"demo": [
],
"test": [
'test/properties.yml',
],
- "active": False,
"installable": True
}
|
Remove active key since is deprecated
|
## Code Before:
{
'name': "Sale properties dynamic fields",
'version': '1.0',
'category': '',
'author': "Agile Business Group, Odoo Community Association (OCA)",
'website': 'http://www.agilebg.com',
'license': 'AGPL-3',
"depends": [
'sale_properties_easy_creation',
],
"data": [
'mrp_property_group_view.xml',
],
"demo": [
],
"test": [
'test/properties.yml',
],
"active": False,
"installable": True
}
## Instruction:
Remove active key since is deprecated
## Code After:
{
'name': "Sale properties dynamic fields",
'version': '1.0',
'category': '',
'author': "Agile Business Group, Odoo Community Association (OCA)",
'website': 'http://www.agilebg.com',
'license': 'AGPL-3',
"depends": [
'sale_properties_easy_creation',
],
"data": [
'mrp_property_group_view.xml',
],
"demo": [
],
"test": [
'test/properties.yml',
],
"installable": True
}
|
...
"test": [
'test/properties.yml',
],
"installable": True
}
...
|
6da466984143d2a9176870583ca5dba8d1b9764c
|
test/integration/test_graylogapi.py
|
test/integration/test_graylogapi.py
|
import pytest
from pygraylog.pygraylog import graylogapi
def test_get():
api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two',
username = 'Zack',
password = 'Zack')
res = api._get()
expected = {
'one': 'two'
}
assert res == expected
def test_post():
api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two',
username = 'Zack',
password = 'Zack')
with pytest.raises(NotImplementedError):
api._post()
def test_put():
api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two',
username = 'Zack',
password = 'Zack')
with pytest.raises(NotImplementedError):
api._put()
def test_delete():
api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two',
username = 'Zack',
password = 'Zack')
with pytest.raises(NotImplementedError):
api._delete()
|
import pytest
from pygraylog.pygraylog import graylogapi
def test_get():
api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two',
username = 'Zack',
password = 'Zack')
res = api._get()
expected = "{\"one\": \"two\"}\n"
assert res == expected
def test_post():
api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two',
username = 'Zack',
password = 'Zack')
with pytest.raises(NotImplementedError):
api._post()
def test_put():
api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two',
username = 'Zack',
password = 'Zack')
with pytest.raises(NotImplementedError):
api._put()
def test_delete():
api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two',
username = 'Zack',
password = 'Zack')
with pytest.raises(NotImplementedError):
api._delete()
|
Modify test to reflect that api returns string response.
|
Modify test to reflect that api returns string response.
|
Python
|
apache-2.0
|
zmallen/pygraylog
|
import pytest
from pygraylog.pygraylog import graylogapi
def test_get():
api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two',
username = 'Zack',
password = 'Zack')
res = api._get()
+ expected = "{\"one\": \"two\"}\n"
- expected = {
- 'one': 'two'
- }
assert res == expected
def test_post():
api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two',
username = 'Zack',
password = 'Zack')
with pytest.raises(NotImplementedError):
api._post()
def test_put():
api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two',
username = 'Zack',
password = 'Zack')
with pytest.raises(NotImplementedError):
api._put()
def test_delete():
api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two',
username = 'Zack',
password = 'Zack')
with pytest.raises(NotImplementedError):
api._delete()
|
Modify test to reflect that api returns string response.
|
## Code Before:
import pytest
from pygraylog.pygraylog import graylogapi
def test_get():
api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two',
username = 'Zack',
password = 'Zack')
res = api._get()
expected = {
'one': 'two'
}
assert res == expected
def test_post():
api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two',
username = 'Zack',
password = 'Zack')
with pytest.raises(NotImplementedError):
api._post()
def test_put():
api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two',
username = 'Zack',
password = 'Zack')
with pytest.raises(NotImplementedError):
api._put()
def test_delete():
api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two',
username = 'Zack',
password = 'Zack')
with pytest.raises(NotImplementedError):
api._delete()
## Instruction:
Modify test to reflect that api returns string response.
## Code After:
import pytest
from pygraylog.pygraylog import graylogapi
def test_get():
api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two',
username = 'Zack',
password = 'Zack')
res = api._get()
expected = "{\"one\": \"two\"}\n"
assert res == expected
def test_post():
api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two',
username = 'Zack',
password = 'Zack')
with pytest.raises(NotImplementedError):
api._post()
def test_put():
api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two',
username = 'Zack',
password = 'Zack')
with pytest.raises(NotImplementedError):
api._put()
def test_delete():
api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two',
username = 'Zack',
password = 'Zack')
with pytest.raises(NotImplementedError):
api._delete()
|
# ... existing code ...
username = 'Zack',
password = 'Zack')
res = api._get()
expected = "{\"one\": \"two\"}\n"
assert res == expected
def test_post():
# ... rest of the code ...
|
fc6a0edca3ae42cb3570ddf62c841282bb0229aa
|
integration/util.py
|
integration/util.py
|
from fabric.api import env
class Integration(object):
def setup(self):
env.host_string = "127.0.0.1"
|
from fabric.api import env
class Integration(object):
def setup(self):
if not env.host_string: # Allow runtime selection
env.host_string = "127.0.0.1"
|
Allow easy local exec of integration suite via eg -H
|
Allow easy local exec of integration suite via eg -H
|
Python
|
bsd-2-clause
|
kmonsoor/fabric,haridsv/fabric,kxxoling/fabric,cgvarela/fabric,rane-hs/fabric-py3,tolbkni/fabric,jaraco/fabric,TarasRudnyk/fabric,cmattoon/fabric,raimon49/fabric,itoed/fabric,pashinin/fabric,ploxiln/fabric,elijah513/fabric,opavader/fabric,xLegoz/fabric,rbramwell/fabric,qinrong/fabric,bspink/fabric,hrubi/fabric,StackStorm/fabric,tekapo/fabric,askulkarni2/fabric,mathiasertl/fabric,SamuelMarks/fabric,rodrigc/fabric,bitprophet/fabric,amaniak/fabric,sdelements/fabric,pgroudas/fabric,fernandezcuesta/fabric,bitmonk/fabric,MjAbuz/fabric,likesxuqiang/fabric,getsentry/fabric
|
from fabric.api import env
class Integration(object):
def setup(self):
+ if not env.host_string: # Allow runtime selection
- env.host_string = "127.0.0.1"
+ env.host_string = "127.0.0.1"
|
Allow easy local exec of integration suite via eg -H
|
## Code Before:
from fabric.api import env
class Integration(object):
def setup(self):
env.host_string = "127.0.0.1"
## Instruction:
Allow easy local exec of integration suite via eg -H
## Code After:
from fabric.api import env
class Integration(object):
def setup(self):
if not env.host_string: # Allow runtime selection
env.host_string = "127.0.0.1"
|
...
class Integration(object):
def setup(self):
if not env.host_string: # Allow runtime selection
env.host_string = "127.0.0.1"
...
|
4b245b9a859552adb9c19fafc4bdfab5780782f2
|
d1_common_python/src/d1_common/__init__.py
|
d1_common_python/src/d1_common/__init__.py
|
__version__ = "2.1.0"
__all__ = [
'const',
'exceptions',
'upload',
'xmlrunner',
'types.exceptions',
'types.dataoneTypes',
'types.dataoneErrors',
'ext.mimeparser',
]
|
__version__ = "2.1.0"
# Set default logging handler to avoid "No handler found" warnings.
import logging
try:
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
logging.getLogger(__name__).addHandler(NullHandler())
|
Add logging NullHandler to prevent "no handler found" errors
|
Add logging NullHandler to prevent "no handler found" errors
This fixes the issue where "no handler found" errors would be printed by
the library if library clients did not set up logging.
|
Python
|
apache-2.0
|
DataONEorg/d1_python,DataONEorg/d1_python,DataONEorg/d1_python,DataONEorg/d1_python
|
__version__ = "2.1.0"
+ # Set default logging handler to avoid "No handler found" warnings.
+ import logging
- __all__ = [
- 'const',
- 'exceptions',
- 'upload',
- 'xmlrunner',
- 'types.exceptions',
- 'types.dataoneTypes',
- 'types.dataoneErrors',
- 'ext.mimeparser',
- ]
+ try:
+ from logging import NullHandler
+ except ImportError:
+ class NullHandler(logging.Handler):
+ def emit(self, record):
+ pass
+
+ logging.getLogger(__name__).addHandler(NullHandler())
+
|
Add logging NullHandler to prevent "no handler found" errors
|
## Code Before:
__version__ = "2.1.0"
__all__ = [
'const',
'exceptions',
'upload',
'xmlrunner',
'types.exceptions',
'types.dataoneTypes',
'types.dataoneErrors',
'ext.mimeparser',
]
## Instruction:
Add logging NullHandler to prevent "no handler found" errors
## Code After:
__version__ = "2.1.0"
# Set default logging handler to avoid "No handler found" warnings.
import logging
try:
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
logging.getLogger(__name__).addHandler(NullHandler())
|
// ... existing code ...
__version__ = "2.1.0"
# Set default logging handler to avoid "No handler found" warnings.
import logging
try:
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
logging.getLogger(__name__).addHandler(NullHandler())
// ... rest of the code ...
|
c8e11b602eb7525789ed1c5f4ea686f45b44f304
|
src/diamond/handler/httpHandler.py
|
src/diamond/handler/httpHandler.py
|
from Handler import Handler
import urllib
import urllib2
class HttpPostHandler(Handler):
# Inititalize Handler with url and batch size
def __init__(self, config=None):
Handler.__init__(self, config)
self.metrics = []
self.batch_size = int(self.config.get('batch', 100))
self.url = self.config.get('url')
# Join batched metrics and push to url mentioned in config
def process(self, metric):
self.metrics.append(str(metric))
if len(self.metrics) >= self.batch_size:
req = urllib2.Request(self.url, "\n".join(self.metrics))
urllib2.urlopen(req)
self.metrics = []
|
from Handler import Handler
import urllib2
class HttpPostHandler(Handler):
# Inititalize Handler with url and batch size
def __init__(self, config=None):
Handler.__init__(self, config)
self.metrics = []
self.batch_size = int(self.config.get('batch', 100))
self.url = self.config.get('url')
# Join batched metrics and push to url mentioned in config
def process(self, metric):
self.metrics.append(str(metric))
if len(self.metrics) >= self.batch_size:
req = urllib2.Request(self.url, "\n".join(self.metrics))
urllib2.urlopen(req)
self.metrics = []
|
Remove unneeded import, fix python path and add coding
|
Remove unneeded import, fix python path and add coding
|
Python
|
mit
|
signalfx/Diamond,ramjothikumar/Diamond,jriguera/Diamond,anandbhoraskar/Diamond,jriguera/Diamond,Precis/Diamond,jriguera/Diamond,socialwareinc/Diamond,saucelabs/Diamond,acquia/Diamond,dcsquared13/Diamond,stuartbfox/Diamond,hvnsweeting/Diamond,h00dy/Diamond,cannium/Diamond,dcsquared13/Diamond,Ssawa/Diamond,bmhatfield/Diamond,TAKEALOT/Diamond,tellapart/Diamond,krbaker/Diamond,MediaMath/Diamond,Netuitive/netuitive-diamond,MediaMath/Diamond,tuenti/Diamond,timchenxiaoyu/Diamond,tellapart/Diamond,EzyInsights/Diamond,russss/Diamond,tusharmakkar08/Diamond,Netuitive/Diamond,Ormod/Diamond,jriguera/Diamond,codepython/Diamond,sebbrandt87/Diamond,actmd/Diamond,Netuitive/netuitive-diamond,socialwareinc/Diamond,mzupan/Diamond,EzyInsights/Diamond,russss/Diamond,TinLe/Diamond,disqus/Diamond,jumping/Diamond,metamx/Diamond,Basis/Diamond,socialwareinc/Diamond,Nihn/Diamond-1,gg7/diamond,Ensighten/Diamond,joel-airspring/Diamond,jumping/Diamond,zoidbergwill/Diamond,rtoma/Diamond,timchenxiaoyu/Diamond,ceph/Diamond,signalfx/Diamond,Netuitive/Diamond,stuartbfox/Diamond,acquia/Diamond,disqus/Diamond,anandbhoraskar/Diamond,MichaelDoyle/Diamond,cannium/Diamond,sebbrandt87/Diamond,jaingaurav/Diamond,codepython/Diamond,h00dy/Diamond,works-mobile/Diamond,janisz/Diamond-1,krbaker/Diamond,MediaMath/Diamond,thardie/Diamond,eMerzh/Diamond-1,tusharmakkar08/Diamond,Ormod/Diamond,krbaker/Diamond,Netuitive/netuitive-diamond,hvnsweeting/Diamond,sebbrandt87/Diamond,python-diamond/Diamond,mzupan/Diamond,codepython/Diamond,skbkontur/Diamond,tellapart/Diamond,Nihn/Diamond-1,Clever/Diamond,metamx/Diamond,mfriedenhagen/Diamond,russss/Diamond,gg7/diamond,Ssawa/Diamond,Netuitive/Diamond,tellapart/Diamond,stuartbfox/Diamond,datafiniti/Diamond,Precis/Diamond,gg7/diamond,mfriedenhagen/Diamond,TinLe/Diamond,python-diamond/Diamond,jumping/Diamond,thardie/Diamond,eMerzh/Diamond-1,hamelg/Diamond,rtoma/Diamond,Netuitive/netuitive-diamond,mzupan/Diamond,TAKEALOT/Diamond,Basis/Diamond,skbkontur/Diamond,h00dy/Diamond,Ensighten/Diamond,jaingaurav/Diamond,joel-airspring/Diamond,hamelg/Diamond,mfriedenhagen/Diamond,joel-airspring/Diamond,szibis/Diamond,Precis/Diamond,Ensighten/Diamond,Slach/Diamond,saucelabs/Diamond,anandbhoraskar/Diamond,ceph/Diamond,Precis/Diamond,actmd/Diamond,Clever/Diamond,cannium/Diamond,cannium/Diamond,h00dy/Diamond,mzupan/Diamond,tuenti/Diamond,saucelabs/Diamond,thardie/Diamond,janisz/Diamond-1,bmhatfield/Diamond,works-mobile/Diamond,zoidbergwill/Diamond,works-mobile/Diamond,datafiniti/Diamond,gg7/diamond,MichaelDoyle/Diamond,bmhatfield/Diamond,tuenti/Diamond,TinLe/Diamond,signalfx/Diamond,sebbrandt87/Diamond,acquia/Diamond,saucelabs/Diamond,janisz/Diamond-1,Clever/Diamond,zoidbergwill/Diamond,actmd/Diamond,janisz/Diamond-1,skbkontur/Diamond,Ensighten/Diamond,codepython/Diamond,MediaMath/Diamond,acquia/Diamond,bmhatfield/Diamond,TAKEALOT/Diamond,MichaelDoyle/Diamond,stuartbfox/Diamond,zoidbergwill/Diamond,works-mobile/Diamond,jumping/Diamond,TAKEALOT/Diamond,krbaker/Diamond,hvnsweeting/Diamond,MichaelDoyle/Diamond,Basis/Diamond,disqus/Diamond,ramjothikumar/Diamond,CYBERBUGJR/Diamond,signalfx/Diamond,szibis/Diamond,CYBERBUGJR/Diamond,jaingaurav/Diamond,ceph/Diamond,Ssawa/Diamond,timchenxiaoyu/Diamond,hvnsweeting/Diamond,CYBERBUGJR/Diamond,actmd/Diamond,EzyInsights/Diamond,hamelg/Diamond,szibis/Diamond,szibis/Diamond,Ssawa/Diamond,rtoma/Diamond,Nihn/Diamond-1,datafiniti/Diamond,dcsquared13/Diamond,rtoma/Diamond,eMerzh/Diamond-1,Slach/Diamond,jaingaurav/Diamond,dcsquared13/Diamond,Nihn/Diamond-1,tusharmakkar08/Diamond,anandbhoraska
r/Diamond,Clever/Diamond,Ormod/Diamond,ramjothikumar/Diamond,hamelg/Diamond,Basis/Diamond,russss/Diamond,joel-airspring/Diamond,Slach/Diamond,metamx/Diamond,CYBERBUGJR/Diamond,datafiniti/Diamond,thardie/Diamond,tuenti/Diamond,ramjothikumar/Diamond,Ormod/Diamond,TinLe/Diamond,tusharmakkar08/Diamond,EzyInsights/Diamond,timchenxiaoyu/Diamond,ceph/Diamond,mfriedenhagen/Diamond,skbkontur/Diamond,socialwareinc/Diamond,eMerzh/Diamond-1,Netuitive/Diamond,python-diamond/Diamond,Slach/Diamond
|
from Handler import Handler
- import urllib
import urllib2
class HttpPostHandler(Handler):
    # Initialize Handler with url and batch size
def __init__(self, config=None):
Handler.__init__(self, config)
self.metrics = []
self.batch_size = int(self.config.get('batch', 100))
self.url = self.config.get('url')
# Join batched metrics and push to url mentioned in config
def process(self, metric):
self.metrics.append(str(metric))
if len(self.metrics) >= self.batch_size:
req = urllib2.Request(self.url, "\n".join(self.metrics))
urllib2.urlopen(req)
self.metrics = []
|
Remove unneeded import, fix python path and add coding
|
## Code Before:
from Handler import Handler
import urllib
import urllib2
class HttpPostHandler(Handler):
    # Initialize Handler with url and batch size
def __init__(self, config=None):
Handler.__init__(self, config)
self.metrics = []
self.batch_size = int(self.config.get('batch', 100))
self.url = self.config.get('url')
# Join batched metrics and push to url mentioned in config
def process(self, metric):
self.metrics.append(str(metric))
if len(self.metrics) >= self.batch_size:
req = urllib2.Request(self.url, "\n".join(self.metrics))
urllib2.urlopen(req)
self.metrics = []
## Instruction:
Remove unneeded import, fix python path and add coding
## Code After:
from Handler import Handler
import urllib2
class HttpPostHandler(Handler):
    # Initialize Handler with url and batch size
def __init__(self, config=None):
Handler.__init__(self, config)
self.metrics = []
self.batch_size = int(self.config.get('batch', 100))
self.url = self.config.get('url')
# Join batched metrics and push to url mentioned in config
def process(self, metric):
self.metrics.append(str(metric))
if len(self.metrics) >= self.batch_size:
req = urllib2.Request(self.url, "\n".join(self.metrics))
urllib2.urlopen(req)
self.metrics = []
|
# ... existing code ...
from Handler import Handler
import urllib2
# ... rest of the code ...
|
182b94f777b1743671b706c939ce14f89c31efca
|
lint/queue.py
|
lint/queue.py
|
from . import persist
import time
import threading
# Map from view_id to threading.Timer objects
timers = {}
# For compatibility this is a class with unchanged API from SL3.
class Daemon:
def start(self, callback):
self._callback = callback
def hit(self, view):
assert self._callback, "Queue: Can't hit before start."
vid = view.id()
delay = get_delay() # [seconds]
return queue_lint(vid, delay, self._callback)
def queue_lint(vid, delay, callback):
hit_time = time.monotonic()
def worker():
callback(vid, hit_time)
try:
timers[vid].cancel()
except KeyError:
pass
timers[vid] = timer = threading.Timer(delay, worker)
timer.start()
return hit_time
MIN_DELAY = 0.1
def get_delay():
"""Return the delay between a lint request and when it will be processed.
If the lint mode is not background, there is no delay. Otherwise, if
a "delay" setting is not available in any of the settings, MIN_DELAY is used.
"""
if persist.settings.get('lint_mode') != 'background':
return 0
return persist.settings.get('delay', MIN_DELAY)
queue = Daemon()
|
from . import persist
import time
import threading
# Map from view_id to threading.Timer objects
timers = {}
# For compatibility this is a class with unchanged API from SL3.
class Daemon:
def start(self, callback):
self._callback = callback
def hit(self, view):
assert self._callback, "Queue: Can't hit before start."
vid = view.id()
delay = get_delay() # [seconds]
return queue_lint(vid, delay, self._callback)
def queue_lint(vid, delay, callback):
hit_time = time.monotonic()
def worker():
callback(vid, hit_time)
try:
timers[vid].cancel()
except KeyError:
pass
timers[vid] = timer = threading.Timer(delay, worker)
timer.start()
return hit_time
def get_delay():
"""Return the delay between a lint request and when it will be processed.
If the lint mode is not background, there is no delay. Otherwise, if
a "delay" setting is not available in any of the settings, MIN_DELAY is used.
"""
if persist.settings.get('lint_mode') != 'background':
return 0
return persist.settings.get('delay')
queue = Daemon()
|
Remove MIN_DELAY bc a default setting is guaranteed
|
Remove MIN_DELAY bc a default setting is guaranteed
|
Python
|
mit
|
SublimeLinter/SublimeLinter3,SublimeLinter/SublimeLinter3
|
from . import persist
import time
import threading
# Map from view_id to threading.Timer objects
timers = {}
# For compatibility this is a class with unchanged API from SL3.
class Daemon:
def start(self, callback):
self._callback = callback
def hit(self, view):
assert self._callback, "Queue: Can't hit before start."
vid = view.id()
delay = get_delay() # [seconds]
return queue_lint(vid, delay, self._callback)
def queue_lint(vid, delay, callback):
hit_time = time.monotonic()
def worker():
callback(vid, hit_time)
try:
timers[vid].cancel()
except KeyError:
pass
timers[vid] = timer = threading.Timer(delay, worker)
timer.start()
return hit_time
- MIN_DELAY = 0.1
-
-
def get_delay():
"""Return the delay between a lint request and when it will be processed.
If the lint mode is not background, there is no delay. Otherwise, if
a "delay" setting is not available in any of the settings, MIN_DELAY is used.
"""
if persist.settings.get('lint_mode') != 'background':
return 0
- return persist.settings.get('delay', MIN_DELAY)
+ return persist.settings.get('delay')
queue = Daemon()
|
Remove MIN_DELAY bc a default setting is guaranteed
|
## Code Before:
from . import persist
import time
import threading
# Map from view_id to threading.Timer objects
timers = {}
# For compatibility this is a class with unchanged API from SL3.
class Daemon:
def start(self, callback):
self._callback = callback
def hit(self, view):
assert self._callback, "Queue: Can't hit before start."
vid = view.id()
delay = get_delay() # [seconds]
return queue_lint(vid, delay, self._callback)
def queue_lint(vid, delay, callback):
hit_time = time.monotonic()
def worker():
callback(vid, hit_time)
try:
timers[vid].cancel()
except KeyError:
pass
timers[vid] = timer = threading.Timer(delay, worker)
timer.start()
return hit_time
MIN_DELAY = 0.1
def get_delay():
"""Return the delay between a lint request and when it will be processed.
If the lint mode is not background, there is no delay. Otherwise, if
a "delay" setting is not available in any of the settings, MIN_DELAY is used.
"""
if persist.settings.get('lint_mode') != 'background':
return 0
return persist.settings.get('delay', MIN_DELAY)
queue = Daemon()
## Instruction:
Remove MIN_DELAY bc a default setting is guaranteed
## Code After:
from . import persist
import time
import threading
# Map from view_id to threading.Timer objects
timers = {}
# For compatibility this is a class with unchanged API from SL3.
class Daemon:
def start(self, callback):
self._callback = callback
def hit(self, view):
assert self._callback, "Queue: Can't hit before start."
vid = view.id()
delay = get_delay() # [seconds]
return queue_lint(vid, delay, self._callback)
def queue_lint(vid, delay, callback):
hit_time = time.monotonic()
def worker():
callback(vid, hit_time)
try:
timers[vid].cancel()
except KeyError:
pass
timers[vid] = timer = threading.Timer(delay, worker)
timer.start()
return hit_time
def get_delay():
"""Return the delay between a lint request and when it will be processed.
If the lint mode is not background, there is no delay. Otherwise, if
a "delay" setting is not available in any of the settings, MIN_DELAY is used.
"""
if persist.settings.get('lint_mode') != 'background':
return 0
return persist.settings.get('delay')
queue = Daemon()
|
...
return hit_time
def get_delay():
"""Return the delay between a lint request and when it will be processed.
...
if persist.settings.get('lint_mode') != 'background':
return 0
return persist.settings.get('delay')
queue = Daemon()
...
|
0434baddfc2eb3691180e6fa461be3323852eea9
|
clubadm/middleware.py
|
clubadm/middleware.py
|
from django.http import Http404
from django.utils import timezone
from clubadm.models import Member, Season
class SeasonMiddleware(object):
def process_view(self, request, view_func, view_args, view_kwargs):
if "year" in view_kwargs:
year = int(view_kwargs["year"])
try:
request.season = Season.objects.get_by_year(year)
except Season.DoesNotExist:
raise Http404("Такой сезон еще не создан")
class MemberMiddleware(object):
def process_view(self, request, view_func, view_args, view_kwargs):
if "year" in view_kwargs and request.user.is_authenticated:
year = int(view_kwargs["year"])
try:
request.member = Member.objects.get_by_user_and_year(
request.user, year)
except Member.DoesNotExist:
request.member = None
class XUserMiddleware(object):
def process_response(self, request, response):
if request.user.is_anonymous:
return response
# Чтобы Nginx мог писать имя пользователя в логи
response["X-User"] = request.user.username
return response
|
from django.http import Http404
from django.utils import timezone
from clubadm.models import Member, Season
class SeasonMiddleware(object):
def process_view(self, request, view_func, view_args, view_kwargs):
if "year" in view_kwargs:
year = int(view_kwargs["year"])
try:
request.season = Season.objects.get_by_year(year)
except Season.DoesNotExist:
raise Http404("Такой сезон еще не создан")
class MemberMiddleware(object):
def process_view(self, request, view_func, view_args, view_kwargs):
if "year" in view_kwargs and request.user.is_authenticated:
year = int(view_kwargs["year"])
try:
request.member = Member.objects.get_by_user_and_year(
request.user, year)
except Member.DoesNotExist:
request.member = None
class XUserMiddleware(object):
def process_response(self, request, response):
if not hasattr(request, "user"):
return response
if request.user.is_anonymous:
return response
# Чтобы Nginx мог писать имя пользователя в логи
response["X-User"] = request.user.username
return response
|
Handle an authentication edge case
|
Handle an authentication edge case
|
Python
|
mit
|
clubadm/clubadm,clubadm/clubadm,clubadm/clubadm
|
from django.http import Http404
from django.utils import timezone
from clubadm.models import Member, Season
class SeasonMiddleware(object):
def process_view(self, request, view_func, view_args, view_kwargs):
if "year" in view_kwargs:
year = int(view_kwargs["year"])
try:
request.season = Season.objects.get_by_year(year)
except Season.DoesNotExist:
raise Http404("Такой сезон еще не создан")
class MemberMiddleware(object):
def process_view(self, request, view_func, view_args, view_kwargs):
if "year" in view_kwargs and request.user.is_authenticated:
year = int(view_kwargs["year"])
try:
request.member = Member.objects.get_by_user_and_year(
request.user, year)
except Member.DoesNotExist:
request.member = None
class XUserMiddleware(object):
def process_response(self, request, response):
+ if not hasattr(request, "user"):
+ return response
if request.user.is_anonymous:
return response
# Чтобы Nginx мог писать имя пользователя в логи
response["X-User"] = request.user.username
return response
|
Handle an authentication edge case
|
## Code Before:
from django.http import Http404
from django.utils import timezone
from clubadm.models import Member, Season
class SeasonMiddleware(object):
def process_view(self, request, view_func, view_args, view_kwargs):
if "year" in view_kwargs:
year = int(view_kwargs["year"])
try:
request.season = Season.objects.get_by_year(year)
except Season.DoesNotExist:
raise Http404("Такой сезон еще не создан")
class MemberMiddleware(object):
def process_view(self, request, view_func, view_args, view_kwargs):
if "year" in view_kwargs and request.user.is_authenticated:
year = int(view_kwargs["year"])
try:
request.member = Member.objects.get_by_user_and_year(
request.user, year)
except Member.DoesNotExist:
request.member = None
class XUserMiddleware(object):
def process_response(self, request, response):
if request.user.is_anonymous:
return response
# Чтобы Nginx мог писать имя пользователя в логи
response["X-User"] = request.user.username
return response
## Instruction:
Handle an authentication edge case
## Code After:
from django.http import Http404
from django.utils import timezone
from clubadm.models import Member, Season
class SeasonMiddleware(object):
def process_view(self, request, view_func, view_args, view_kwargs):
if "year" in view_kwargs:
year = int(view_kwargs["year"])
try:
request.season = Season.objects.get_by_year(year)
except Season.DoesNotExist:
raise Http404("Такой сезон еще не создан")
class MemberMiddleware(object):
def process_view(self, request, view_func, view_args, view_kwargs):
if "year" in view_kwargs and request.user.is_authenticated:
year = int(view_kwargs["year"])
try:
request.member = Member.objects.get_by_user_and_year(
request.user, year)
except Member.DoesNotExist:
request.member = None
class XUserMiddleware(object):
def process_response(self, request, response):
if not hasattr(request, "user"):
return response
if request.user.is_anonymous:
return response
# Чтобы Nginx мог писать имя пользователя в логи
response["X-User"] = request.user.username
return response
|
...
class XUserMiddleware(object):
def process_response(self, request, response):
if not hasattr(request, "user"):
return response
if request.user.is_anonymous:
return response
# Чтобы Nginx мог писать имя пользователя в логи
...
|
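The hasattr guard added above covers responses built before Django's AuthenticationMiddleware has attached request.user (for example when earlier middleware short-circuits the request). An equivalent defensive lookup, shown only as an illustration and not part of the clubadm codebase:
def x_user_header(request, response):
    user = getattr(request, "user", None)      # None when AuthenticationMiddleware never ran
    if user is None or user.is_anonymous:
        return response
    response["X-User"] = user.username
    return response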
ef2b13ec19d28b56647c0a11044cba6d400f9175
|
vimiv/image_enhance.py
|
vimiv/image_enhance.py
|
"""Wrapper functions for the _image_enhance C extension."""
from gi.repository import GdkPixbuf, GLib
from vimiv import _image_enhance
def enhance_bc(pixbuf, brightness, contrast):
"""Enhance brightness and contrast of a GdkPixbuf.Pixbuf.
Args:
pixbuf: Original GdkPixbuf.Pixbuf to work with.
brightness: Float between -1.0 and 1.0 to change brightness.
contrast: Float between -1.0 and 1.0 to change contrast.
Return:
The enhanced GdkPixbuf.Pixbuf
"""
width = pixbuf.get_width()
height = pixbuf.get_height()
data = pixbuf.get_pixels()
has_alpha = pixbuf.get_has_alpha()
c_has_alpha = 1 if has_alpha else 0 # Numbers are easier for C
rowstride = 4 * width if has_alpha else 3 * width
# Update plain bytes using C extension
# Pylint does not read this properly
# pylint: disable=no-member
data = _image_enhance.enhance_bc(data, c_has_alpha, brightness, contrast)
gdata = GLib.Bytes.new(data)
return GdkPixbuf.Pixbuf.new_from_bytes(
gdata, GdkPixbuf.Colorspace.RGB, has_alpha, 8, width, height, rowstride)
|
"""Wrapper functions for the _image_enhance C extension."""
from gi.repository import GdkPixbuf, GLib
from vimiv import _image_enhance
def enhance_bc(pixbuf, brightness, contrast):
"""Enhance brightness and contrast of a GdkPixbuf.Pixbuf.
Args:
pixbuf: Original GdkPixbuf.Pixbuf to work with.
brightness: Float between -1.0 and 1.0 to change brightness.
contrast: Float between -1.0 and 1.0 to change contrast.
Return:
The enhanced GdkPixbuf.Pixbuf
"""
data = pixbuf.get_pixels()
has_alpha = pixbuf.get_has_alpha()
c_has_alpha = 1 if has_alpha else 0 # Numbers are easier for C
# Update plain bytes using C extension
# Pylint does not read this properly
# pylint: disable=no-member
data = _image_enhance.enhance_bc(data, c_has_alpha, brightness, contrast)
gdata = GLib.Bytes.new(data)
return GdkPixbuf.Pixbuf.new_from_bytes(gdata,
pixbuf.get_colorspace(),
has_alpha,
pixbuf.get_bits_per_sample(),
pixbuf.get_width(),
pixbuf.get_height(),
pixbuf.get_rowstride())
|
Use rowstride directly from GdkPixbuf in enhance
|
Use rowstride directly from GdkPixbuf in enhance
The custom calculation of rowstride failed for images with weird
dimensions and completely broke enhance.
fixes #51
|
Python
|
mit
|
karlch/vimiv,karlch/vimiv,karlch/vimiv
|
"""Wrapper functions for the _image_enhance C extension."""
from gi.repository import GdkPixbuf, GLib
from vimiv import _image_enhance
def enhance_bc(pixbuf, brightness, contrast):
"""Enhance brightness and contrast of a GdkPixbuf.Pixbuf.
Args:
pixbuf: Original GdkPixbuf.Pixbuf to work with.
brightness: Float between -1.0 and 1.0 to change brightness.
contrast: Float between -1.0 and 1.0 to change contrast.
Return:
The enhanced GdkPixbuf.Pixbuf
"""
- width = pixbuf.get_width()
- height = pixbuf.get_height()
data = pixbuf.get_pixels()
has_alpha = pixbuf.get_has_alpha()
c_has_alpha = 1 if has_alpha else 0 # Numbers are easier for C
- rowstride = 4 * width if has_alpha else 3 * width
# Update plain bytes using C extension
# Pylint does not read this properly
# pylint: disable=no-member
data = _image_enhance.enhance_bc(data, c_has_alpha, brightness, contrast)
gdata = GLib.Bytes.new(data)
- return GdkPixbuf.Pixbuf.new_from_bytes(
+ return GdkPixbuf.Pixbuf.new_from_bytes(gdata,
- gdata, GdkPixbuf.Colorspace.RGB, has_alpha, 8, width, height, rowstride)
+ pixbuf.get_colorspace(),
+ has_alpha,
+ pixbuf.get_bits_per_sample(),
+ pixbuf.get_width(),
+ pixbuf.get_height(),
+ pixbuf.get_rowstride())
|
Use rowstride directly from GdkPixbuf in enhance
|
## Code Before:
"""Wrapper functions for the _image_enhance C extension."""
from gi.repository import GdkPixbuf, GLib
from vimiv import _image_enhance
def enhance_bc(pixbuf, brightness, contrast):
"""Enhance brightness and contrast of a GdkPixbuf.Pixbuf.
Args:
pixbuf: Original GdkPixbuf.Pixbuf to work with.
brightness: Float between -1.0 and 1.0 to change brightness.
contrast: Float between -1.0 and 1.0 to change contrast.
Return:
The enhanced GdkPixbuf.Pixbuf
"""
width = pixbuf.get_width()
height = pixbuf.get_height()
data = pixbuf.get_pixels()
has_alpha = pixbuf.get_has_alpha()
c_has_alpha = 1 if has_alpha else 0 # Numbers are easier for C
rowstride = 4 * width if has_alpha else 3 * width
# Update plain bytes using C extension
# Pylint does not read this properly
# pylint: disable=no-member
data = _image_enhance.enhance_bc(data, c_has_alpha, brightness, contrast)
gdata = GLib.Bytes.new(data)
return GdkPixbuf.Pixbuf.new_from_bytes(
gdata, GdkPixbuf.Colorspace.RGB, has_alpha, 8, width, height, rowstride)
## Instruction:
Use rowstride directly from GdkPixbuf in enhance
## Code After:
"""Wrapper functions for the _image_enhance C extension."""
from gi.repository import GdkPixbuf, GLib
from vimiv import _image_enhance
def enhance_bc(pixbuf, brightness, contrast):
"""Enhance brightness and contrast of a GdkPixbuf.Pixbuf.
Args:
pixbuf: Original GdkPixbuf.Pixbuf to work with.
brightness: Float between -1.0 and 1.0 to change brightness.
contrast: Float between -1.0 and 1.0 to change contrast.
Return:
The enhanced GdkPixbuf.Pixbuf
"""
data = pixbuf.get_pixels()
has_alpha = pixbuf.get_has_alpha()
c_has_alpha = 1 if has_alpha else 0 # Numbers are easier for C
# Update plain bytes using C extension
# Pylint does not read this properly
# pylint: disable=no-member
data = _image_enhance.enhance_bc(data, c_has_alpha, brightness, contrast)
gdata = GLib.Bytes.new(data)
return GdkPixbuf.Pixbuf.new_from_bytes(gdata,
pixbuf.get_colorspace(),
has_alpha,
pixbuf.get_bits_per_sample(),
pixbuf.get_width(),
pixbuf.get_height(),
pixbuf.get_rowstride())
|
...
Return:
The enhanced GdkPixbuf.Pixbuf
"""
data = pixbuf.get_pixels()
has_alpha = pixbuf.get_has_alpha()
c_has_alpha = 1 if has_alpha else 0 # Numbers are easier for C
# Update plain bytes using C extension
# Pylint does not read this properly
# pylint: disable=no-member
data = _image_enhance.enhance_bc(data, c_has_alpha, brightness, contrast)
gdata = GLib.Bytes.new(data)
return GdkPixbuf.Pixbuf.new_from_bytes(gdata,
pixbuf.get_colorspace(),
has_alpha,
pixbuf.get_bits_per_sample(),
pixbuf.get_width(),
pixbuf.get_height(),
pixbuf.get_rowstride())
...
|
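The rowstride fix above exists because a pixbuf's rowstride may exceed n_channels * width when rows are padded for alignment, so recomputing it as 3 * width or 4 * width breaks for some image dimensions. A small sketch, assuming 4-byte row alignment (the real value always comes from pixbuf.get_rowstride()):
width, n_channels = 333, 3
packed = width * n_channels         # 999 bytes of pixel data per row
aligned = (packed + 3) // 4 * 4     # 1000 bytes once each row is padded to a 4-byte boundary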
3a5cbdbe4a79efd59114ca11f86e282aee0eac5c
|
tests/trunk_aware.py
|
tests/trunk_aware.py
|
import functools
import sys
import nose
from preparation.resources.Resource import trunks_registered, applied_modifiers, resource_by_trunk
__author__ = 'moskupols'
_multiprocess_shared_ = True
_all_trunks = set(trunks_registered())
_trunk_filter = _all_trunks
def trunk_parametrized(trunks=set(trunks_registered())):
def decorate(tester):
@functools.wraps(tester)
def generate_tests(*args):
for t in _trunk_filter & trunks:
yield (tester, t) + args
return generate_tests
return decorate
@functools.lru_cache()
def asset_cache(trunk):
return tuple(applied_modifiers(resource_by_trunk(trunk)()))
def main(args=None):
global _trunk_filter
if args is None:
args = sys.argv
_trunk_filter = _all_trunks & set(args)
if len(_trunk_filter) == 0:
_trunk_filter = _all_trunks
args = [arg for arg in args if arg not in _trunk_filter]
nose.main(argv=args)
|
import functools
import sys
import nose
from preparation.resources.Resource import trunks_registered, applied_modifiers, resource_by_trunk
__author__ = 'moskupols'
_multiprocess_shared_ = True
_all_trunks = set(trunks_registered())
_trunk_filter = _all_trunks
def trunk_parametrized(trunks=set(trunks_registered())):
def decorate(tester):
@functools.wraps(tester)
def generate_tests(*args):
for t in _trunk_filter & trunks:
yield (tester, t) + args
return generate_tests
return decorate
@functools.lru_cache()
def asset_cache(trunk):
return tuple(applied_modifiers(resource_by_trunk(trunk)()))
def main(args=None):
global _trunk_filter
if args is None:
args = sys.argv
include = _all_trunks & set(args)
exclude_percented = set('%' + t for t in _all_trunks) & set(args)
exclude = set(e[1:] for e in exclude_percented)
if len(include) == 0:
include = _all_trunks
_trunk_filter = include - exclude
args = [arg for arg in args if arg not in include | exclude_percented]
nose.main(argv=args)
|
Add ability to exclude trunks by passing % before it
|
Add ability to exclude trunks by passing % before it
For example, ./run_nose -v %FilmTitles %BookTitles
|
Python
|
mit
|
hatbot-team/hatbot_resources
|
import functools
import sys
import nose
from preparation.resources.Resource import trunks_registered, applied_modifiers, resource_by_trunk
__author__ = 'moskupols'
_multiprocess_shared_ = True
_all_trunks = set(trunks_registered())
_trunk_filter = _all_trunks
def trunk_parametrized(trunks=set(trunks_registered())):
def decorate(tester):
@functools.wraps(tester)
def generate_tests(*args):
for t in _trunk_filter & trunks:
yield (tester, t) + args
return generate_tests
return decorate
@functools.lru_cache()
def asset_cache(trunk):
return tuple(applied_modifiers(resource_by_trunk(trunk)()))
def main(args=None):
global _trunk_filter
if args is None:
args = sys.argv
- _trunk_filter = _all_trunks & set(args)
+ include = _all_trunks & set(args)
- if len(_trunk_filter) == 0:
- _trunk_filter = _all_trunks
+ exclude_percented = set('%' + t for t in _all_trunks) & set(args)
+ exclude = set(e[1:] for e in exclude_percented)
+ if len(include) == 0:
+ include = _all_trunks
+ _trunk_filter = include - exclude
+
- args = [arg for arg in args if arg not in _trunk_filter]
+ args = [arg for arg in args if arg not in include | exclude_percented]
nose.main(argv=args)
|
Add ability to exclude trunks by passing % before it
|
## Code Before:
import functools
import sys
import nose
from preparation.resources.Resource import trunks_registered, applied_modifiers, resource_by_trunk
__author__ = 'moskupols'
_multiprocess_shared_ = True
_all_trunks = set(trunks_registered())
_trunk_filter = _all_trunks
def trunk_parametrized(trunks=set(trunks_registered())):
def decorate(tester):
@functools.wraps(tester)
def generate_tests(*args):
for t in _trunk_filter & trunks:
yield (tester, t) + args
return generate_tests
return decorate
@functools.lru_cache()
def asset_cache(trunk):
return tuple(applied_modifiers(resource_by_trunk(trunk)()))
def main(args=None):
global _trunk_filter
if args is None:
args = sys.argv
_trunk_filter = _all_trunks & set(args)
if len(_trunk_filter) == 0:
_trunk_filter = _all_trunks
args = [arg for arg in args if arg not in _trunk_filter]
nose.main(argv=args)
## Instruction:
Add ability to exclude trunks by passing % before it
## Code After:
import functools
import sys
import nose
from preparation.resources.Resource import trunks_registered, applied_modifiers, resource_by_trunk
__author__ = 'moskupols'
_multiprocess_shared_ = True
_all_trunks = set(trunks_registered())
_trunk_filter = _all_trunks
def trunk_parametrized(trunks=set(trunks_registered())):
def decorate(tester):
@functools.wraps(tester)
def generate_tests(*args):
for t in _trunk_filter & trunks:
yield (tester, t) + args
return generate_tests
return decorate
@functools.lru_cache()
def asset_cache(trunk):
return tuple(applied_modifiers(resource_by_trunk(trunk)()))
def main(args=None):
global _trunk_filter
if args is None:
args = sys.argv
include = _all_trunks & set(args)
exclude_percented = set('%' + t for t in _all_trunks) & set(args)
exclude = set(e[1:] for e in exclude_percented)
if len(include) == 0:
include = _all_trunks
_trunk_filter = include - exclude
args = [arg for arg in args if arg not in include | exclude_percented]
nose.main(argv=args)
|
# ... existing code ...
if args is None:
args = sys.argv
include = _all_trunks & set(args)
exclude_percented = set('%' + t for t in _all_trunks) & set(args)
exclude = set(e[1:] for e in exclude_percented)
if len(include) == 0:
include = _all_trunks
_trunk_filter = include - exclude
args = [arg for arg in args if arg not in include | exclude_percented]
nose.main(argv=args)
# ... rest of the code ...
|
72216757991a2120bb81e0003496eee908373b0c
|
keystone/common/policies/ec2_credential.py
|
keystone/common/policies/ec2_credential.py
|
from oslo_policy import policy
from keystone.common.policies import base
ec2_credential_policies = [
policy.RuleDefault(
name=base.IDENTITY % 'ec2_get_credential',
check_str=base.RULE_ADMIN_OR_CREDENTIAL_OWNER),
policy.RuleDefault(
name=base.IDENTITY % 'ec2_list_credentials',
check_str=base.RULE_ADMIN_REQUIRED),
policy.RuleDefault(
name=base.IDENTITY % 'ec2_create_credential',
check_str=base.RULE_ADMIN_REQUIRED),
policy.RuleDefault(
name=base.IDENTITY % 'ec2_delete_credential',
check_str=base.RULE_ADMIN_OR_CREDENTIAL_OWNER),
]
def list_rules():
return ec2_credential_policies
|
from oslo_policy import policy
from keystone.common.policies import base
ec2_credential_policies = [
policy.RuleDefault(
name=base.IDENTITY % 'ec2_get_credential',
check_str=base.RULE_ADMIN_OR_CREDENTIAL_OWNER),
policy.RuleDefault(
name=base.IDENTITY % 'ec2_list_credentials',
check_str=base.RULE_ADMIN_OR_OWNER),
policy.RuleDefault(
name=base.IDENTITY % 'ec2_create_credential',
check_str=base.RULE_ADMIN_OR_OWNER),
policy.RuleDefault(
name=base.IDENTITY % 'ec2_delete_credential',
check_str=base.RULE_ADMIN_OR_CREDENTIAL_OWNER),
]
def list_rules():
return ec2_credential_policies
|
Set the correct in-code policy for ec2 operations
|
Set the correct in-code policy for ec2 operations
In I8bd0fa342cdfee00acd3c7a33f7232fe0a87e23f we moved some of the policy
defaults into code. Some of the policy were accidentally changed.
Change-Id: Ib744317025d928c7397ab00dc706172592a9abaf
Closes-Bug: #1675377
|
Python
|
apache-2.0
|
ilay09/keystone,rajalokan/keystone,openstack/keystone,ilay09/keystone,mahak/keystone,ilay09/keystone,mahak/keystone,openstack/keystone,rajalokan/keystone,rajalokan/keystone,mahak/keystone,openstack/keystone
|
from oslo_policy import policy
from keystone.common.policies import base
ec2_credential_policies = [
policy.RuleDefault(
name=base.IDENTITY % 'ec2_get_credential',
check_str=base.RULE_ADMIN_OR_CREDENTIAL_OWNER),
policy.RuleDefault(
name=base.IDENTITY % 'ec2_list_credentials',
- check_str=base.RULE_ADMIN_REQUIRED),
+ check_str=base.RULE_ADMIN_OR_OWNER),
policy.RuleDefault(
name=base.IDENTITY % 'ec2_create_credential',
- check_str=base.RULE_ADMIN_REQUIRED),
+ check_str=base.RULE_ADMIN_OR_OWNER),
policy.RuleDefault(
name=base.IDENTITY % 'ec2_delete_credential',
check_str=base.RULE_ADMIN_OR_CREDENTIAL_OWNER),
]
def list_rules():
return ec2_credential_policies
|
Set the correct in-code policy for ec2 operations
|
## Code Before:
from oslo_policy import policy
from keystone.common.policies import base
ec2_credential_policies = [
policy.RuleDefault(
name=base.IDENTITY % 'ec2_get_credential',
check_str=base.RULE_ADMIN_OR_CREDENTIAL_OWNER),
policy.RuleDefault(
name=base.IDENTITY % 'ec2_list_credentials',
check_str=base.RULE_ADMIN_REQUIRED),
policy.RuleDefault(
name=base.IDENTITY % 'ec2_create_credential',
check_str=base.RULE_ADMIN_REQUIRED),
policy.RuleDefault(
name=base.IDENTITY % 'ec2_delete_credential',
check_str=base.RULE_ADMIN_OR_CREDENTIAL_OWNER),
]
def list_rules():
return ec2_credential_policies
## Instruction:
Set the correct in-code policy for ec2 operations
## Code After:
from oslo_policy import policy
from keystone.common.policies import base
ec2_credential_policies = [
policy.RuleDefault(
name=base.IDENTITY % 'ec2_get_credential',
check_str=base.RULE_ADMIN_OR_CREDENTIAL_OWNER),
policy.RuleDefault(
name=base.IDENTITY % 'ec2_list_credentials',
check_str=base.RULE_ADMIN_OR_OWNER),
policy.RuleDefault(
name=base.IDENTITY % 'ec2_create_credential',
check_str=base.RULE_ADMIN_OR_OWNER),
policy.RuleDefault(
name=base.IDENTITY % 'ec2_delete_credential',
check_str=base.RULE_ADMIN_OR_CREDENTIAL_OWNER),
]
def list_rules():
return ec2_credential_policies
|
...
check_str=base.RULE_ADMIN_OR_CREDENTIAL_OWNER),
policy.RuleDefault(
name=base.IDENTITY % 'ec2_list_credentials',
check_str=base.RULE_ADMIN_OR_OWNER),
policy.RuleDefault(
name=base.IDENTITY % 'ec2_create_credential',
check_str=base.RULE_ADMIN_OR_OWNER),
policy.RuleDefault(
name=base.IDENTITY % 'ec2_delete_credential',
check_str=base.RULE_ADMIN_OR_CREDENTIAL_OWNER),
...
|
3c2663d4c8ca523d072b6e82bf872f412aba9321
|
mrgeo-python/src/main/python/pymrgeo/rastermapop.py
|
mrgeo-python/src/main/python/pymrgeo/rastermapop.py
|
import copy
import json
from py4j.java_gateway import JavaClass, java_import
from pymrgeo.instance import is_instance_of as iio
class RasterMapOp(object):
mapop = None
gateway = None
context = None
job = None
def __init__(self, gateway=None, context=None, mapop=None, job=None):
self.gateway = gateway
self.context = context
self.mapop = mapop
self.job = job
@staticmethod
def nan():
return float('nan')
def clone(self):
return copy.copy(self)
def is_instance_of(self, java_object, java_class):
return iio(self.gateway, java_object, java_class)
def metadata(self):
if self.mapop is None:
return None
jvm = self.gateway.jvm
java_import(jvm, "org.mrgeo.mapalgebra.raster.RasterMapOp")
java_import(jvm, "org.mrgeo.image.MrsPyramidMetadata")
meta = self.mapop.metadata().getOrElse(None)
if meta is None:
return None
java_import(jvm, "com.fasterxml.jackson.databind.ObjectMapper")
mapper = jvm.com.fasterxml.jackson.databind.ObjectMapper()
jsonstr = mapper.writeValueAsString(meta)
# print(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(meta))
return json.loads(jsonstr)
|
import copy
import json
from py4j.java_gateway import java_import
from pymrgeo.instance import is_instance_of as iio
class RasterMapOp(object):
mapop = None
gateway = None
context = None
job = None
def __init__(self, gateway=None, context=None, mapop=None, job=None):
self.gateway = gateway
self.context = context
self.mapop = mapop
self.job = job
@staticmethod
def nan():
return float('nan')
def clone(self):
return copy.copy(self)
def is_instance_of(self, java_object, java_class):
return iio(self.gateway, java_object, java_class)
def metadata(self):
if self.mapop is None:
return None
jvm = self.gateway.jvm
java_import(jvm, "org.mrgeo.mapalgebra.raster.RasterMapOp")
java_import(jvm, "org.mrgeo.image.MrsPyramidMetadata")
if self.mapop.metadata().isEmpty():
return None
meta = self.mapop.metadata().get()
java_import(jvm, "com.fasterxml.jackson.databind.ObjectMapper")
mapper = jvm.com.fasterxml.jackson.databind.ObjectMapper()
jsonstr = mapper.writeValueAsString(meta)
# print(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(meta))
return json.loads(jsonstr)
|
Implement empty metadata a little differently
|
Implement empty metadata a little differently
|
Python
|
apache-2.0
|
ngageoint/mrgeo,ngageoint/mrgeo,ngageoint/mrgeo
|
import copy
import json
- from py4j.java_gateway import JavaClass, java_import
+ from py4j.java_gateway import java_import
from pymrgeo.instance import is_instance_of as iio
class RasterMapOp(object):
mapop = None
gateway = None
context = None
job = None
def __init__(self, gateway=None, context=None, mapop=None, job=None):
self.gateway = gateway
self.context = context
self.mapop = mapop
self.job = job
@staticmethod
def nan():
return float('nan')
def clone(self):
return copy.copy(self)
def is_instance_of(self, java_object, java_class):
return iio(self.gateway, java_object, java_class)
def metadata(self):
if self.mapop is None:
return None
jvm = self.gateway.jvm
java_import(jvm, "org.mrgeo.mapalgebra.raster.RasterMapOp")
java_import(jvm, "org.mrgeo.image.MrsPyramidMetadata")
+ if self.mapop.metadata().isEmpty():
- meta = self.mapop.metadata().getOrElse(None)
- if meta is None:
return None
+
+ meta = self.mapop.metadata().get()
java_import(jvm, "com.fasterxml.jackson.databind.ObjectMapper")
mapper = jvm.com.fasterxml.jackson.databind.ObjectMapper()
jsonstr = mapper.writeValueAsString(meta)
# print(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(meta))
return json.loads(jsonstr)
|
Implement empty metadata a little differently
|
## Code Before:
import copy
import json
from py4j.java_gateway import JavaClass, java_import
from pymrgeo.instance import is_instance_of as iio
class RasterMapOp(object):
mapop = None
gateway = None
context = None
job = None
def __init__(self, gateway=None, context=None, mapop=None, job=None):
self.gateway = gateway
self.context = context
self.mapop = mapop
self.job = job
@staticmethod
def nan():
return float('nan')
def clone(self):
return copy.copy(self)
def is_instance_of(self, java_object, java_class):
return iio(self.gateway, java_object, java_class)
def metadata(self):
if self.mapop is None:
return None
jvm = self.gateway.jvm
java_import(jvm, "org.mrgeo.mapalgebra.raster.RasterMapOp")
java_import(jvm, "org.mrgeo.image.MrsPyramidMetadata")
meta = self.mapop.metadata().getOrElse(None)
if meta is None:
return None
java_import(jvm, "com.fasterxml.jackson.databind.ObjectMapper")
mapper = jvm.com.fasterxml.jackson.databind.ObjectMapper()
jsonstr = mapper.writeValueAsString(meta)
# print(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(meta))
return json.loads(jsonstr)
## Instruction:
Implement empty metadata a little differently
## Code After:
import copy
import json
from py4j.java_gateway import java_import
from pymrgeo.instance import is_instance_of as iio
class RasterMapOp(object):
mapop = None
gateway = None
context = None
job = None
def __init__(self, gateway=None, context=None, mapop=None, job=None):
self.gateway = gateway
self.context = context
self.mapop = mapop
self.job = job
@staticmethod
def nan():
return float('nan')
def clone(self):
return copy.copy(self)
def is_instance_of(self, java_object, java_class):
return iio(self.gateway, java_object, java_class)
def metadata(self):
if self.mapop is None:
return None
jvm = self.gateway.jvm
java_import(jvm, "org.mrgeo.mapalgebra.raster.RasterMapOp")
java_import(jvm, "org.mrgeo.image.MrsPyramidMetadata")
if self.mapop.metadata().isEmpty():
return None
meta = self.mapop.metadata().get()
java_import(jvm, "com.fasterxml.jackson.databind.ObjectMapper")
mapper = jvm.com.fasterxml.jackson.databind.ObjectMapper()
jsonstr = mapper.writeValueAsString(meta)
# print(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(meta))
return json.loads(jsonstr)
|
# ... existing code ...
import copy
import json
from py4j.java_gateway import java_import
from pymrgeo.instance import is_instance_of as iio
class RasterMapOp(object):
# ... modified code ...
java_import(jvm, "org.mrgeo.mapalgebra.raster.RasterMapOp")
java_import(jvm, "org.mrgeo.image.MrsPyramidMetadata")
if self.mapop.metadata().isEmpty():
return None
meta = self.mapop.metadata().get()
java_import(jvm, "com.fasterxml.jackson.databind.ObjectMapper")
# ... rest of the code ...
|
ea20f912696974a2543a8fa15f63f0a3b64d7263
|
froide/helper/utils.py
|
froide/helper/utils.py
|
from django.shortcuts import render
def get_next(request):
# This is not a view
return request.GET.get("next", request.META.get("HTTP_REFERER", "/"))
def render_code(code, request, context={}):
return render(request, "%d.html" % code, context,
status=code)
def render_400(request):
return render_code(400, request)
def render_405(request):
return render_code(405, request)
def render_403(request, message=''):
return render_code(403, request,
context={"message": message})
|
from django.shortcuts import render
def get_next(request):
# This is not a view
return request.GET.get("next", request.META.get("HTTP_REFERER", "/"))
def render_code(code, request, context={}):
return render(request, "%d.html" % code, context,
status=code)
def render_400(request):
return render_code(400, request)
def render_405(request):
return render_code(405, request)
def render_403(request, message=''):
return render_code(403, request,
context={"message": message})
def get_client_ip(request):
x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
if x_forwarded_for:
ip = x_forwarded_for.split(',')[-1].strip()
else:
ip = request.META.get('REMOTE_ADDR')
return ip
|
Add utility function to get client IP from request
|
Add utility function to get client IP from request
|
Python
|
mit
|
ryankanno/froide,fin/froide,CodeforHawaii/froide,fin/froide,catcosmo/froide,LilithWittmann/froide,okfse/froide,okfse/froide,LilithWittmann/froide,ryankanno/froide,stefanw/froide,fin/froide,stefanw/froide,catcosmo/froide,okfse/froide,CodeforHawaii/froide,ryankanno/froide,stefanw/froide,catcosmo/froide,LilithWittmann/froide,fin/froide,catcosmo/froide,okfse/froide,catcosmo/froide,ryankanno/froide,okfse/froide,stefanw/froide,CodeforHawaii/froide,CodeforHawaii/froide,LilithWittmann/froide,CodeforHawaii/froide,LilithWittmann/froide,ryankanno/froide,stefanw/froide
|
from django.shortcuts import render
def get_next(request):
# This is not a view
return request.GET.get("next", request.META.get("HTTP_REFERER", "/"))
def render_code(code, request, context={}):
return render(request, "%d.html" % code, context,
status=code)
def render_400(request):
return render_code(400, request)
def render_405(request):
return render_code(405, request)
def render_403(request, message=''):
return render_code(403, request,
context={"message": message})
+
+ def get_client_ip(request):
+ x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
+ if x_forwarded_for:
+ ip = x_forwarded_for.split(',')[-1].strip()
+ else:
+ ip = request.META.get('REMOTE_ADDR')
+ return ip
+
|
Add utility function to get client IP from request
|
## Code Before:
from django.shortcuts import render
def get_next(request):
# This is not a view
return request.GET.get("next", request.META.get("HTTP_REFERER", "/"))
def render_code(code, request, context={}):
return render(request, "%d.html" % code, context,
status=code)
def render_400(request):
return render_code(400, request)
def render_405(request):
return render_code(405, request)
def render_403(request, message=''):
return render_code(403, request,
context={"message": message})
## Instruction:
Add utility function to get client IP from request
## Code After:
from django.shortcuts import render
def get_next(request):
# This is not a view
return request.GET.get("next", request.META.get("HTTP_REFERER", "/"))
def render_code(code, request, context={}):
return render(request, "%d.html" % code, context,
status=code)
def render_400(request):
return render_code(400, request)
def render_405(request):
return render_code(405, request)
def render_403(request, message=''):
return render_code(403, request,
context={"message": message})
def get_client_ip(request):
x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
if x_forwarded_for:
ip = x_forwarded_for.split(',')[-1].strip()
else:
ip = request.META.get('REMOTE_ADDR')
return ip
|
// ... existing code ...
def render_403(request, message=''):
return render_code(403, request,
context={"message": message})
def get_client_ip(request):
x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
if x_forwarded_for:
ip = x_forwarded_for.split(',')[-1].strip()
else:
ip = request.META.get('REMOTE_ADDR')
return ip
// ... rest of the code ...
|
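get_client_ip above keeps the last entry of X-Forwarded-For, i.e. the address appended by the proxy closest to the application, which a client cannot forge as easily as the earlier entries. Parsing of a made-up header value, for illustration only:
x_forwarded_for = "203.0.113.7, 198.51.100.9, 10.0.0.2"
ip = x_forwarded_for.split(',')[-1].strip()    # "10.0.0.2", added by the nearest proxy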
2088b3df274fd31c28baa6193c937046c04b98a6
|
scripts/generate_wiki_languages.py
|
scripts/generate_wiki_languages.py
|
from urllib2 import urlopen
import csv
import lxml.builder as lb
from lxml import etree
# Returns CSV of all wikipedias, ordered by number of 'good' articles
URL = "https://wikistats.wmflabs.org/api.php?action=dump&table=wikipedias&format=csv&s=good"
data = csv.reader(urlopen(URL))
# Column 2 is the language code
lang_keys = [row[2] for row in data]
del lang_keys[0] # Get rid of the headers
# Generate the XML
x = lb.E
keys = [x.item(k) for k in lang_keys]
resources = x.resources(
getattr(x, 'string-array')(*keys, name="preference_language_keys"),
)
open("languages_list.xml", "w").write(
etree.tostring(resources, pretty_print=True, encoding="utf-8", xml_declaration=True)
)
|
from urllib2 import urlopen
import csv
import json
import lxml.builder as lb
from lxml import etree
# Returns CSV of all wikipedias, ordered by number of 'good' articles
URL = "https://wikistats.wmflabs.org/api.php?action=dump&table=wikipedias&format=csv&s=good"
data = csv.reader(urlopen(URL))
lang_keys = []
lang_local_names = []
lang_eng_names = []
for row in data:
lang_keys.append(row[2])
lang_local_names.append(row[10])
lang_eng_names.append(row[1])
# Generate the XML, for Android
x = lb.E
keys = [x.item(k) for k in lang_keys]
# Skip the headers!
del keys[0]
resources = x.resources(
getattr(x, 'string-array')(*keys, name="preference_language_keys"),
)
open("languages_list.xml", "w").write(
etree.tostring(resources, pretty_print=True, encoding="utf-8", xml_declaration=True)
)
# Generate the JSON, for iOS
langs_json = []
# Start from 1, to skip the headers
for i in xrange(1, len(lang_keys)):
langs_json.append({
"code": lang_keys[i],
"name": lang_local_names[i],
"canonical_name": lang_eng_names[i]
})
open("languages_list.json", "w").write(json.dumps(langs_json, indent=4))
|
Modify language generation script to make JSON for iOS
|
Modify language generation script to make JSON for iOS
Change-Id: Ib5aec2f6cfcb5bd1187cf8863ecd50f1b1a2d20c
|
Python
|
apache-2.0
|
Wikinaut/wikipedia-app,carloshwa/apps-android-wikipedia,dbrant/apps-android-wikipedia,creaITve/apps-android-tbrc-works,reproio/apps-android-wikipedia,anirudh24seven/apps-android-wikipedia,reproio/apps-android-wikipedia,wikimedia/apps-android-wikipedia,BrunoMRodrigues/apps-android-tbrc-work,BrunoMRodrigues/apps-android-tbrc-work,carloshwa/apps-android-wikipedia,creaITve/apps-android-tbrc-works,BrunoMRodrigues/apps-android-tbrc-work,Wikinaut/wikipedia-app,Wikinaut/wikipedia-app,BrunoMRodrigues/apps-android-tbrc-work,wikimedia/apps-android-wikipedia,anirudh24seven/apps-android-wikipedia,anirudh24seven/apps-android-wikipedia,carloshwa/apps-android-wikipedia,wikimedia/apps-android-wikipedia,Wikinaut/wikipedia-app,parvez3019/apps-android-wikipedia,carloshwa/apps-android-wikipedia,Duct-and-rice/KrswtkhrWiki4Android,anirudh24seven/apps-android-wikipedia,SAGROUP2/apps-android-wikipedia,parvez3019/apps-android-wikipedia,dbrant/apps-android-wikipedia,SAGROUP2/apps-android-wikipedia,dbrant/apps-android-wikipedia,reproio/apps-android-wikipedia,reproio/apps-android-wikipedia,creaITve/apps-android-tbrc-works,anirudh24seven/apps-android-wikipedia,dbrant/apps-android-wikipedia,SAGROUP2/apps-android-wikipedia,Duct-and-rice/KrswtkhrWiki4Android,parvez3019/apps-android-wikipedia,parvez3019/apps-android-wikipedia,SAGROUP2/apps-android-wikipedia,creaITve/apps-android-tbrc-works,wikimedia/apps-android-wikipedia,Duct-and-rice/KrswtkhrWiki4Android,SAGROUP2/apps-android-wikipedia,parvez3019/apps-android-wikipedia,carloshwa/apps-android-wikipedia,dbrant/apps-android-wikipedia,reproio/apps-android-wikipedia,Duct-and-rice/KrswtkhrWiki4Android,Duct-and-rice/KrswtkhrWiki4Android,Wikinaut/wikipedia-app
|
from urllib2 import urlopen
import csv
+ import json
import lxml.builder as lb
from lxml import etree
# Returns CSV of all wikipedias, ordered by number of 'good' articles
URL = "https://wikistats.wmflabs.org/api.php?action=dump&table=wikipedias&format=csv&s=good"
data = csv.reader(urlopen(URL))
- # Column 2 is the language code
- lang_keys = [row[2] for row in data]
+ lang_keys = []
+ lang_local_names = []
+ lang_eng_names = []
+ for row in data:
+ lang_keys.append(row[2])
+ lang_local_names.append(row[10])
+ lang_eng_names.append(row[1])
+ # Generate the XML, for Android
- del lang_keys[0] # Get rid of the headers
-
- # Generate the XML
x = lb.E
keys = [x.item(k) for k in lang_keys]
+ # Skip the headers!
+ del keys[0]
resources = x.resources(
getattr(x, 'string-array')(*keys, name="preference_language_keys"),
)
open("languages_list.xml", "w").write(
etree.tostring(resources, pretty_print=True, encoding="utf-8", xml_declaration=True)
)
+ # Generate the JSON, for iOS
+ langs_json = []
+ # Start from 1, to skip the headers
+ for i in xrange(1, len(lang_keys)):
+ langs_json.append({
+ "code": lang_keys[i],
+ "name": lang_local_names[i],
+ "canonical_name": lang_eng_names[i]
+ })
+
+ open("languages_list.json", "w").write(json.dumps(langs_json, indent=4))
+
|
Modify language generation script to make JSON for iOS
|
## Code Before:
from urllib2 import urlopen
import csv
import lxml.builder as lb
from lxml import etree
# Returns CSV of all wikipedias, ordered by number of 'good' articles
URL = "https://wikistats.wmflabs.org/api.php?action=dump&table=wikipedias&format=csv&s=good"
data = csv.reader(urlopen(URL))
# Column 2 is the language code
lang_keys = [row[2] for row in data]
del lang_keys[0] # Get rid of the headers
# Generate the XML
x = lb.E
keys = [x.item(k) for k in lang_keys]
resources = x.resources(
getattr(x, 'string-array')(*keys, name="preference_language_keys"),
)
open("languages_list.xml", "w").write(
etree.tostring(resources, pretty_print=True, encoding="utf-8", xml_declaration=True)
)
## Instruction:
Modify language generation script to make JSON for iOS
## Code After:
from urllib2 import urlopen
import csv
import json
import lxml.builder as lb
from lxml import etree
# Returns CSV of all wikipedias, ordered by number of 'good' articles
URL = "https://wikistats.wmflabs.org/api.php?action=dump&table=wikipedias&format=csv&s=good"
data = csv.reader(urlopen(URL))
lang_keys = []
lang_local_names = []
lang_eng_names = []
for row in data:
lang_keys.append(row[2])
lang_local_names.append(row[10])
lang_eng_names.append(row[1])
# Generate the XML, for Android
x = lb.E
keys = [x.item(k) for k in lang_keys]
# Skip the headers!
del keys[0]
resources = x.resources(
getattr(x, 'string-array')(*keys, name="preference_language_keys"),
)
open("languages_list.xml", "w").write(
etree.tostring(resources, pretty_print=True, encoding="utf-8", xml_declaration=True)
)
# Generate the JSON, for iOS
langs_json = []
# Start from 1, to skip the headers
for i in xrange(1, len(lang_keys)):
langs_json.append({
"code": lang_keys[i],
"name": lang_local_names[i],
"canonical_name": lang_eng_names[i]
})
open("languages_list.json", "w").write(json.dumps(langs_json, indent=4))
|
# ... existing code ...
from urllib2 import urlopen
import csv
import json
import lxml.builder as lb
from lxml import etree
# ... modified code ...
data = csv.reader(urlopen(URL))
lang_keys = []
lang_local_names = []
lang_eng_names = []
for row in data:
lang_keys.append(row[2])
lang_local_names.append(row[10])
lang_eng_names.append(row[1])
# Generate the XML, for Android
x = lb.E
keys = [x.item(k) for k in lang_keys]
# Skip the headers!
del keys[0]
resources = x.resources(
getattr(x, 'string-array')(*keys, name="preference_language_keys"),
)
...
etree.tostring(resources, pretty_print=True, encoding="utf-8", xml_declaration=True)
)
# Generate the JSON, for iOS
langs_json = []
# Start from 1, to skip the headers
for i in xrange(1, len(lang_keys)):
langs_json.append({
"code": lang_keys[i],
"name": lang_local_names[i],
"canonical_name": lang_eng_names[i]
})
open("languages_list.json", "w").write(json.dumps(langs_json, indent=4))
# ... rest of the code ...
|
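For the language-list script above, each element of languages_list.json pairs a wiki code with its local and English names; one entry might look like the following (the keys come from the script, the values are illustrative):
{"code": "de", "name": "Deutsch", "canonical_name": "German"}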
4e31e5c776c40997cccd76d4ce592d7f3d5de752
|
example/runner.py
|
example/runner.py
|
import argparse
import sys
def args():
parser = argparse.ArgumentParser(description='Run the Furious Examples.')
parser.add_argument('--gae-sdk-path', metavar='S', dest="gae_lib_path",
default="/usr/local/google_appengine",
help='path to the GAE SDK')
parser.add_argument('--url', metavar='U', dest="url", default="",
help="the endpoint to run")
return parser.parse_args()
def setup(options):
sys.path.insert(0, options.gae_lib_path)
from dev_appserver import fix_sys_path
fix_sys_path()
def run(options):
from google.appengine.tools import appengine_rpc
from google.appengine.tools import appcfg
source = 'furious'
user_agent = appcfg.GetUserAgent()
server = appengine_rpc.HttpRpcServer(
'localhost:8080', lambda: ('[email protected]', 'password'), user_agent,
source, secure=False)
server._DevAppServerAuthenticate()
server.Send(options.url, content_type="text/html; charset=utf-8",
payload=None)
def main():
options = args()
setup(options)
run(options)
if __name__ == "__main__":
main()
|
import argparse
import sys
def args():
parser = argparse.ArgumentParser(description='Run the Furious Examples.')
parser.add_argument('--gae-sdk-path', metavar='S', dest="gae_lib_path",
default="/usr/local/google_appengine",
help='path to the GAE SDK')
parser.add_argument('url', metavar='U', default="", nargs=1,
help="the endpoint to run")
return parser.parse_args()
def setup(options):
sys.path.insert(0, options.gae_lib_path)
from dev_appserver import fix_sys_path
fix_sys_path()
def run(options):
from google.appengine.tools import appengine_rpc
from google.appengine.tools import appcfg
source = 'furious'
user_agent = appcfg.GetUserAgent()
server = appengine_rpc.HttpRpcServer(
'localhost:8080', lambda: ('[email protected]', 'password'), user_agent,
source, secure=False)
url = "/"
if options.url:
url += options.url[0]
server._DevAppServerAuthenticate()
server.Send(url, content_type="text/html; charset=utf-8",
payload=None)
def main():
options = args()
setup(options)
run(options)
if __name__ == "__main__":
main()
|
Update the way the url is handled.
|
Update the way the url is handled.
|
Python
|
apache-2.0
|
andreleblanc-wf/furious,Workiva/furious,rosshendrickson-wf/furious,beaulyddon-wf/furious,mattsanders-wf/furious,rosshendrickson-wf/furious,mattsanders-wf/furious,beaulyddon-wf/furious,andreleblanc-wf/furious,Workiva/furious
|
import argparse
import sys
def args():
parser = argparse.ArgumentParser(description='Run the Furious Examples.')
parser.add_argument('--gae-sdk-path', metavar='S', dest="gae_lib_path",
default="/usr/local/google_appengine",
help='path to the GAE SDK')
- parser.add_argument('--url', metavar='U', dest="url", default="",
+ parser.add_argument('url', metavar='U', default="", nargs=1,
help="the endpoint to run")
return parser.parse_args()
def setup(options):
sys.path.insert(0, options.gae_lib_path)
from dev_appserver import fix_sys_path
fix_sys_path()
def run(options):
from google.appengine.tools import appengine_rpc
from google.appengine.tools import appcfg
source = 'furious'
user_agent = appcfg.GetUserAgent()
server = appengine_rpc.HttpRpcServer(
'localhost:8080', lambda: ('[email protected]', 'password'), user_agent,
source, secure=False)
+ url = "/"
+ if options.url:
+ url += options.url[0]
+
server._DevAppServerAuthenticate()
- server.Send(options.url, content_type="text/html; charset=utf-8",
+ server.Send(url, content_type="text/html; charset=utf-8",
payload=None)
def main():
options = args()
setup(options)
run(options)
if __name__ == "__main__":
main()
|
Update the way the url is handled.
|
## Code Before:
import argparse
import sys
def args():
parser = argparse.ArgumentParser(description='Run the Furious Examples.')
parser.add_argument('--gae-sdk-path', metavar='S', dest="gae_lib_path",
default="/usr/local/google_appengine",
help='path to the GAE SDK')
parser.add_argument('--url', metavar='U', dest="url", default="",
help="the endpoint to run")
return parser.parse_args()
def setup(options):
sys.path.insert(0, options.gae_lib_path)
from dev_appserver import fix_sys_path
fix_sys_path()
def run(options):
from google.appengine.tools import appengine_rpc
from google.appengine.tools import appcfg
source = 'furious'
user_agent = appcfg.GetUserAgent()
server = appengine_rpc.HttpRpcServer(
'localhost:8080', lambda: ('[email protected]', 'password'), user_agent,
source, secure=False)
server._DevAppServerAuthenticate()
server.Send(options.url, content_type="text/html; charset=utf-8",
payload=None)
def main():
options = args()
setup(options)
run(options)
if __name__ == "__main__":
main()
## Instruction:
Update the way the url is handled.
## Code After:
import argparse
import sys
def args():
parser = argparse.ArgumentParser(description='Run the Furious Examples.')
parser.add_argument('--gae-sdk-path', metavar='S', dest="gae_lib_path",
default="/usr/local/google_appengine",
help='path to the GAE SDK')
parser.add_argument('url', metavar='U', default="", nargs=1,
help="the endpoint to run")
return parser.parse_args()
def setup(options):
sys.path.insert(0, options.gae_lib_path)
from dev_appserver import fix_sys_path
fix_sys_path()
def run(options):
from google.appengine.tools import appengine_rpc
from google.appengine.tools import appcfg
source = 'furious'
user_agent = appcfg.GetUserAgent()
server = appengine_rpc.HttpRpcServer(
'localhost:8080', lambda: ('[email protected]', 'password'), user_agent,
source, secure=False)
url = "/"
if options.url:
url += options.url[0]
server._DevAppServerAuthenticate()
server.Send(url, content_type="text/html; charset=utf-8",
payload=None)
def main():
options = args()
setup(options)
run(options)
if __name__ == "__main__":
main()
|
...
default="/usr/local/google_appengine",
help='path to the GAE SDK')
parser.add_argument('url', metavar='U', default="", nargs=1,
help="the endpoint to run")
return parser.parse_args()
...
'localhost:8080', lambda: ('[email protected]', 'password'), user_agent,
source, secure=False)
url = "/"
if options.url:
url += options.url[0]
server._DevAppServerAuthenticate()
server.Send(url, content_type="text/html; charset=utf-8",
payload=None)
...
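A minimal standalone sketch (not part of the recorded commit) of why the updated handler indexes options.url[0]: with nargs=1, argparse always stores the positional argument as a one-element list, and the guard plus the leading "/" rebuild the endpoint path.

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('url', metavar='U', default="", nargs=1)
opts = parser.parse_args(['ping'])
print(opts.url)           # ['ping'], a one-element list rather than a string
print('/' + opts.url[0])  # '/ping', the path sent to the dev server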
|
99be93029ecec0ed4c1e17e0fc2f199c2ad0f6c6
|
go/apps/dialogue/dialogue_api.py
|
go/apps/dialogue/dialogue_api.py
|
"""Go API action dispatcher for dialogue conversations."""
from go.api.go_api.action_dispatcher import ConversationActionDispatcher
class DialogueActionDispatcher(ConversationActionDispatcher):
def handle_get_poll(self, conv):
pass
def handle_save_poll(self, conv, poll):
pass
|
"""Go API action dispatcher for dialogue conversations."""
from go.api.go_api.action_dispatcher import ConversationActionDispatcher
class DialogueActionDispatcher(ConversationActionDispatcher):
def handle_get_poll(self, conv):
return {"poll": conv.config.get("poll")}
def handle_save_poll(self, conv, poll):
conv.config["poll"] = poll
d = conv.save()
d.addCallback(lambda r: {"saved": True})
return d
|
Implement poll saving and getting.
|
Implement poll saving and getting.
|
Python
|
bsd-3-clause
|
praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go
|
"""Go API action dispatcher for dialogue conversations."""
from go.api.go_api.action_dispatcher import ConversationActionDispatcher
class DialogueActionDispatcher(ConversationActionDispatcher):
def handle_get_poll(self, conv):
- pass
+ return {"poll": conv.config.get("poll")}
def handle_save_poll(self, conv, poll):
- pass
+ conv.config["poll"] = poll
+ d = conv.save()
+ d.addCallback(lambda r: {"saved": True})
+ return d
|
Implement poll saving and getting.
|
## Code Before:
"""Go API action dispatcher for dialogue conversations."""
from go.api.go_api.action_dispatcher import ConversationActionDispatcher
class DialogueActionDispatcher(ConversationActionDispatcher):
def handle_get_poll(self, conv):
pass
def handle_save_poll(self, conv, poll):
pass
## Instruction:
Implement poll saving and getting.
## Code After:
"""Go API action dispatcher for dialogue conversations."""
from go.api.go_api.action_dispatcher import ConversationActionDispatcher
class DialogueActionDispatcher(ConversationActionDispatcher):
def handle_get_poll(self, conv):
return {"poll": conv.config.get("poll")}
def handle_save_poll(self, conv, poll):
conv.config["poll"] = poll
d = conv.save()
d.addCallback(lambda r: {"saved": True})
return d
|
# ... existing code ...
class DialogueActionDispatcher(ConversationActionDispatcher):
def handle_get_poll(self, conv):
return {"poll": conv.config.get("poll")}
def handle_save_poll(self, conv, poll):
conv.config["poll"] = poll
d = conv.save()
d.addCallback(lambda r: {"saved": True})
return d
# ... rest of the code ...
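A small sketch of the Deferred pattern used in handle_save_poll (assumes Twisted is installed; the save call is stubbed with an already-fired Deferred): addCallback rewrites the eventual result, so the API caller receives the {"saved": True} payload once the save completes.

from twisted.internet import defer

d = defer.succeed('stored')                 # stand-in for conv.save()
d.addCallback(lambda r: {'saved': True})    # replace the raw save result
d.addCallback(print)                        # prints {'saved': True}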
|
5f430b076ad70c23c430017a6aa7a7893530e995
|
deflect/management/commands/checkurls.py
|
deflect/management/commands/checkurls.py
|
from django.contrib.sites.models import Site
from django.core.mail import mail_managers
from django.core.management.base import NoArgsCommand
from django.core.urlresolvers import reverse
import requests
from deflect.models import ShortURL
class Command(NoArgsCommand):
help = "Validate short URL redirect targets"
def handle_noargs(self, *args, **options):
message = ''
for url in ShortURL.objects.all():
try:
url.check_status()
except requests.exceptions.RequestException as e:
message += self.bad_redirect_text(url, e)
mail_managers('go.corban.edu URL report', message)
def bad_redirect_text(self, url, exception):
"""
Return informational text for a URL that raised an
exception.
"""
base = 'http://%s' % Site.objects.get_current().domain
return """
Redirect {key} with target {target} returns {error}
Edit this short URL: {edit}
""".format(key=url.key, target=url.long_url, error=exception,
edit=base + reverse('admin:deflect_shorturl_change', args=(url.id,)))
|
from django.contrib.sites.models import Site
from django.core.mail import mail_managers
from django.core.management.base import NoArgsCommand
from django.core.urlresolvers import reverse
import requests
from deflect.models import ShortURL
class Command(NoArgsCommand):
help = "Validate short URL redirect targets"
domain = Site.objects.get_current().domain
def handle_noargs(self, *args, **options):
message = ''
for url in ShortURL.objects.all():
try:
url.check_status()
except requests.exceptions.RequestException as e:
message += self.url_exception_text(url, e)
mail_managers('URL report for %s' % self.domain, message)
def url_exception_text(self, url, exception):
"""Return text block for a URL exception."""
base = 'http://%s' % self.domain
return """
Redirect {key} with target {target} returned {error}
Edit this short URL: {edit}
""".format(key=url.key, target=url.long_url, error=exception,
edit=base + reverse('admin:deflect_shorturl_change', args=(url.id,)))
|
Improve subject and text of URL report email
|
Improve subject and text of URL report email
|
Python
|
bsd-3-clause
|
jbittel/django-deflect
|
from django.contrib.sites.models import Site
from django.core.mail import mail_managers
from django.core.management.base import NoArgsCommand
from django.core.urlresolvers import reverse
import requests
from deflect.models import ShortURL
class Command(NoArgsCommand):
help = "Validate short URL redirect targets"
+ domain = Site.objects.get_current().domain
def handle_noargs(self, *args, **options):
message = ''
for url in ShortURL.objects.all():
try:
url.check_status()
except requests.exceptions.RequestException as e:
- message += self.bad_redirect_text(url, e)
+ message += self.url_exception_text(url, e)
- mail_managers('go.corban.edu URL report', message)
+ mail_managers('URL report for %s' % self.domain, message)
- def bad_redirect_text(self, url, exception):
+ def url_exception_text(self, url, exception):
+ """Return text block for a URL exception."""
+ base = 'http://%s' % self.domain
- """
- Return informational text for a URL that raised an
- exception.
- """
- base = 'http://%s' % Site.objects.get_current().domain
return """
+
- Redirect {key} with target {target} returns {error}
+ Redirect {key} with target {target} returned {error}
Edit this short URL: {edit}
""".format(key=url.key, target=url.long_url, error=exception,
edit=base + reverse('admin:deflect_shorturl_change', args=(url.id,)))
|
Improve subject and text of URL report email
|
## Code Before:
from django.contrib.sites.models import Site
from django.core.mail import mail_managers
from django.core.management.base import NoArgsCommand
from django.core.urlresolvers import reverse
import requests
from deflect.models import ShortURL
class Command(NoArgsCommand):
help = "Validate short URL redirect targets"
def handle_noargs(self, *args, **options):
message = ''
for url in ShortURL.objects.all():
try:
url.check_status()
except requests.exceptions.RequestException as e:
message += self.bad_redirect_text(url, e)
mail_managers('go.corban.edu URL report', message)
def bad_redirect_text(self, url, exception):
"""
Return informational text for a URL that raised an
exception.
"""
base = 'http://%s' % Site.objects.get_current().domain
return """
Redirect {key} with target {target} returns {error}
Edit this short URL: {edit}
""".format(key=url.key, target=url.long_url, error=exception,
edit=base + reverse('admin:deflect_shorturl_change', args=(url.id,)))
## Instruction:
Improve subject and text of URL report email
## Code After:
from django.contrib.sites.models import Site
from django.core.mail import mail_managers
from django.core.management.base import NoArgsCommand
from django.core.urlresolvers import reverse
import requests
from deflect.models import ShortURL
class Command(NoArgsCommand):
help = "Validate short URL redirect targets"
domain = Site.objects.get_current().domain
def handle_noargs(self, *args, **options):
message = ''
for url in ShortURL.objects.all():
try:
url.check_status()
except requests.exceptions.RequestException as e:
message += self.url_exception_text(url, e)
mail_managers('URL report for %s' % self.domain, message)
def url_exception_text(self, url, exception):
"""Return text block for a URL exception."""
base = 'http://%s' % self.domain
return """
Redirect {key} with target {target} returned {error}
Edit this short URL: {edit}
""".format(key=url.key, target=url.long_url, error=exception,
edit=base + reverse('admin:deflect_shorturl_change', args=(url.id,)))
|
# ... existing code ...
class Command(NoArgsCommand):
help = "Validate short URL redirect targets"
domain = Site.objects.get_current().domain
def handle_noargs(self, *args, **options):
message = ''
# ... modified code ...
try:
url.check_status()
except requests.exceptions.RequestException as e:
message += self.url_exception_text(url, e)
mail_managers('URL report for %s' % self.domain, message)
def url_exception_text(self, url, exception):
"""Return text block for a URL exception."""
base = 'http://%s' % self.domain
return """
Redirect {key} with target {target} returned {error}
Edit this short URL: {edit}
""".format(key=url.key, target=url.long_url, error=exception,
# ... rest of the code ...
|
ff8f1067ac95a8f3fbb4c02e510da033623edeee
|
gargoyle/helpers.py
|
gargoyle/helpers.py
|
from django.http import HttpRequest
class MockRequest(HttpRequest):
"""
A mock request object which stores a user
instance and the ip address.
"""
def __init__(self, user=None, ip_address=None):
from django.contrib.auth.models import AnonymousUser
self.user = user or AnonymousUser()
self.META = {
'REMOTE_ADDR': ip_address,
}
|
from django.http import HttpRequest
class MockRequest(HttpRequest):
"""
A mock request object which stores a user
instance and the ip address.
"""
def __init__(self, user=None, ip_address=None):
from django.contrib.auth.models import AnonymousUser
self.user = user or AnonymousUser()
self.GET = {}
self.POST = {}
self.COOKIES = {}
self.META = {
'REMOTE_ADDR': ip_address,
}
|
Set POST/GET/COOKIES on MockRequest so repr works
|
Set POST/GET/COOKIES on MockRequest so repr works
|
Python
|
apache-2.0
|
disqus/gutter-django,nkovshov/gargoyle,nkovshov/gargoyle,nkovshov/gargoyle,frewsxcv/gargoyle,brilliant-org/gargoyle,frewsxcv/gargoyle,YPlan/gargoyle,roverdotcom/gargoyle,monokrome/gargoyle,monokrome/gargoyle,disqus/gutter,vikingco/gargoyle,disqus/gutter-django,blueprinthealth/gargoyle,YPlan/gargoyle,graingert/gutter-django,disqus/gargoyle,disqus/gutter-django,brilliant-org/gargoyle,Raekkeri/gargoyle,kalail/gutter,graingert/gutter-django,vikingco/gargoyle,kalail/gutter,Raekkeri/gargoyle,blueprinthealth/gargoyle,disqus/gargoyle,vikingco/gargoyle,frewsxcv/gargoyle,disqus/gutter,roverdotcom/gargoyle,zapier/gargoyle,blueprinthealth/gargoyle,graingert/gutter-django,disqus/gargoyle,monokrome/gargoyle,disqus/gutter-django,roverdotcom/gargoyle,YPlan/gargoyle,kalail/gutter,Raekkeri/gargoyle,zapier/gargoyle,brilliant-org/gargoyle
|
from django.http import HttpRequest
class MockRequest(HttpRequest):
"""
A mock request object which stores a user
instance and the ip address.
"""
def __init__(self, user=None, ip_address=None):
from django.contrib.auth.models import AnonymousUser
self.user = user or AnonymousUser()
+ self.GET = {}
+ self.POST = {}
+ self.COOKIES = {}
self.META = {
'REMOTE_ADDR': ip_address,
}
|
Set POST/GET/COOKIES on MockRequest so repr works
|
## Code Before:
from django.http import HttpRequest
class MockRequest(HttpRequest):
"""
A mock request object which stores a user
instance and the ip address.
"""
def __init__(self, user=None, ip_address=None):
from django.contrib.auth.models import AnonymousUser
self.user = user or AnonymousUser()
self.META = {
'REMOTE_ADDR': ip_address,
}
## Instruction:
Set POST/GET/COOKIES on MockRequest so repr works
## Code After:
from django.http import HttpRequest
class MockRequest(HttpRequest):
"""
A mock request object which stores a user
instance and the ip address.
"""
def __init__(self, user=None, ip_address=None):
from django.contrib.auth.models import AnonymousUser
self.user = user or AnonymousUser()
self.GET = {}
self.POST = {}
self.COOKIES = {}
self.META = {
'REMOTE_ADDR': ip_address,
}
|
# ... existing code ...
from django.contrib.auth.models import AnonymousUser
self.user = user or AnonymousUser()
self.GET = {}
self.POST = {}
self.COOKIES = {}
self.META = {
'REMOTE_ADDR': ip_address,
}
# ... rest of the code ...
|
856207c8399d94e99a6f2ffb1e10befecb6150cf
|
src/generate-jobs/calculate_quad_key.py
|
src/generate-jobs/calculate_quad_key.py
|
import system
import csv
from docopt import docopt
def quad_tree(tx, ty, zoom):
"""
Converts XYZ tile coordinates to Microsoft QuadTree
http://www.maptiler.org/google-maps-coordinates-tile-bounds-projection/
"""
quad_key = ''
for i in range(zoom, 0, -1):
digit = 0
mask = 1 << (i-1)
if (tx & mask) != 0:
digit += 1
if (ty & mask) != 0:
digit += 2
quad_key += str(digit)
return quad_key
if __name__ == '__main__':
args = docopt(__doc__, version='0.1')
writer = csv.writer(system.out)
with open(args['<list_file>'], "r") as file_handle:
for line in file_handle:
z, x, y = line.split('/')
writer.writerow([
line,
quad_tree(int(x), int(y), int(z))]
)
|
import sys
import csv
from docopt import docopt
def quad_tree(tx, ty, zoom):
"""
Converts XYZ tile coordinates to Microsoft QuadTree
http://www.maptiler.org/google-maps-coordinates-tile-bounds-projection/
"""
quad_key = ''
for i in range(zoom, 0, -1):
digit = 0
mask = 1 << (i-1)
if (tx & mask) != 0:
digit += 1
if (ty & mask) != 0:
digit += 2
quad_key += str(digit)
return quad_key
if __name__ == '__main__':
args = docopt(__doc__, version='0.1')
writer = csv.writer(sys.stdout, delimiter='\t')
with open(args['<list_file>'], "r") as file_handle:
for line in file_handle:
z, x, y = line.split('/')
writer.writerow([
line.strip(),
quad_tree(int(x), int(y), int(z))]
)
|
Fix line endings in CSV and stdout typo
|
Fix line endings in CSV and stdout typo
|
Python
|
mit
|
geometalab/osm2vectortiles,geometalab/osm2vectortiles,osm2vectortiles/osm2vectortiles,osm2vectortiles/osm2vectortiles
|
- import system
+ import sys
import csv
from docopt import docopt
def quad_tree(tx, ty, zoom):
"""
Converts XYZ tile coordinates to Microsoft QuadTree
http://www.maptiler.org/google-maps-coordinates-tile-bounds-projection/
"""
quad_key = ''
for i in range(zoom, 0, -1):
digit = 0
mask = 1 << (i-1)
if (tx & mask) != 0:
digit += 1
if (ty & mask) != 0:
digit += 2
quad_key += str(digit)
return quad_key
if __name__ == '__main__':
args = docopt(__doc__, version='0.1')
- writer = csv.writer(system.out)
+ writer = csv.writer(sys.stdout, delimiter='\t')
with open(args['<list_file>'], "r") as file_handle:
for line in file_handle:
z, x, y = line.split('/')
writer.writerow([
- line,
+ line.strip(),
quad_tree(int(x), int(y), int(z))]
)
|
Fix line endings in CSV and stdout typo
|
## Code Before:
import system
import csv
from docopt import docopt
def quad_tree(tx, ty, zoom):
"""
Converts XYZ tile coordinates to Microsoft QuadTree
http://www.maptiler.org/google-maps-coordinates-tile-bounds-projection/
"""
quad_key = ''
for i in range(zoom, 0, -1):
digit = 0
mask = 1 << (i-1)
if (tx & mask) != 0:
digit += 1
if (ty & mask) != 0:
digit += 2
quad_key += str(digit)
return quad_key
if __name__ == '__main__':
args = docopt(__doc__, version='0.1')
writer = csv.writer(system.out)
with open(args['<list_file>'], "r") as file_handle:
for line in file_handle:
z, x, y = line.split('/')
writer.writerow([
line,
quad_tree(int(x), int(y), int(z))]
)
## Instruction:
Fix line endings in CSV and stdout typo
## Code After:
import sys
import csv
from docopt import docopt
def quad_tree(tx, ty, zoom):
"""
Converts XYZ tile coordinates to Microsoft QuadTree
http://www.maptiler.org/google-maps-coordinates-tile-bounds-projection/
"""
quad_key = ''
for i in range(zoom, 0, -1):
digit = 0
mask = 1 << (i-1)
if (tx & mask) != 0:
digit += 1
if (ty & mask) != 0:
digit += 2
quad_key += str(digit)
return quad_key
if __name__ == '__main__':
args = docopt(__doc__, version='0.1')
writer = csv.writer(sys.stdout, delimiter='\t')
with open(args['<list_file>'], "r") as file_handle:
for line in file_handle:
z, x, y = line.split('/')
writer.writerow([
line.strip(),
quad_tree(int(x), int(y), int(z))]
)
|
# ... existing code ...
import sys
import csv
from docopt import docopt
# ... modified code ...
if __name__ == '__main__':
args = docopt(__doc__, version='0.1')
writer = csv.writer(sys.stdout, delimiter='\t')
with open(args['<list_file>'], "r") as file_handle:
for line in file_handle:
z, x, y = line.split('/')
writer.writerow([
line.strip(),
quad_tree(int(x), int(y), int(z))]
)
# ... rest of the code ...
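A worked value to make the bit-twiddling concrete (assumes the quad_tree function above is in scope), plus the reason for line.strip(): lines read from the list file keep their trailing newline, which would otherwise end up inside the first TSV column.

# tile x=3, y=5 at zoom 3:
#   i=3: mask=4 gives digit 2; i=2: mask=2 gives digit 1; i=1: mask=1 gives digit 3
print(quad_tree(3, 5, 3))   # '213'
print('3/3/5\n'.strip())    # '3/3/5', what actually lands in the first column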
|
aacfe5de01dd11486f7f39bb414c87853c8c8857
|
likert_field/templatetags/likert_star_tools.py
|
likert_field/templatetags/likert_star_tools.py
|
from __future__ import unicode_literals
from six import string_types
def render_stars(num, max_stars, star_set):
"""
Star renderer returns a HTML string of stars
If num is None or a blank string, it returns the unanswered tag
Otherwise, the returned string will contain num solid stars
followed by max_stars - num empty stars
If num > max_stars, render max_stars solid stars
star_set is a dictionary of strings with keys: star, unlit, noanswer
"""
if num is None or (isinstance(num, string_types) and len(num) == 0):
return star_set['noanswer']
difference = int(max_stars) - int(num)
if difference < 0:
num = max_stars
difference = 0
return ''.join(
star_set['star'] * int(num) + star_set['unlit'] * difference)
|
from __future__ import unicode_literals
from django.utils.six import string_types
def render_stars(num, max_stars, star_set):
"""
Star renderer returns a HTML string of stars
If num is None or a blank string, it returns the unanswered tag
Otherwise, the returned string will contain num solid stars
followed by max_stars - num empty stars
If num > max_stars, render max_stars solid stars
star_set is a dictionary of strings with keys: star, unlit, noanswer
"""
if num is None or (isinstance(num, string_types) and len(num) == 0):
return star_set['noanswer']
difference = int(max_stars) - int(num)
if difference < 0:
num = max_stars
difference = 0
return ''.join(
star_set['star'] * int(num) + star_set['unlit'] * difference)
|
Use Dj provided compat tools
|
Use Dj provided compat tools
|
Python
|
bsd-3-clause
|
kelvinwong-ca/django-likert-field,kelvinwong-ca/django-likert-field
|
from __future__ import unicode_literals
- from six import string_types
+ from django.utils.six import string_types
def render_stars(num, max_stars, star_set):
"""
Star renderer returns a HTML string of stars
If num is None or a blank string, it returns the unanswered tag
Otherwise, the returned string will contain num solid stars
followed by max_stars - num empty stars
If num > max_stars, render max_stars solid stars
star_set is a dictionary of strings with keys: star, unlit, noanswer
"""
if num is None or (isinstance(num, string_types) and len(num) == 0):
return star_set['noanswer']
difference = int(max_stars) - int(num)
if difference < 0:
num = max_stars
difference = 0
return ''.join(
star_set['star'] * int(num) + star_set['unlit'] * difference)
|
Use Dj provided compat tools
|
## Code Before:
from __future__ import unicode_literals
from six import string_types
def render_stars(num, max_stars, star_set):
"""
Star renderer returns a HTML string of stars
If num is None or a blank string, it returns the unanswered tag
Otherwise, the returned string will contain num solid stars
followed by max_stars - num empty stars
If num > max_stars, render max_stars solid stars
star_set is a dictionary of strings with keys: star, unlit, noanswer
"""
if num is None or (isinstance(num, string_types) and len(num) == 0):
return star_set['noanswer']
difference = int(max_stars) - int(num)
if difference < 0:
num = max_stars
difference = 0
return ''.join(
star_set['star'] * int(num) + star_set['unlit'] * difference)
## Instruction:
Use Dj provided compat tools
## Code After:
from __future__ import unicode_literals
from django.utils.six import string_types
def render_stars(num, max_stars, star_set):
"""
Star renderer returns a HTML string of stars
If num is None or a blank string, it returns the unanswered tag
Otherwise, the returned string will contain num solid stars
followed by max_stars - num empty stars
If num > max_stars, render max_stars solid stars
star_set is a dictionary of strings with keys: star, unlit, noanswer
"""
if num is None or (isinstance(num, string_types) and len(num) == 0):
return star_set['noanswer']
difference = int(max_stars) - int(num)
if difference < 0:
num = max_stars
difference = 0
return ''.join(
star_set['star'] * int(num) + star_set['unlit'] * difference)
|
...
from __future__ import unicode_literals
from django.utils.six import string_types
def render_stars(num, max_stars, star_set):
...
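A quick usage sketch of the three branches (hypothetical star_set markers; assumes render_stars above is in scope): answered, clamped to max_stars, and unanswered.

star_set = {'star': '*', 'unlit': '-', 'noanswer': 'n/a'}
print(render_stars(3, 5, star_set))    # '***--'
print(render_stars(7, 5, star_set))    # '*****', clamped because 7 > max_stars
print(render_stars('', 5, star_set))   # 'n/a', a blank string counts as unanswered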
|
1c19d7fb5914554b470a6d067902a9c61882ff4a
|
packs/softlayer/actions/destroy_instance.py
|
packs/softlayer/actions/destroy_instance.py
|
from lib.softlayer import SoftlayerBaseAction
class SoftlayerDeleteInstance(SoftlayerBaseAction):
def run(self, name):
driver = self._get_driver()
# go from name to Node Object
node = [n for n in driver.list_nodes() if n.extra['hostname'] == name][0]
# destroy the node
self.logger.info('Destroying node...')
node = driver.destroy_node(node)
self.logger.info('Node successfully destroyed: {}'.format(node))
return
|
from lib.softlayer import SoftlayerBaseAction
class SoftlayerDeleteInstance(SoftlayerBaseAction):
def run(self, name):
driver = self._get_driver()
# go from name to Node Object
try:
node = [n for n in driver.list_nodes() if n.extra['hostname'] == name][0]
except IndexError:
raise Exception("Node with name {} not found in Softlayer".format(name))
# destroy the node
self.logger.info('Destroying node...')
node = driver.destroy_node(node)
self.logger.info('Node successfully destroyed: {}'.format(node))
return
|
Return a sane error if there is no Node with that name instead of IndexError
|
Return a sane error if there is no Node with that name instead of IndexError
|
Python
|
apache-2.0
|
tonybaloney/st2contrib,StackStorm/st2contrib,tonybaloney/st2contrib,pearsontechnology/st2contrib,meirwah/st2contrib,psychopenguin/st2contrib,pidah/st2contrib,digideskio/st2contrib,digideskio/st2contrib,psychopenguin/st2contrib,meirwah/st2contrib,lmEshoo/st2contrib,pinterb/st2contrib,tonybaloney/st2contrib,pidah/st2contrib,lmEshoo/st2contrib,armab/st2contrib,pidah/st2contrib,dennybaa/st2contrib,StackStorm/st2contrib,dennybaa/st2contrib,pearsontechnology/st2contrib,StackStorm/st2contrib,armab/st2contrib,armab/st2contrib,pearsontechnology/st2contrib,pinterb/st2contrib,pearsontechnology/st2contrib
|
from lib.softlayer import SoftlayerBaseAction
class SoftlayerDeleteInstance(SoftlayerBaseAction):
def run(self, name):
driver = self._get_driver()
# go from name to Node Object
+ try:
- node = [n for n in driver.list_nodes() if n.extra['hostname'] == name][0]
+ node = [n for n in driver.list_nodes() if n.extra['hostname'] == name][0]
+ except IndexError:
+ raise Exception("Node with name {} not found in Softlayer".format(name))
# destroy the node
self.logger.info('Destroying node...')
node = driver.destroy_node(node)
self.logger.info('Node successfully destroyed: {}'.format(node))
return
|
Return a sane error if there is no Node with that name instead of IndexError
|
## Code Before:
from lib.softlayer import SoftlayerBaseAction
class SoftlayerDeleteInstance(SoftlayerBaseAction):
def run(self, name):
driver = self._get_driver()
# go from name to Node Object
node = [n for n in driver.list_nodes() if n.extra['hostname'] == name][0]
# destroy the node
self.logger.info('Destroying node...')
node = driver.destroy_node(node)
self.logger.info('Node successfully destroyed: {}'.format(node))
return
## Instruction:
Return a sane error if there is no Node with that name instead of IndexError
## Code After:
from lib.softlayer import SoftlayerBaseAction
class SoftlayerDeleteInstance(SoftlayerBaseAction):
def run(self, name):
driver = self._get_driver()
# go from name to Node Object
try:
node = [n for n in driver.list_nodes() if n.extra['hostname'] == name][0]
except IndexError:
raise Exception("Node with name {} not found in Softlayer".format(name))
# destroy the node
self.logger.info('Destroying node...')
node = driver.destroy_node(node)
self.logger.info('Node successfully destroyed: {}'.format(node))
return
|
...
def run(self, name):
driver = self._get_driver()
# go from name to Node Object
try:
node = [n for n in driver.list_nodes() if n.extra['hostname'] == name][0]
except IndexError:
raise Exception("Node with name {} not found in Softlayer".format(name))
# destroy the node
self.logger.info('Destroying node...')
node = driver.destroy_node(node)
...
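The same guard in isolation, with stand-in data (hypothetical names, not the libcloud driver): indexing [0] on an empty filter result raises IndexError, and the try/except turns that into a message naming the missing node.

hostnames = ['web-1', 'db-1']    # stand-in for the hostnames of driver.list_nodes()
name = 'cache-1'
try:
    match = [h for h in hostnames if h == name][0]
except IndexError:
    print('Node with name {} not found'.format(name))   # the action raises Exception here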
|
2073942c49cb85664c068412951f2c1f7351679f
|
add_random_answers.py
|
add_random_answers.py
|
import pandas as pd
import time
from datetime import datetime, date
start_date = date(2014, 1, 1)
end_date = datetime.now()
date_range = pd.date_range(start_date, end_date)
for date in date_range:
print(date)
|
import pandas as pd
import time
from datetime import datetime, date
from random import randint
start_date = date(2014, 1, 1)
end_date = datetime.now()
date_range = pd.date_range(start_date, end_date)
for date in date_range:
random_hour = randint(10, 17)
random_minute = randint(0, 59)
random_second = randint(0, 59)
new_date = (date.year, date.month, date.day, random_hour, random_minute, random_second)
print(new_date)
|
Print random time based on date range
|
Print random time based on date range
|
Python
|
mit
|
andrewlrogers/srvy
|
import pandas as pd
import time
from datetime import datetime, date
+ from random import randint
start_date = date(2014, 1, 1)
end_date = datetime.now()
date_range = pd.date_range(start_date, end_date)
for date in date_range:
+ random_hour = randint(10, 17)
+ random_minute = randint(0, 59)
+ random_second = randint(0, 59)
+ new_date = (date.year, date.month, date.day, random_hour, random_minute, random_second)
- print(date)
+ print(new_date)
|
Print random time based on date range
|
## Code Before:
import pandas as pd
import time
from datetime import datetime, date
start_date = date(2014, 1, 1)
end_date = datetime.now()
date_range = pd.date_range(start_date, end_date)
for date in date_range:
print(date)
## Instruction:
Print random time based on date range
## Code After:
import pandas as pd
import time
from datetime import datetime, date
from random import randint
start_date = date(2014, 1, 1)
end_date = datetime.now()
date_range = pd.date_range(start_date, end_date)
for date in date_range:
random_hour = randint(10, 17)
random_minute = randint(0, 59)
random_second = randint(0, 59)
new_date = (date.year, date.month, date.day, random_hour, random_minute, random_second)
print(new_date)
|
...
import pandas as pd
import time
from datetime import datetime, date
from random import randint
start_date = date(2014, 1, 1)
end_date = datetime.now()
...
date_range = pd.date_range(start_date, end_date)
for date in date_range:
random_hour = randint(10, 17)
random_minute = randint(0, 59)
random_second = randint(0, 59)
new_date = (date.year, date.month, date.day, random_hour, random_minute, random_second)
print(new_date)
...
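A side sketch, not in the commit: randint is inclusive on both ends, so hours land in 10..17, and the tuple expands straight into a datetime if a real timestamp is wanted later.

from datetime import datetime
from random import randint

new_date = (2014, 1, 1, randint(10, 17), randint(0, 59), randint(0, 59))
print(datetime(*new_date).isoformat())   # e.g. '2014-01-01T13:42:07'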
|
1e17e868ff332003da959a397b8846c9386b35e8
|
API_to_backend.py
|
API_to_backend.py
|
from multiprocessing import Queue, Process
import time
import backend
command_queue = Queue()
response_queue = Queue()
def start_backend():
if handler:
handler.stop()
handler = Process(target=backend.start, args=(command_queue, response_queue))
handler.start()
def get_for(url, queue, timeout):
beginning = time.time()
result = queue.get(timeout=timeout)
if result["url"] == url:
return result["body"]
else:
queue.put(result)
return get_for(url, queue, timeout - (time.time()-beginning))
|
from multiprocessing import Queue, Process
import time
import backend
command_queue = Queue()
response_queue = Queue()
def start_backend():
handler = Process(target=backend.start, args=(command_queue, response_queue))
handler.start()
def get_for(url, queue, timeout):
beginning = time.time()
result = queue.get(timeout=timeout)
if result["url"] == url:
return result["body"]
else:
queue.put(result)
return get_for(url, queue, timeout - (time.time()-beginning))
|
Revert "Quit Backend If Running"
|
Revert "Quit Backend If Running"
This reverts commit a00432191e2575aba0f20ffb1a96a323699ae4fc.
|
Python
|
mit
|
IAPark/PITherm
|
from multiprocessing import Queue, Process
import time
import backend
command_queue = Queue()
response_queue = Queue()
def start_backend():
- if handler:
- handler.stop()
handler = Process(target=backend.start, args=(command_queue, response_queue))
handler.start()
def get_for(url, queue, timeout):
beginning = time.time()
result = queue.get(timeout=timeout)
if result["url"] == url:
return result["body"]
else:
queue.put(result)
return get_for(url, queue, timeout - (time.time()-beginning))
|
Revert "Quit Backend If Running"
|
## Code Before:
from multiprocessing import Queue, Process
import time
import backend
command_queue = Queue()
response_queue = Queue()
def start_backend():
if handler:
handler.stop()
handler = Process(target=backend.start, args=(command_queue, response_queue))
handler.start()
def get_for(url, queue, timeout):
beginning = time.time()
result = queue.get(timeout=timeout)
if result["url"] == url:
return result["body"]
else:
queue.put(result)
return get_for(url, queue, timeout - (time.time()-beginning))
## Instruction:
Revert "Quit Backend If Running"
## Code After:
from multiprocessing import Queue, Process
import time
import backend
command_queue = Queue()
response_queue = Queue()
def start_backend():
handler = Process(target=backend.start, args=(command_queue, response_queue))
handler.start()
def get_for(url, queue, timeout):
beginning = time.time()
result = queue.get(timeout=timeout)
if result["url"] == url:
return result["body"]
else:
queue.put(result)
return get_for(url, queue, timeout - (time.time()-beginning))
|
// ... existing code ...
def start_backend():
handler = Process(target=backend.start, args=(command_queue, response_queue))
handler.start()
// ... rest of the code ...
|
da2376744ec5b1823ea75f3cefbb0de0ac000c1b
|
tests/secrets.py
|
tests/secrets.py
|
import os
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
OPS_KEY = os.environ['OPS_KEY']
OPS_SECRET = os.environ['OPS_SECRET']
|
import os
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
OPS_KEY = os.environ['OPS_KEY']
OPS_SECRET = os.environ['OPS_SECRET']
TWITTER_CONSUMER_ACCESS = os.environ['TWITTER_CONSUMER_ACCESS']
TWITTER_CONSUMER_SECRET = os.environ['TWITTER_CONSUMER_SECRET']
TWITTER_ACCESS = os.environ['TWITTER_ACCESS']
TWITTER_SECRET = os.environ['TWITTER_SECRET']
|
Read twitter tokens from .env
|
Read twitter tokens from .env
|
Python
|
mit
|
nestauk/inet
|
import os
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
OPS_KEY = os.environ['OPS_KEY']
OPS_SECRET = os.environ['OPS_SECRET']
+ TWITTER_CONSUMER_ACCESS = os.environ['TWITTER_CONSUMER_ACCESS']
+ TWITTER_CONSUMER_SECRET = os.environ['TWITTER_CONSUMER_SECRET']
+ TWITTER_ACCESS = os.environ['TWITTER_ACCESS']
+ TWITTER_SECRET = os.environ['TWITTER_SECRET']
|
Read twitter tokens from .env
|
## Code Before:
import os
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
OPS_KEY = os.environ['OPS_KEY']
OPS_SECRET = os.environ['OPS_SECRET']
## Instruction:
Read twitter tokens from .env
## Code After:
import os
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
OPS_KEY = os.environ['OPS_KEY']
OPS_SECRET = os.environ['OPS_SECRET']
TWITTER_CONSUMER_ACCESS = os.environ['TWITTER_CONSUMER_ACCESS']
TWITTER_CONSUMER_SECRET = os.environ['TWITTER_CONSUMER_SECRET']
TWITTER_ACCESS = os.environ['TWITTER_ACCESS']
TWITTER_SECRET = os.environ['TWITTER_SECRET']
|
// ... existing code ...
load_dotenv(find_dotenv())
OPS_KEY = os.environ['OPS_KEY']
OPS_SECRET = os.environ['OPS_SECRET']
TWITTER_CONSUMER_ACCESS = os.environ['TWITTER_CONSUMER_ACCESS']
TWITTER_CONSUMER_SECRET = os.environ['TWITTER_CONSUMER_SECRET']
TWITTER_ACCESS = os.environ['TWITTER_ACCESS']
TWITTER_SECRET = os.environ['TWITTER_SECRET']
// ... rest of the code ...
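A self-contained sketch of what the loader does for these lookups (hypothetical .env contents): python-dotenv copies each KEY=value pair from the nearest .env file into os.environ, and a missing key still surfaces as a KeyError.

import os
from dotenv import load_dotenv, find_dotenv

# .env, assumed contents:  TWITTER_ACCESS=abc123
load_dotenv(find_dotenv())
print(os.environ['TWITTER_ACCESS'])   # 'abc123'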
|
5c7161858fa7ca2962f08b66f6d20ae49715c206
|
ci_scripts/buildLinuxWheels.py
|
ci_scripts/buildLinuxWheels.py
|
from subprocess import call, check_output
import sys
isPython3 = sys.version_info.major == 3
# https://stackoverflow.com/a/3357357
command = 'git log --format=%B -n 1'.split()
out = check_output(command)
if b'build wheels' not in out.lower() or not isPython3:
exit(0)
path = os.path.abspath(argv[1])
call('pip install cibuildwheel==0.7.0'.split())
call('cibuildwheel --output-dir {}'.format(path).split())
from dropboxUpload import uploadAll
uploadAll(path)
|
from subprocess import call, check_output
import sys
import os
isPython3 = sys.version_info.major == 3
# https://stackoverflow.com/a/3357357
command = 'git log --format=%B -n 1'.split()
out = check_output(command)
if b'build wheels' not in out.lower() or not isPython3:
exit(0)
path = os.path.abspath(argv[1])
call('pip install cibuildwheel==0.7.0'.split())
call('cibuildwheel --output-dir {}'.format(path).split())
from dropboxUpload import uploadAll
uploadAll(path)
|
Fix build wheels and upload 3.
|
Fix build wheels and upload 3.
|
Python
|
bsd-3-clause
|
jr-garcia/AssimpCy,jr-garcia/AssimpCy
|
from subprocess import call, check_output
import sys
+ import os
isPython3 = sys.version_info.major == 3
# https://stackoverflow.com/a/3357357
command = 'git log --format=%B -n 1'.split()
out = check_output(command)
if b'build wheels' not in out.lower() or not isPython3:
exit(0)
path = os.path.abspath(argv[1])
call('pip install cibuildwheel==0.7.0'.split())
call('cibuildwheel --output-dir {}'.format(path).split())
from dropboxUpload import uploadAll
uploadAll(path)
|
Fix build wheels and upload 3.
|
## Code Before:
from subprocess import call, check_output
import sys
isPython3 = sys.version_info.major == 3
# https://stackoverflow.com/a/3357357
command = 'git log --format=%B -n 1'.split()
out = check_output(command)
if b'build wheels' not in out.lower() or not isPython3:
exit(0)
path = os.path.abspath(argv[1])
call('pip install cibuildwheel==0.7.0'.split())
call('cibuildwheel --output-dir {}'.format(path).split())
from dropboxUpload import uploadAll
uploadAll(path)
## Instruction:
Fix build wheels and upload 3.
## Code After:
from subprocess import call, check_output
import sys
import os
isPython3 = sys.version_info.major == 3
# https://stackoverflow.com/a/3357357
command = 'git log --format=%B -n 1'.split()
out = check_output(command)
if b'build wheels' not in out.lower() or not isPython3:
exit(0)
path = os.path.abspath(argv[1])
call('pip install cibuildwheel==0.7.0'.split())
call('cibuildwheel --output-dir {}'.format(path).split())
from dropboxUpload import uploadAll
uploadAll(path)
|
// ... existing code ...
from subprocess import call, check_output
import sys
import os
isPython3 = sys.version_info.major == 3
// ... rest of the code ...
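A small sketch of why the commit-message check compares against a bytes literal (assumes it runs inside a git checkout): check_output returns bytes on Python 3, so either keep the b'...' pattern or decode first.

from subprocess import check_output

out = check_output('git log --format=%B -n 1'.split())
print(type(out))                               # <class 'bytes'>
print(b'build wheels' in out.lower())          # bytes pattern against bytes
print('build wheels' in out.decode().lower())  # or decode and compare strings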
|
a452adfb297ff40ec3db71108681829769b1fba4
|
pyface/tasks/enaml_editor.py
|
pyface/tasks/enaml_editor.py
|
from traits.api import Instance, on_trait_change
from enaml.components.constraints_widget import ConstraintsWidget
# local imports
from pyface.tasks.editor import Editor
class EnamlEditor(Editor):
""" Create an Editor for Enaml Components.
"""
#### EnamlEditor interface ##############################################
component = Instance(ConstraintsWidget)
def create_component(self):
raise NotImplementedError
###########################################################################
# 'IEditor' interface.
###########################################################################
def create(self, parent):
self.component = self.create_component()
self.component.setup(parent=parent)
self.control = self.component.toolkit_widget
self.component.on_trait_change(self.size_hint_changed, 'size_hint_updated')
def destroy(self):
self.control = None
self.component.destroy()
|
from traits.api import Instance, on_trait_change
from enaml.components.constraints_widget import ConstraintsWidget
# local imports
from pyface.tasks.editor import Editor
class EnamlEditor(Editor):
""" Create an Editor for Enaml Components.
"""
#### EnamlEditor interface ##############################################
component = Instance(ConstraintsWidget)
def create_component(self):
raise NotImplementedError
###########################################################################
# 'IEditor' interface.
###########################################################################
def create(self, parent):
self.component = self.create_component()
self.component.setup(parent=parent)
self.control = self.component.toolkit_widget
def destroy(self):
self.control = None
self.component.destroy()
|
Remove call of unimplemented method.
|
BUG: Remove call of unimplemented method.
|
Python
|
bsd-3-clause
|
brett-patterson/pyface,pankajp/pyface,geggo/pyface,geggo/pyface
|
from traits.api import Instance, on_trait_change
from enaml.components.constraints_widget import ConstraintsWidget
# local imports
from pyface.tasks.editor import Editor
class EnamlEditor(Editor):
""" Create an Editor for Enaml Components.
"""
#### EnamlEditor interface ##############################################
component = Instance(ConstraintsWidget)
def create_component(self):
raise NotImplementedError
###########################################################################
# 'IEditor' interface.
###########################################################################
def create(self, parent):
self.component = self.create_component()
self.component.setup(parent=parent)
self.control = self.component.toolkit_widget
- self.component.on_trait_change(self.size_hint_changed, 'size_hint_updated')
def destroy(self):
self.control = None
self.component.destroy()
|
Remove call of unimplemented method.
|
## Code Before:
from traits.api import Instance, on_trait_change
from enaml.components.constraints_widget import ConstraintsWidget
# local imports
from pyface.tasks.editor import Editor
class EnamlEditor(Editor):
""" Create an Editor for Enaml Components.
"""
#### EnamlEditor interface ##############################################
component = Instance(ConstraintsWidget)
def create_component(self):
raise NotImplementedError
###########################################################################
# 'IEditor' interface.
###########################################################################
def create(self, parent):
self.component = self.create_component()
self.component.setup(parent=parent)
self.control = self.component.toolkit_widget
self.component.on_trait_change(self.size_hint_changed, 'size_hint_updated')
def destroy(self):
self.control = None
self.component.destroy()
## Instruction:
Remove call of unimplemented method.
## Code After:
from traits.api import Instance, on_trait_change
from enaml.components.constraints_widget import ConstraintsWidget
# local imports
from pyface.tasks.editor import Editor
class EnamlEditor(Editor):
""" Create an Editor for Enaml Components.
"""
#### EnamlEditor interface ##############################################
component = Instance(ConstraintsWidget)
def create_component(self):
raise NotImplementedError
###########################################################################
# 'IEditor' interface.
###########################################################################
def create(self, parent):
self.component = self.create_component()
self.component.setup(parent=parent)
self.control = self.component.toolkit_widget
def destroy(self):
self.control = None
self.component.destroy()
|
...
self.component = self.create_component()
self.component.setup(parent=parent)
self.control = self.component.toolkit_widget
def destroy(self):
self.control = None
...
|
e08395a35c37fa7f7c0311cc4c7a71537b8b4227
|
tests/misc/print_exception.py
|
tests/misc/print_exception.py
|
try:
import uio as io
except ImportError:
import io
import sys
if hasattr(sys, 'print_exception'):
print_exception = sys.print_exception
else:
import traceback
print_exception = lambda e, f: traceback.print_exception(None, e, sys.exc_info()[2], file=f)
def print_exc(e):
buf = io.StringIO()
print_exception(e, buf)
s = buf.getvalue()
for l in s.split("\n"):
# uPy on pyboard prints <stdin> as file, so remove filename.
if l.startswith(" File "):
l = l.split('"')
print(l[0], l[2])
# uPy and CPy tracebacks differ in that CPy prints a source line for
# each traceback entry. In this case, we know that offending line
# has 4-space indent, so filter it out.
elif not l.startswith(" "):
print(l)
# basic exception message
try:
1/0
except Exception as e:
print('caught')
print_exc(e)
# exception message with more than 1 source-code line
def f():
g()
def g():
2/0
try:
f()
except Exception as e:
print('caught')
print_exc(e)
|
try:
import uio as io
except ImportError:
import io
import sys
if hasattr(sys, 'print_exception'):
print_exception = sys.print_exception
else:
import traceback
print_exception = lambda e, f: traceback.print_exception(None, e, sys.exc_info()[2], file=f)
def print_exc(e):
buf = io.StringIO()
print_exception(e, buf)
s = buf.getvalue()
for l in s.split("\n"):
# uPy on pyboard prints <stdin> as file, so remove filename.
if l.startswith(" File "):
l = l.split('"')
print(l[0], l[2])
# uPy and CPy tracebacks differ in that CPy prints a source line for
# each traceback entry. In this case, we know that offending line
# has 4-space indent, so filter it out.
elif not l.startswith(" "):
print(l)
# basic exception message
try:
1/0
except Exception as e:
print('caught')
print_exc(e)
# exception message with more than 1 source-code line
def f():
g()
def g():
2/0
try:
f()
except Exception as e:
print('caught')
print_exc(e)
# Here we have a function with lots of bytecode generated for a single source-line, and
# there is an error right at the end of the bytecode. It should report the correct line.
def f():
f([1, 2], [1, 2], [1, 2], {1:1, 1:1, 1:1, 1:1, 1:1, 1:1, 1:X})
return 1
try:
f()
except Exception as e:
print_exc(e)
|
Add test for line number printing with large bytecode chunk.
|
tests/misc: Add test for line number printing with large bytecode chunk.
|
Python
|
mit
|
henriknelson/micropython,AriZuu/micropython,AriZuu/micropython,micropython/micropython-esp32,micropython/micropython-esp32,PappaPeppar/micropython,MrSurly/micropython,MrSurly/micropython-esp32,infinnovation/micropython,trezor/micropython,micropython/micropython-esp32,lowRISC/micropython,torwag/micropython,PappaPeppar/micropython,swegener/micropython,MrSurly/micropython,Peetz0r/micropython-esp32,TDAbboud/micropython,hiway/micropython,kerneltask/micropython,cwyark/micropython,adafruit/micropython,trezor/micropython,adafruit/micropython,bvernoux/micropython,henriknelson/micropython,pramasoul/micropython,kerneltask/micropython,MrSurly/micropython-esp32,trezor/micropython,cwyark/micropython,torwag/micropython,hiway/micropython,adafruit/circuitpython,cwyark/micropython,tobbad/micropython,MrSurly/micropython,adafruit/circuitpython,henriknelson/micropython,MrSurly/micropython-esp32,pramasoul/micropython,tralamazza/micropython,pozetroninc/micropython,deshipu/micropython,cwyark/micropython,chrisdearman/micropython,adafruit/circuitpython,HenrikSolver/micropython,hiway/micropython,oopy/micropython,henriknelson/micropython,ryannathans/micropython,dmazzella/micropython,swegener/micropython,pramasoul/micropython,adafruit/circuitpython,pozetroninc/micropython,Peetz0r/micropython-esp32,blazewicz/micropython,toolmacher/micropython,ryannathans/micropython,alex-robbins/micropython,SHA2017-badge/micropython-esp32,bvernoux/micropython,chrisdearman/micropython,oopy/micropython,selste/micropython,pozetroninc/micropython,infinnovation/micropython,selste/micropython,pfalcon/micropython,puuu/micropython,SHA2017-badge/micropython-esp32,tralamazza/micropython,Peetz0r/micropython-esp32,Peetz0r/micropython-esp32,dmazzella/micropython,puuu/micropython,pfalcon/micropython,tobbad/micropython,chrisdearman/micropython,lowRISC/micropython,oopy/micropython,PappaPeppar/micropython,hiway/micropython,pfalcon/micropython,alex-robbins/micropython,cwyark/micropython,AriZuu/micropython,SHA2017-badge/micropython-esp32,TDAbboud/micropython,HenrikSolver/micropython,swegener/micropython,Peetz0r/micropython-esp32,MrSurly/micropython,torwag/micropython,alex-robbins/micropython,blazewicz/micropython,kerneltask/micropython,torwag/micropython,chrisdearman/micropython,trezor/micropython,alex-robbins/micropython,MrSurly/micropython-esp32,blazewicz/micropython,lowRISC/micropython,bvernoux/micropython,dmazzella/micropython,ryannathans/micropython,puuu/micropython,tobbad/micropython,ryannathans/micropython,pramasoul/micropython,AriZuu/micropython,Timmenem/micropython,blazewicz/micropython,bvernoux/micropython,tralamazza/micropython,MrSurly/micropython,swegener/micropython,blazewicz/micropython,deshipu/micropython,deshipu/micropython,trezor/micropython,selste/micropython,puuu/micropython,hiway/micropython,tralamazza/micropython,puuu/micropython,infinnovation/micropython,TDAbboud/micropython,TDAbboud/micropython,henriknelson/micropython,toolmacher/micropython,TDAbboud/micropython,selste/micropython,Timmenem/micropython,alex-robbins/micropython,pfalcon/micropython,oopy/micropython,AriZuu/micropython,lowRISC/micropython,pozetroninc/micropython,pramasoul/micropython,lowRISC/micropython,torwag/micropython,Timmenem/micropython,infinnovation/micropython,swegener/micropython,bvernoux/micropython,toolmacher/micropython,kerneltask/micropython,PappaPeppar/micropython,pfalcon/micropython,HenrikSolver/micropython,PappaPeppar/micropython,dmazzella/micropython,pozetroninc/micropython,tobbad/micropython,SHA2017-badge/micropython-esp32,deshipu/micropython,toolmacher
/micropython,adafruit/micropython,HenrikSolver/micropython,SHA2017-badge/micropython-esp32,HenrikSolver/micropython,chrisdearman/micropython,adafruit/circuitpython,adafruit/micropython,Timmenem/micropython,micropython/micropython-esp32,infinnovation/micropython,micropython/micropython-esp32,deshipu/micropython,adafruit/circuitpython,adafruit/micropython,kerneltask/micropython,selste/micropython,Timmenem/micropython,toolmacher/micropython,ryannathans/micropython,oopy/micropython,tobbad/micropython,MrSurly/micropython-esp32
|
try:
import uio as io
except ImportError:
import io
import sys
if hasattr(sys, 'print_exception'):
print_exception = sys.print_exception
else:
import traceback
print_exception = lambda e, f: traceback.print_exception(None, e, sys.exc_info()[2], file=f)
def print_exc(e):
buf = io.StringIO()
print_exception(e, buf)
s = buf.getvalue()
for l in s.split("\n"):
# uPy on pyboard prints <stdin> as file, so remove filename.
if l.startswith(" File "):
l = l.split('"')
print(l[0], l[2])
# uPy and CPy tracebacks differ in that CPy prints a source line for
# each traceback entry. In this case, we know that offending line
# has 4-space indent, so filter it out.
elif not l.startswith(" "):
print(l)
# basic exception message
try:
1/0
except Exception as e:
print('caught')
print_exc(e)
# exception message with more than 1 source-code line
def f():
g()
def g():
2/0
try:
f()
except Exception as e:
print('caught')
print_exc(e)
+ # Here we have a function with lots of bytecode generated for a single source-line, and
+ # there is an error right at the end of the bytecode. It should report the correct line.
+ def f():
+ f([1, 2], [1, 2], [1, 2], {1:1, 1:1, 1:1, 1:1, 1:1, 1:1, 1:X})
+ return 1
+ try:
+ f()
+ except Exception as e:
+ print_exc(e)
+
|
Add test for line number printing with large bytecode chunk.
|
## Code Before:
try:
import uio as io
except ImportError:
import io
import sys
if hasattr(sys, 'print_exception'):
print_exception = sys.print_exception
else:
import traceback
print_exception = lambda e, f: traceback.print_exception(None, e, sys.exc_info()[2], file=f)
def print_exc(e):
buf = io.StringIO()
print_exception(e, buf)
s = buf.getvalue()
for l in s.split("\n"):
# uPy on pyboard prints <stdin> as file, so remove filename.
if l.startswith(" File "):
l = l.split('"')
print(l[0], l[2])
# uPy and CPy tracebacks differ in that CPy prints a source line for
# each traceback entry. In this case, we know that offending line
# has 4-space indent, so filter it out.
elif not l.startswith(" "):
print(l)
# basic exception message
try:
1/0
except Exception as e:
print('caught')
print_exc(e)
# exception message with more than 1 source-code line
def f():
g()
def g():
2/0
try:
f()
except Exception as e:
print('caught')
print_exc(e)
## Instruction:
Add test for line number printing with large bytecode chunk.
## Code After:
try:
import uio as io
except ImportError:
import io
import sys
if hasattr(sys, 'print_exception'):
print_exception = sys.print_exception
else:
import traceback
print_exception = lambda e, f: traceback.print_exception(None, e, sys.exc_info()[2], file=f)
def print_exc(e):
buf = io.StringIO()
print_exception(e, buf)
s = buf.getvalue()
for l in s.split("\n"):
# uPy on pyboard prints <stdin> as file, so remove filename.
if l.startswith(" File "):
l = l.split('"')
print(l[0], l[2])
# uPy and CPy tracebacks differ in that CPy prints a source line for
# each traceback entry. In this case, we know that offending line
# has 4-space indent, so filter it out.
elif not l.startswith(" "):
print(l)
# basic exception message
try:
1/0
except Exception as e:
print('caught')
print_exc(e)
# exception message with more than 1 source-code line
def f():
g()
def g():
2/0
try:
f()
except Exception as e:
print('caught')
print_exc(e)
# Here we have a function with lots of bytecode generated for a single source-line, and
# there is an error right at the end of the bytecode. It should report the correct line.
def f():
f([1, 2], [1, 2], [1, 2], {1:1, 1:1, 1:1, 1:1, 1:1, 1:1, 1:X})
return 1
try:
f()
except Exception as e:
print_exc(e)
|
// ... existing code ...
except Exception as e:
print('caught')
print_exc(e)
# Here we have a function with lots of bytecode generated for a single source-line, and
# there is an error right at the end of the bytecode. It should report the correct line.
def f():
f([1, 2], [1, 2], [1, 2], {1:1, 1:1, 1:1, 1:1, 1:1, 1:1, 1:X})
return 1
try:
f()
except Exception as e:
print_exc(e)
// ... rest of the code ...
|
c530ea901c374fef97390260e66492f37fc90a3f
|
setman/__init__.py
|
setman/__init__.py
|
from setman.lazy import LazySettings
__all__ = ('get_version', 'settings')
VERSION = (0, 1, 'beta')
settings = LazySettings()
def get_version(version=None):
"""
Return setman version number in human readable form.
You could call this function without args and in this case value from
``setman.VERSION`` would be used.
"""
version = version or VERSION
if len(version) > 2 and version[2] is not None:
if isinstance(version[2], int):
return '%d.%d.%d' % version
return '%d.%d-%s' % version
return '%d.%d' % version[:2]
|
try:
from setman.lazy import LazySettings
except ImportError:
# Do not care about "Settings cannot be imported, because environment
# variable DJANGO_SETTINGS_MODULE is undefined." errors
LazySettings = type('LazySettings', (object, ), {})
__all__ = ('get_version', 'settings')
VERSION = (0, 1, 'beta')
settings = LazySettings()
def get_version(version=None):
"""
Return setman version number in human readable form.
You could call this function without args and in this case value from
``setman.VERSION`` would be used.
"""
version = version or VERSION
if len(version) > 2 and version[2] is not None:
if isinstance(version[2], int):
return '%d.%d.%d' % version
return '%d.%d-%s' % version
return '%d.%d' % version[:2]
|
Fix installing ``django-setman`` via PIP.
|
Fix installing ``django-setman`` via PIP.
|
Python
|
bsd-3-clause
|
playpauseandstop/setman,owais/django-setman,owais/django-setman
|
+ try:
- from setman.lazy import LazySettings
+ from setman.lazy import LazySettings
+ except ImportError:
+ # Do not care about "Settings cannot be imported, because environment
+ # variable DJANGO_SETTINGS_MODULE is undefined." errors
+ LazySettings = type('LazySettings', (object, ), {})
__all__ = ('get_version', 'settings')
VERSION = (0, 1, 'beta')
settings = LazySettings()
def get_version(version=None):
"""
Return setman version number in human readable form.
You could call this function without args and in this case value from
``setman.VERSION`` would be used.
"""
version = version or VERSION
if len(version) > 2 and version[2] is not None:
if isinstance(version[2], int):
return '%d.%d.%d' % version
return '%d.%d-%s' % version
return '%d.%d' % version[:2]
|
Fix installing ``django-setman`` via PIP.
|
## Code Before:
from setman.lazy import LazySettings
__all__ = ('get_version', 'settings')
VERSION = (0, 1, 'beta')
settings = LazySettings()
def get_version(version=None):
"""
Return setman version number in human readable form.
You could call this function without args and in this case value from
``setman.VERSION`` would be used.
"""
version = version or VERSION
if len(version) > 2 and version[2] is not None:
if isinstance(version[2], int):
return '%d.%d.%d' % version
return '%d.%d-%s' % version
return '%d.%d' % version[:2]
## Instruction:
Fix installing ``django-setman`` via PIP.
## Code After:
try:
from setman.lazy import LazySettings
except ImportError:
# Do not care about "Settings cannot be imported, because environment
# variable DJANGO_SETTINGS_MODULE is undefined." errors
LazySettings = type('LazySettings', (object, ), {})
__all__ = ('get_version', 'settings')
VERSION = (0, 1, 'beta')
settings = LazySettings()
def get_version(version=None):
"""
Return setman version number in human readable form.
You could call this function without args and in this case value from
``setman.VERSION`` would be used.
"""
version = version or VERSION
if len(version) > 2 and version[2] is not None:
if isinstance(version[2], int):
return '%d.%d.%d' % version
return '%d.%d-%s' % version
return '%d.%d' % version[:2]
|
# ... existing code ...
try:
from setman.lazy import LazySettings
except ImportError:
# Do not care about "Settings cannot be imported, because environment
# variable DJANGO_SETTINGS_MODULE is undefined." errors
LazySettings = type('LazySettings', (object, ), {})
__all__ = ('get_version', 'settings')
# ... rest of the code ...
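A minimal sketch of the fallback trick (hypothetical failing import): the three-argument form of type() builds a do-nothing placeholder class, which is why importing the package during installation no longer dies on Django's missing-settings error.

try:
    from nonexistent_module import LazySettings   # stands in for the import that fails
except ImportError:
    LazySettings = type('LazySettings', (object,), {})

settings = LazySettings()        # still instantiable, just has no real behaviour
print(type(settings).__name__)   # 'LazySettings'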
|
b503a6e893d71b96b3737e567dde16f110db5fc7
|
src/prepare_turk_batch.py
|
src/prepare_turk_batch.py
|
import os
import sys
import csv
import json
import html
def do_command(args):
assert os.path.exists(args.input)
writer = csv.writer(args.output)
writer.writerow(["document"])
for fname in os.listdir(args.input):
if not fname.endswith('.json'): continue
with open(os.path.join(args.input, fname)) as f:
doc = json.load(f)
writer.writerow([html.escape(json.dumps(doc))])
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description='')
parser.add_argument('-i', '--input', type=str, default='../data/pilot', help="Directory with JSON files")
parser.add_argument('-o', '--output', type=argparse.FileType('w'), default=sys.stdout, help="A CSV to use with MTurk")
parser.set_defaults(func=do_command)
#subparsers = parser.add_subparsers()
#command_parser = subparsers.add_parser('command', help='' )
#command_parser.set_defaults(func=do_command)
ARGS = parser.parse_args()
if ARGS.func is None:
parser.print_help()
sys.exit(1)
else:
ARGS.func(ARGS)
|
import os
import sys
import csv
import json
import html
def do_command(args):
assert os.path.exists(args.input)
writer = csv.writer(args.output)
writer.writerow(["document"])
for i, fname in enumerate(os.listdir(args.input)):
if not fname.endswith('.json'): continue
with open(os.path.join(args.input, fname)) as f:
doc = json.load(f)
for j, (prompt, time_range) in enumerate(doc["prompts"]):
doc["id"] = "doc-{}-{}".format(i,j)
doc["prompt"] = prompt
doc["recommendedMinWordCount"] = time_range[0]
doc["recommendedMaxWordCount"] = time_range[1]
writer.writerow([html.escape(json.dumps(doc))])
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description='')
parser.add_argument('-i', '--input', type=str, default='../data/pilot', help="Directory with JSON files")
parser.add_argument('-o', '--output', type=argparse.FileType('w'), default=sys.stdout, help="A CSV to use with MTurk")
parser.set_defaults(func=do_command)
#subparsers = parser.add_subparsers()
#command_parser = subparsers.add_parser('command', help='' )
#command_parser.set_defaults(func=do_command)
ARGS = parser.parse_args()
if ARGS.func is None:
parser.print_help()
sys.exit(1)
else:
ARGS.func(ARGS)
|
Prepare data with the new fields and prompts
|
Prepare data with the new fields and prompts
|
Python
|
mit
|
arunchaganty/briefly,arunchaganty/briefly,arunchaganty/briefly,arunchaganty/briefly
|
import os
import sys
import csv
import json
import html
def do_command(args):
assert os.path.exists(args.input)
writer = csv.writer(args.output)
writer.writerow(["document"])
- for fname in os.listdir(args.input):
+ for i, fname in enumerate(os.listdir(args.input)):
if not fname.endswith('.json'): continue
with open(os.path.join(args.input, fname)) as f:
doc = json.load(f)
+ for j, (prompt, time_range) in enumerate(doc["prompts"]):
+ doc["id"] = "doc-{}-{}".format(i,j)
+ doc["prompt"] = prompt
+ doc["recommendedMinWordCount"] = time_range[0]
+ doc["recommendedMaxWordCount"] = time_range[1]
- writer.writerow([html.escape(json.dumps(doc))])
+ writer.writerow([html.escape(json.dumps(doc))])
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description='')
parser.add_argument('-i', '--input', type=str, default='../data/pilot', help="Directory with JSON files")
parser.add_argument('-o', '--output', type=argparse.FileType('w'), default=sys.stdout, help="A CSV to use with MTurk")
parser.set_defaults(func=do_command)
#subparsers = parser.add_subparsers()
#command_parser = subparsers.add_parser('command', help='' )
#command_parser.set_defaults(func=do_command)
ARGS = parser.parse_args()
if ARGS.func is None:
parser.print_help()
sys.exit(1)
else:
ARGS.func(ARGS)
|
Prepare data with the new fields and prompts
|
## Code Before:
import os
import sys
import csv
import json
import html
def do_command(args):
assert os.path.exists(args.input)
writer = csv.writer(args.output)
writer.writerow(["document"])
for fname in os.listdir(args.input):
if not fname.endswith('.json'): continue
with open(os.path.join(args.input, fname)) as f:
doc = json.load(f)
writer.writerow([html.escape(json.dumps(doc))])
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description='')
parser.add_argument('-i', '--input', type=str, default='../data/pilot', help="Directory with JSON files")
parser.add_argument('-o', '--output', type=argparse.FileType('w'), default=sys.stdout, help="A CSV to use with MTurk")
parser.set_defaults(func=do_command)
#subparsers = parser.add_subparsers()
#command_parser = subparsers.add_parser('command', help='' )
#command_parser.set_defaults(func=do_command)
ARGS = parser.parse_args()
if ARGS.func is None:
parser.print_help()
sys.exit(1)
else:
ARGS.func(ARGS)
## Instruction:
Prepare data with the new fields and prompts
## Code After:
import os
import sys
import csv
import json
import html
def do_command(args):
assert os.path.exists(args.input)
writer = csv.writer(args.output)
writer.writerow(["document"])
for i, fname in enumerate(os.listdir(args.input)):
if not fname.endswith('.json'): continue
with open(os.path.join(args.input, fname)) as f:
doc = json.load(f)
for j, (prompt, time_range) in enumerate(doc["prompts"]):
doc["id"] = "doc-{}-{}".format(i,j)
doc["prompt"] = prompt
doc["recommendedMinWordCount"] = time_range[0]
doc["recommendedMaxWordCount"] = time_range[1]
writer.writerow([html.escape(json.dumps(doc))])
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description='')
parser.add_argument('-i', '--input', type=str, default='../data/pilot', help="Directory with JSON files")
parser.add_argument('-o', '--output', type=argparse.FileType('w'), default=sys.stdout, help="A CSV to use with MTurk")
parser.set_defaults(func=do_command)
#subparsers = parser.add_subparsers()
#command_parser = subparsers.add_parser('command', help='' )
#command_parser.set_defaults(func=do_command)
ARGS = parser.parse_args()
if ARGS.func is None:
parser.print_help()
sys.exit(1)
else:
ARGS.func(ARGS)
|
// ... existing code ...
writer = csv.writer(args.output)
writer.writerow(["document"])
for i, fname in enumerate(os.listdir(args.input)):
if not fname.endswith('.json'): continue
with open(os.path.join(args.input, fname)) as f:
doc = json.load(f)
for j, (prompt, time_range) in enumerate(doc["prompts"]):
doc["id"] = "doc-{}-{}".format(i,j)
doc["prompt"] = prompt
doc["recommendedMinWordCount"] = time_range[0]
doc["recommendedMaxWordCount"] = time_range[1]
writer.writerow([html.escape(json.dumps(doc))])
if __name__ == "__main__":
import argparse
// ... rest of the code ...
|
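The record above does not show the pilot JSON files themselves; the shape below is only an assumption inferred from how the new loop unpacks doc["prompts"], included to illustrate what one input document might look like:
# Hypothetical input document (field names other than "prompts" are assumptions):
example_doc = {
    "title": "Example article",
    "prompts": [
        ["Summarize the article.", [50, 120]],           # (prompt, [min_words, max_words])
        ["What is the author's main claim?", [30, 80]],
    ],
}
# For each (prompt, time_range) pair the script emits one CSV row whose JSON payload carries
# id "doc-<file index>-<prompt index>", the prompt text, and the recommended word-count bounds.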
f012d59f163a8b8a693dc894d211f077ae015d11
|
Instanssi/kompomaatti/tests.py
|
Instanssi/kompomaatti/tests.py
|
from django.test import TestCase
from Instanssi.kompomaatti.models import Entry
VALID_YOUTUBE_URLS = [
# must handle various protocols in the video URL
"http://www.youtube.com/v/asdf123456",
"https://www.youtube.com/v/asdf123456/",
"//www.youtube.com/v/asdf123456",
"www.youtube.com/v/asdf123456",
# must handle various other ways to define the video
"www.youtube.com/watch?v=asdf123456",
"http://youtu.be/asdf123456",
"http://youtu.be/asdf123456/"
]
class KompomaattiTests(TestCase):
def setUp(self):
pass
def test_youtube_urls(self):
"""Test that various YouTube URLs are parsed properly."""
for url in VALID_YOUTUBE_URLS:
print("Test URL: %s" % url)
self.assertEqual(Entry.youtube_url_to_id(url), "asdf123456")
|
from django.test import TestCase
from Instanssi.kompomaatti.models import Entry
VALID_YOUTUBE_URLS = [
# must handle various protocols and hostnames in the video URL
"http://www.youtube.com/v/asdf123456",
"https://www.youtube.com/v/asdf123456/",
"//www.youtube.com/v/asdf123456",
"www.youtube.com/v/asdf123456",
"youtube.com/v/asdf123456/",
# must handle various other ways to define the video
"www.youtube.com/watch?v=asdf123456",
"http://youtu.be/asdf123456",
"https://youtu.be/asdf123456/"
]
class KompomaattiTests(TestCase):
def setUp(self):
pass
def test_youtube_urls(self):
"""Test YouTube video id extraction from URLs."""
for url in VALID_YOUTUBE_URLS:
self.assertEqual(Entry.youtube_url_to_id(url), "asdf123456",
msg="failing URL: %s" % url)
|
Add more test data; improve feedback on failing case
|
kompomaatti: Add more test data; improve feedback on failing case
|
Python
|
mit
|
Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org
|
from django.test import TestCase
from Instanssi.kompomaatti.models import Entry
VALID_YOUTUBE_URLS = [
- # must handle various protocols in the video URL
+ # must handle various protocols and hostnames in the video URL
"http://www.youtube.com/v/asdf123456",
"https://www.youtube.com/v/asdf123456/",
"//www.youtube.com/v/asdf123456",
"www.youtube.com/v/asdf123456",
+ "youtube.com/v/asdf123456/",
# must handle various other ways to define the video
"www.youtube.com/watch?v=asdf123456",
"http://youtu.be/asdf123456",
- "http://youtu.be/asdf123456/"
+ "https://youtu.be/asdf123456/"
]
class KompomaattiTests(TestCase):
def setUp(self):
pass
def test_youtube_urls(self):
- """Test that various YouTube URLs are parsed properly."""
+ """Test YouTube video id extraction from URLs."""
for url in VALID_YOUTUBE_URLS:
- print("Test URL: %s" % url)
- self.assertEqual(Entry.youtube_url_to_id(url), "asdf123456")
+ self.assertEqual(Entry.youtube_url_to_id(url), "asdf123456",
+ msg="failing URL: %s" % url)
|
Add more test data; improve feedback on failing case
|
## Code Before:
from django.test import TestCase
from Instanssi.kompomaatti.models import Entry
VALID_YOUTUBE_URLS = [
# must handle various protocols in the video URL
"http://www.youtube.com/v/asdf123456",
"https://www.youtube.com/v/asdf123456/",
"//www.youtube.com/v/asdf123456",
"www.youtube.com/v/asdf123456",
# must handle various other ways to define the video
"www.youtube.com/watch?v=asdf123456",
"http://youtu.be/asdf123456",
"http://youtu.be/asdf123456/"
]
class KompomaattiTests(TestCase):
def setUp(self):
pass
def test_youtube_urls(self):
"""Test that various YouTube URLs are parsed properly."""
for url in VALID_YOUTUBE_URLS:
print("Test URL: %s" % url)
self.assertEqual(Entry.youtube_url_to_id(url), "asdf123456")
## Instruction:
Add more test data; improve feedback on failing case
## Code After:
from django.test import TestCase
from Instanssi.kompomaatti.models import Entry
VALID_YOUTUBE_URLS = [
# must handle various protocols and hostnames in the video URL
"http://www.youtube.com/v/asdf123456",
"https://www.youtube.com/v/asdf123456/",
"//www.youtube.com/v/asdf123456",
"www.youtube.com/v/asdf123456",
"youtube.com/v/asdf123456/",
# must handle various other ways to define the video
"www.youtube.com/watch?v=asdf123456",
"http://youtu.be/asdf123456",
"https://youtu.be/asdf123456/"
]
class KompomaattiTests(TestCase):
def setUp(self):
pass
def test_youtube_urls(self):
"""Test YouTube video id extraction from URLs."""
for url in VALID_YOUTUBE_URLS:
self.assertEqual(Entry.youtube_url_to_id(url), "asdf123456",
msg="failing URL: %s" % url)
|
...
VALID_YOUTUBE_URLS = [
# must handle various protocols and hostnames in the video URL
"http://www.youtube.com/v/asdf123456",
"https://www.youtube.com/v/asdf123456/",
"//www.youtube.com/v/asdf123456",
"www.youtube.com/v/asdf123456",
"youtube.com/v/asdf123456/",
# must handle various other ways to define the video
"www.youtube.com/watch?v=asdf123456",
"http://youtu.be/asdf123456",
"https://youtu.be/asdf123456/"
]
...
pass
def test_youtube_urls(self):
"""Test YouTube video id extraction from URLs."""
for url in VALID_YOUTUBE_URLS:
self.assertEqual(Entry.youtube_url_to_id(url), "asdf123456",
msg="failing URL: %s" % url)
...
|
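Entry.youtube_url_to_id itself is not shown in the record; purely as a sketch, a single regular expression that would satisfy every URL form in VALID_YOUTUBE_URLS could look like this (an assumption, not the project's actual implementation):
import re

def youtube_url_to_id(url):
    # Handles .../v/<id>, .../watch?v=<id> and youtu.be/<id>, with or without a
    # protocol, a "www." prefix, or a trailing slash.
    match = re.search(r'(?:youtu\.be/|youtube\.com/(?:v/|watch\?v=))([\w-]+)', url)
    return match.group(1) if match else None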
f4c8f003a4ffdd8e64468d261aa2cd34d58f1b9d
|
src/compdb/__init__.py
|
src/compdb/__init__.py
|
import warnings
from signac import *
msg = "compdb was renamed to signac. Please import signac in the future."
warnings.warn(DeprecationWarning, msg)
|
import warnings
from signac import *
__all__ = ['core', 'contrib', 'db']
msg = "compdb was renamed to signac. Please import signac in the future."
print('Warning!',msg)
warnings.warn(msg, DeprecationWarning)
|
Add surrogate compdb package, linking to signac.
|
Add surrogate compdb package, linking to signac.
Provided to guarantee compatibility.
Prints warning on import.
|
Python
|
bsd-3-clause
|
csadorf/signac,csadorf/signac
|
import warnings
from signac import *
+ __all__ = ['core', 'contrib', 'db']
msg = "compdb was renamed to signac. Please import signac in the future."
+ print('Warning!',msg)
- warnings.warn(DeprecationWarning, msg)
+ warnings.warn(msg, DeprecationWarning)
|
Add surrogate compdb package, linking to signac.
|
## Code Before:
import warnings
from signac import *
msg = "compdb was renamed to signac. Please import signac in the future."
warnings.warn(DeprecationWarning, msg)
## Instruction:
Add surrogate compdb package, linking to signac.
## Code After:
import warnings
from signac import *
__all__ = ['core', 'contrib', 'db']
msg = "compdb was renamed to signac. Please import signac in the future."
print('Warning!',msg)
warnings.warn(msg, DeprecationWarning)
|
...
import warnings
from signac import *
__all__ = ['core', 'contrib', 'db']
msg = "compdb was renamed to signac. Please import signac in the future."
print('Warning!',msg)
warnings.warn(msg, DeprecationWarning)
...
|
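A side note on the change above: DeprecationWarning is filtered out by default in most non-__main__ code, which is presumably why an explicit print() was added next to warnings.warn(). A minimal sketch of how a caller would surface the warning anyway:
import warnings

# Opt in to seeing DeprecationWarning before importing the surrogate package.
warnings.simplefilter("always", DeprecationWarning)
import compdb   # prints "Warning! compdb was renamed to signac. ..." and emits the DeprecationWarning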
ba42df4296a02396e823ee9692fb84eb0deb8b7c
|
corehq/messaging/smsbackends/start_enterprise/views.py
|
corehq/messaging/smsbackends/start_enterprise/views.py
|
from __future__ import absolute_import
from corehq.apps.sms.views import IncomingBackendView
from corehq.messaging.smsbackends.start_enterprise.models import (
StartEnterpriseBackend,
StartEnterpriseDeliveryReceipt,
)
from datetime import datetime
from django.http import HttpResponse, HttpResponseBadRequest
class StartEnterpriseDeliveryReceiptView(IncomingBackendView):
urlname = 'start_enterprise_dlr'
@property
def backend_class(self):
return StartEnterpriseBackend
def get(self, request, api_key, *args, **kwargs):
message_id = request.GET.get('msgid')
if not message_id:
return HttpResponseBadRequest("Missing 'msgid'")
message_id = message_id.strip()
try:
dlr = StartEnterpriseDeliveryReceipt.objects.get(message_id=message_id)
except StartEnterpriseDeliveryReceipt.DoesNotExist:
dlr = None
if dlr:
dlr.received_on = datetime.utcnow()
dlr.info = request.GET.dict()
dlr.save()
# Based on the documentation, a response of "1" acknowledges receipt of the DLR
return HttpResponse("1")
|
from __future__ import absolute_import
import logging
from corehq.apps.sms.views import IncomingBackendView
from corehq.messaging.smsbackends.start_enterprise.models import (
StartEnterpriseBackend,
StartEnterpriseDeliveryReceipt,
)
from datetime import datetime
from django.http import HttpResponse, HttpResponseBadRequest
class StartEnterpriseDeliveryReceiptView(IncomingBackendView):
urlname = 'start_enterprise_dlr'
@property
def backend_class(self):
return StartEnterpriseBackend
def get(self, request, api_key, *args, **kwargs):
logging.info("Received Start Enterprise delivery receipt with items: %s" % request.GET.dict().keys())
message_id = request.GET.get('msgid')
if not message_id:
return HttpResponseBadRequest("Missing 'msgid'")
message_id = message_id.strip()
try:
dlr = StartEnterpriseDeliveryReceipt.objects.get(message_id=message_id)
except StartEnterpriseDeliveryReceipt.DoesNotExist:
dlr = None
if dlr:
dlr.received_on = datetime.utcnow()
dlr.info = request.GET.dict()
dlr.save()
# Based on the documentation, a response of "1" acknowledges receipt of the DLR
return HttpResponse("1")
|
Add logging to delivery receipt view
|
Add logging to delivery receipt view
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
from __future__ import absolute_import
+ import logging
from corehq.apps.sms.views import IncomingBackendView
from corehq.messaging.smsbackends.start_enterprise.models import (
StartEnterpriseBackend,
StartEnterpriseDeliveryReceipt,
)
from datetime import datetime
from django.http import HttpResponse, HttpResponseBadRequest
class StartEnterpriseDeliveryReceiptView(IncomingBackendView):
urlname = 'start_enterprise_dlr'
@property
def backend_class(self):
return StartEnterpriseBackend
def get(self, request, api_key, *args, **kwargs):
+ logging.info("Received Start Enterprise delivery receipt with items: %s" % request.GET.dict().keys())
+
message_id = request.GET.get('msgid')
if not message_id:
return HttpResponseBadRequest("Missing 'msgid'")
message_id = message_id.strip()
try:
dlr = StartEnterpriseDeliveryReceipt.objects.get(message_id=message_id)
except StartEnterpriseDeliveryReceipt.DoesNotExist:
dlr = None
if dlr:
dlr.received_on = datetime.utcnow()
dlr.info = request.GET.dict()
dlr.save()
# Based on the documentation, a response of "1" acknowledges receipt of the DLR
return HttpResponse("1")
|
Add logging to delivery receipt view
|
## Code Before:
from __future__ import absolute_import
from corehq.apps.sms.views import IncomingBackendView
from corehq.messaging.smsbackends.start_enterprise.models import (
StartEnterpriseBackend,
StartEnterpriseDeliveryReceipt,
)
from datetime import datetime
from django.http import HttpResponse, HttpResponseBadRequest
class StartEnterpriseDeliveryReceiptView(IncomingBackendView):
urlname = 'start_enterprise_dlr'
@property
def backend_class(self):
return StartEnterpriseBackend
def get(self, request, api_key, *args, **kwargs):
message_id = request.GET.get('msgid')
if not message_id:
return HttpResponseBadRequest("Missing 'msgid'")
message_id = message_id.strip()
try:
dlr = StartEnterpriseDeliveryReceipt.objects.get(message_id=message_id)
except StartEnterpriseDeliveryReceipt.DoesNotExist:
dlr = None
if dlr:
dlr.received_on = datetime.utcnow()
dlr.info = request.GET.dict()
dlr.save()
# Based on the documentation, a response of "1" acknowledges receipt of the DLR
return HttpResponse("1")
## Instruction:
Add logging to delivery receipt view
## Code After:
from __future__ import absolute_import
import logging
from corehq.apps.sms.views import IncomingBackendView
from corehq.messaging.smsbackends.start_enterprise.models import (
StartEnterpriseBackend,
StartEnterpriseDeliveryReceipt,
)
from datetime import datetime
from django.http import HttpResponse, HttpResponseBadRequest
class StartEnterpriseDeliveryReceiptView(IncomingBackendView):
urlname = 'start_enterprise_dlr'
@property
def backend_class(self):
return StartEnterpriseBackend
def get(self, request, api_key, *args, **kwargs):
logging.info("Received Start Enterprise delivery receipt with items: %s" % request.GET.dict().keys())
message_id = request.GET.get('msgid')
if not message_id:
return HttpResponseBadRequest("Missing 'msgid'")
message_id = message_id.strip()
try:
dlr = StartEnterpriseDeliveryReceipt.objects.get(message_id=message_id)
except StartEnterpriseDeliveryReceipt.DoesNotExist:
dlr = None
if dlr:
dlr.received_on = datetime.utcnow()
dlr.info = request.GET.dict()
dlr.save()
# Based on the documentation, a response of "1" acknowledges receipt of the DLR
return HttpResponse("1")
|
...
from __future__ import absolute_import
import logging
from corehq.apps.sms.views import IncomingBackendView
from corehq.messaging.smsbackends.start_enterprise.models import (
StartEnterpriseBackend,
...
return StartEnterpriseBackend
def get(self, request, api_key, *args, **kwargs):
logging.info("Received Start Enterprise delivery receipt with items: %s" % request.GET.dict().keys())
message_id = request.GET.get('msgid')
if not message_id:
...
|
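To make the new log line concrete: request.GET.dict().keys() yields only the parameter names of the delivery receipt. A small sketch with a hypothetical callback (the parameter names are assumptions, not taken from the gateway's documentation):
from django.test import RequestFactory

# Hypothetical delivery receipt callback: ?msgid=abc123&status=DELIVRD
request = RequestFactory().get('/dlr/', {'msgid': 'abc123', 'status': 'DELIVRD'})
request.GET.dict().keys()   # -> dict_keys(['msgid', 'status']), which is what the new line logs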
7654d9dcebb0ad1e862e376b5b694234173289ed
|
twitter_helper/util.py
|
twitter_helper/util.py
|
import random
def random_line(afile, max_chars = 123, min_chars = 5):
line = next(afile)
for num, aline in enumerate(afile):
aline = aline.strip()
if (len(aline) < min_chars or aline[0].islower() or len(aline) > max_chars) or random.randrange(num + 2):
continue
line = aline
return line
def prepare_quote(text_file, signature=" -- Hamlet", max_chars = 123, min_chars = 5,):
line = random_line(text_file, max_chars, min_chars)
number = random.randrange(1,1000,2)
line = "{0}] " + line + signature
line = line.format(number)
return line
|
import random
def random_line(afile, max_chars = 123, min_chars = 5):
line = next(afile)
for num, aline in enumerate(afile):
aline = aline.strip()
if (len(aline) < min_chars or aline[0].islower() or len(aline) > max_chars) or random.randrange(num + 2):
continue
line = aline
#Be polite, put things back in the place you found them
afile.seek(0)
return line
def prepare_quote(text_file, signature=" -- Hamlet", max_chars = 123, min_chars = 5,):
line = random_line(text_file, max_chars, min_chars)
number = random.randrange(1,1000,2)
line = "{0}] " + line + signature
line = line.format(number)
return line
|
Reset pointer to the beginning of the file once it has been read
|
Reset pointer to the beginning of the file once it has been read
Be polite, put things back in the place you found them
|
Python
|
mit
|
kuzeko/Twitter-Importer,kuzeko/Twitter-Importer
|
import random
def random_line(afile, max_chars = 123, min_chars = 5):
line = next(afile)
for num, aline in enumerate(afile):
aline = aline.strip()
if (len(aline) < min_chars or aline[0].islower() or len(aline) > max_chars) or random.randrange(num + 2):
continue
line = aline
+ #Be polite, put things back in the place you found them
+ afile.seek(0)
return line
def prepare_quote(text_file, signature=" -- Hamlet", max_chars = 123, min_chars = 5,):
line = random_line(text_file, max_chars, min_chars)
number = random.randrange(1,1000,2)
line = "{0}] " + line + signature
line = line.format(number)
return line
|
Reset pointer to the beginning of the file once it has been read
|
## Code Before:
import random
def random_line(afile, max_chars = 123, min_chars = 5):
line = next(afile)
for num, aline in enumerate(afile):
aline = aline.strip()
if (len(aline) < min_chars or aline[0].islower() or len(aline) > max_chars) or random.randrange(num + 2):
continue
line = aline
return line
def prepare_quote(text_file, signature=" -- Hamlet", max_chars = 123, min_chars = 5,):
line = random_line(text_file, max_chars, min_chars)
number = random.randrange(1,1000,2)
line = "{0}] " + line + signature
line = line.format(number)
return line
## Instruction:
Reset pointer to the beginning of the file once it has been read
## Code After:
import random
def random_line(afile, max_chars = 123, min_chars = 5):
line = next(afile)
for num, aline in enumerate(afile):
aline = aline.strip()
if (len(aline) < min_chars or aline[0].islower() or len(aline) > max_chars) or random.randrange(num + 2):
continue
line = aline
#Be polite, put things back in the place you found them
afile.seek(0)
return line
def prepare_quote(text_file, signature=" -- Hamlet", max_chars = 123, min_chars = 5,):
line = random_line(text_file, max_chars, min_chars)
number = random.randrange(1,1000,2)
line = "{0}] " + line + signature
line = line.format(number)
return line
|
// ... existing code ...
if (len(aline) < min_chars or aline[0].islower() or len(aline) > max_chars) or random.randrange(num + 2):
continue
line = aline
#Be polite, put things back in the place you found them
afile.seek(0)
return line
def prepare_quote(text_file, signature=" -- Hamlet", max_chars = 123, min_chars = 5,):
// ... rest of the code ...
|
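A brief usage sketch of why the added afile.seek(0) matters for callers that reuse the same file handle (the filename here is hypothetical):
with open("hamlet.txt") as f:        # hypothetical quotes file
    first = random_line(f)
    second = random_line(f)          # works only because random_line() rewound the handle
# Without the seek(0), the handle would already be at EOF on the second call and
# next(afile) would raise StopIteration instead of returning a line.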
2459239188b4a6f9e46363ef84fc9dc252793774
|
trie_search/record_trie.py
|
trie_search/record_trie.py
|
from marisa_trie import RecordTrie
from .trie import TrieSearch
class RecordTrieSearch(RecordTrie, TrieSearch):
def __init__(self, record_format, records=None, filepath=None):
super(RecordTrieSearch, self).__init__(record_format, records)
if filepath:
self.load(filepath)
def search_all_patterns(self, text, splitter=u' ', min_weight=0.0):
for pattern, start_idx in super(
RecordTrie, self).search_all_patterns(text, splitter):
weight = self[pattern][0][0]
if weight < min_weight:
continue
yield pattern, start_idx, weight
def search_longest_patterns(self, text, splitter=u' ', min_weight=0.0):
all_patterns = self.search_all_patterns(text, splitter, min_weight)
check_field = [0] * len(text)
for pattern, start_idx, weight in sorted(
all_patterns,
key=lambda x: len(x[0].split(splitter)),
reverse=True):
target_field = check_field[start_idx:start_idx + len(pattern)]
check_sum = sum(target_field)
if check_sum != len(target_field):
for i in range(len(pattern)):
check_field[start_idx + i] = 1
yield pattern, start_idx, weight
|
from marisa_trie import RecordTrie
from .trie import TrieSearch
class RecordTrieSearch(RecordTrie, TrieSearch):
def __init__(self, record_format, records=None, filepath=None):
super(RecordTrieSearch, self).__init__(record_format, records)
if filepath:
self.load(filepath)
def search_all_patterns(self, text, splitter=u' ', min_weight=0.0):
for pattern, start_idx in super(
RecordTrie, self).search_all_patterns(text, splitter):
weight = self[pattern][0][0]
if weight < min_weight:
continue
yield pattern, start_idx, weight
def search_longest_patterns(self, text, splitter=u' ', min_weight=0.0):
all_patterns = self.search_all_patterns(text, splitter, min_weight)
check_field = [0] * len(text)
for pattern, start_idx, weight in sorted(
all_patterns, key=lambda x: len(x[0]), reverse=True):
target_field = check_field[start_idx:start_idx + len(pattern)]
check_sum = sum(target_field)
if check_sum != len(target_field):
for i in range(len(pattern)):
check_field[start_idx + i] = 1
yield pattern, start_idx, weight
|
Modify the condition for selection of longest patterns
|
Modify the condition for selection of longest patterns
|
Python
|
mit
|
nkmrtty/trie-search
|
from marisa_trie import RecordTrie
from .trie import TrieSearch
class RecordTrieSearch(RecordTrie, TrieSearch):
def __init__(self, record_format, records=None, filepath=None):
super(RecordTrieSearch, self).__init__(record_format, records)
if filepath:
self.load(filepath)
def search_all_patterns(self, text, splitter=u' ', min_weight=0.0):
for pattern, start_idx in super(
RecordTrie, self).search_all_patterns(text, splitter):
weight = self[pattern][0][0]
if weight < min_weight:
continue
yield pattern, start_idx, weight
def search_longest_patterns(self, text, splitter=u' ', min_weight=0.0):
all_patterns = self.search_all_patterns(text, splitter, min_weight)
check_field = [0] * len(text)
for pattern, start_idx, weight in sorted(
+ all_patterns, key=lambda x: len(x[0]), reverse=True):
- all_patterns,
- key=lambda x: len(x[0].split(splitter)),
- reverse=True):
target_field = check_field[start_idx:start_idx + len(pattern)]
check_sum = sum(target_field)
if check_sum != len(target_field):
for i in range(len(pattern)):
check_field[start_idx + i] = 1
yield pattern, start_idx, weight
|
Modify the condition for selection of longest patterns
|
## Code Before:
from marisa_trie import RecordTrie
from .trie import TrieSearch
class RecordTrieSearch(RecordTrie, TrieSearch):
def __init__(self, record_format, records=None, filepath=None):
super(RecordTrieSearch, self).__init__(record_format, records)
if filepath:
self.load(filepath)
def search_all_patterns(self, text, splitter=u' ', min_weight=0.0):
for pattern, start_idx in super(
RecordTrie, self).search_all_patterns(text, splitter):
weight = self[pattern][0][0]
if weight < min_weight:
continue
yield pattern, start_idx, weight
def search_longest_patterns(self, text, splitter=u' ', min_weight=0.0):
all_patterns = self.search_all_patterns(text, splitter, min_weight)
check_field = [0] * len(text)
for pattern, start_idx, weight in sorted(
all_patterns,
key=lambda x: len(x[0].split(splitter)),
reverse=True):
target_field = check_field[start_idx:start_idx + len(pattern)]
check_sum = sum(target_field)
if check_sum != len(target_field):
for i in range(len(pattern)):
check_field[start_idx + i] = 1
yield pattern, start_idx, weight
## Instruction:
Modify the condition for selection of longest patterns
## Code After:
from marisa_trie import RecordTrie
from .trie import TrieSearch
class RecordTrieSearch(RecordTrie, TrieSearch):
def __init__(self, record_format, records=None, filepath=None):
super(RecordTrieSearch, self).__init__(record_format, records)
if filepath:
self.load(filepath)
def search_all_patterns(self, text, splitter=u' ', min_weight=0.0):
for pattern, start_idx in super(
RecordTrie, self).search_all_patterns(text, splitter):
weight = self[pattern][0][0]
if weight < min_weight:
continue
yield pattern, start_idx, weight
def search_longest_patterns(self, text, splitter=u' ', min_weight=0.0):
all_patterns = self.search_all_patterns(text, splitter, min_weight)
check_field = [0] * len(text)
for pattern, start_idx, weight in sorted(
all_patterns, key=lambda x: len(x[0]), reverse=True):
target_field = check_field[start_idx:start_idx + len(pattern)]
check_sum = sum(target_field)
if check_sum != len(target_field):
for i in range(len(pattern)):
check_field[start_idx + i] = 1
yield pattern, start_idx, weight
|
# ... existing code ...
all_patterns = self.search_all_patterns(text, splitter, min_weight)
check_field = [0] * len(text)
for pattern, start_idx, weight in sorted(
all_patterns, key=lambda x: len(x[0]), reverse=True):
target_field = check_field[start_idx:start_idx + len(pattern)]
check_sum = sum(target_field)
if check_sum != len(target_field):
# ... rest of the code ...
|
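A small worked example of what the changed sort key does (illustrative values, not from the project):
patterns = ["new york", "a b c", "san francisco bay"]
sorted(patterns, key=lambda p: len(p.split(' ')), reverse=True)
# old key (word count):       ['a b c', 'san francisco bay', 'new york']
sorted(patterns, key=lambda p: len(p), reverse=True)
# new key (character length): ['san francisco bay', 'new york', 'a b c']
# The character-based key lines up with how check_field masks overlaps, since the mask is
# indexed by character positions (start_idx .. start_idx + len(pattern)).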
969344a4ed822eafcfbf7bd9d666ca45bf38168f
|
mass_mailing_partner/wizard/partner_merge.py
|
mass_mailing_partner/wizard/partner_merge.py
|
from odoo import models
class BasePartnerMergeAutomaticWizard(models.TransientModel):
_inherit = "base.partner.merge.automatic.wizard"
def _merge(self, partner_ids, dst_partner=None, extra_checks=True):
if dst_partner:
contacts = self.env["mailing.contact"].search(
[("partner_id", "in", partner_ids)]
)
if contacts:
contacts = contacts.sorted(
lambda x: 1 if x.partner_id == dst_partner else 0
)
list_ids = contacts.mapped("list_ids").ids
contacts[1:].unlink()
contacts[0].partner_id = dst_partner
contacts[0].list_ids = [(4, x) for x in list_ids]
return super()._merge(
partner_ids, dst_partner=dst_partner, extra_checks=extra_checks
)
|
from odoo import models
class BasePartnerMergeAutomaticWizard(models.TransientModel):
_inherit = "base.partner.merge.automatic.wizard"
def _merge(self, partner_ids, dst_partner=None, extra_checks=True):
if dst_partner:
contacts = (
self.env["mailing.contact"]
.sudo()
.search([("partner_id", "in", partner_ids)])
)
if contacts:
contacts = contacts.sorted(
lambda x: 1 if x.partner_id == dst_partner else 0
)
list_ids = contacts.mapped("list_ids").ids
contacts[1:].unlink()
contacts[0].partner_id = dst_partner
contacts[0].list_ids = [(4, x) for x in list_ids]
return super()._merge(
partner_ids, dst_partner=dst_partner, extra_checks=extra_checks
)
|
Add sudo() to prevent errors when a user without mailing access tries to merge contacts
|
[FIX] mass_mailing_partner: Add sudo() to prevent errors when a user without mailing access tries to merge contacts
|
Python
|
agpl-3.0
|
OCA/social,OCA/social,OCA/social
|
from odoo import models
class BasePartnerMergeAutomaticWizard(models.TransientModel):
_inherit = "base.partner.merge.automatic.wizard"
def _merge(self, partner_ids, dst_partner=None, extra_checks=True):
if dst_partner:
+ contacts = (
- contacts = self.env["mailing.contact"].search(
+ self.env["mailing.contact"]
+ .sudo()
- [("partner_id", "in", partner_ids)]
+ .search([("partner_id", "in", partner_ids)])
)
if contacts:
contacts = contacts.sorted(
lambda x: 1 if x.partner_id == dst_partner else 0
)
list_ids = contacts.mapped("list_ids").ids
contacts[1:].unlink()
contacts[0].partner_id = dst_partner
contacts[0].list_ids = [(4, x) for x in list_ids]
return super()._merge(
partner_ids, dst_partner=dst_partner, extra_checks=extra_checks
)
|
Add sudo() to prevent errors when a user without mailing access tries to merge contacts
|
## Code Before:
from odoo import models
class BasePartnerMergeAutomaticWizard(models.TransientModel):
_inherit = "base.partner.merge.automatic.wizard"
def _merge(self, partner_ids, dst_partner=None, extra_checks=True):
if dst_partner:
contacts = self.env["mailing.contact"].search(
[("partner_id", "in", partner_ids)]
)
if contacts:
contacts = contacts.sorted(
lambda x: 1 if x.partner_id == dst_partner else 0
)
list_ids = contacts.mapped("list_ids").ids
contacts[1:].unlink()
contacts[0].partner_id = dst_partner
contacts[0].list_ids = [(4, x) for x in list_ids]
return super()._merge(
partner_ids, dst_partner=dst_partner, extra_checks=extra_checks
)
## Instruction:
Add sudo() to prevent errors when a user without mailing access tries to merge contacts
## Code After:
from odoo import models
class BasePartnerMergeAutomaticWizard(models.TransientModel):
_inherit = "base.partner.merge.automatic.wizard"
def _merge(self, partner_ids, dst_partner=None, extra_checks=True):
if dst_partner:
contacts = (
self.env["mailing.contact"]
.sudo()
.search([("partner_id", "in", partner_ids)])
)
if contacts:
contacts = contacts.sorted(
lambda x: 1 if x.partner_id == dst_partner else 0
)
list_ids = contacts.mapped("list_ids").ids
contacts[1:].unlink()
contacts[0].partner_id = dst_partner
contacts[0].list_ids = [(4, x) for x in list_ids]
return super()._merge(
partner_ids, dst_partner=dst_partner, extra_checks=extra_checks
)
|
...
def _merge(self, partner_ids, dst_partner=None, extra_checks=True):
if dst_partner:
contacts = (
self.env["mailing.contact"]
.sudo()
.search([("partner_id", "in", partner_ids)])
)
if contacts:
contacts = contacts.sorted(
...
|
fae501041857f1e4eea2b5157feb94a3f3c84f18
|
pinax/__init__.py
|
pinax/__init__.py
|
VERSION = (0, 9, 0, "a", 1) # following PEP 386
def get_version():
version = "%s.%s" % (VERSION[0], VERSION[1])
if VERSION[2]:
version = "%s.%s" % (version, VERSION[2])
if VERSION[3] != "f":
version = "%s%s%s" % (version, VERSION[3], VERSION[4])
return version
__version__ = get_version()
|
import os
VERSION = (0, 9, 0, "a", 1) # following PEP 386
def get_version():
version = "%s.%s" % (VERSION[0], VERSION[1])
if VERSION[2]:
version = "%s.%s" % (version, VERSION[2])
if VERSION[3] != "f":
version = "%s%s%s" % (version, VERSION[3], VERSION[4])
dev = os.environ.get("PINAX_DEV_N")
if dev:
version = "%s.dev%s" % (version, dev)
return version
__version__ = get_version()
|
Support development versions using an environment variable
|
Support development versions using an environment variable
|
Python
|
mit
|
amarandon/pinax,amarandon/pinax,amarandon/pinax,amarandon/pinax
|
+ import os
+
+
VERSION = (0, 9, 0, "a", 1) # following PEP 386
def get_version():
version = "%s.%s" % (VERSION[0], VERSION[1])
if VERSION[2]:
version = "%s.%s" % (version, VERSION[2])
if VERSION[3] != "f":
version = "%s%s%s" % (version, VERSION[3], VERSION[4])
+ dev = os.environ.get("PINAX_DEV_N")
+ if dev:
+ version = "%s.dev%s" % (version, dev)
return version
__version__ = get_version()
|
Support development versions using an environment variable
|
## Code Before:
VERSION = (0, 9, 0, "a", 1) # following PEP 386
def get_version():
version = "%s.%s" % (VERSION[0], VERSION[1])
if VERSION[2]:
version = "%s.%s" % (version, VERSION[2])
if VERSION[3] != "f":
version = "%s%s%s" % (version, VERSION[3], VERSION[4])
return version
__version__ = get_version()
## Instruction:
Support development versions using an environment variable
## Code After:
import os
VERSION = (0, 9, 0, "a", 1) # following PEP 386
def get_version():
version = "%s.%s" % (VERSION[0], VERSION[1])
if VERSION[2]:
version = "%s.%s" % (version, VERSION[2])
if VERSION[3] != "f":
version = "%s%s%s" % (version, VERSION[3], VERSION[4])
dev = os.environ.get("PINAX_DEV_N")
if dev:
version = "%s.dev%s" % (version, dev)
return version
__version__ = get_version()
|
// ... existing code ...
import os
VERSION = (0, 9, 0, "a", 1) # following PEP 386
// ... modified code ...
version = "%s.%s" % (version, VERSION[2])
if VERSION[3] != "f":
version = "%s%s%s" % (version, VERSION[3], VERSION[4])
dev = os.environ.get("PINAX_DEV_N")
if dev:
version = "%s.dev%s" % (version, dev)
return version
// ... rest of the code ...
|
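A worked example of the resulting version strings with the module's VERSION = (0, 9, 0, "a", 1) (the environment value below is arbitrary):
import os

get_version()                      # -> '0.9a1'   (VERSION[2] is 0, so the patch part is skipped)
os.environ["PINAX_DEV_N"] = "4"
get_version()                      # -> '0.9a1.dev4'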
ef011470ad361ca50b638461935d344392976821
|
pywwt/misc.py
|
pywwt/misc.py
|
from bs4 import BeautifulSoup
class WWTException(Exception):
pass
def handle_response(resp_str):
soup = BeautifulSoup(resp_str)
success = soup.layerapi.status.string
if success != "Success":
raise WWTException(success)
def parse_kwargs(params, kwargs):
if "date_time" in kwargs:
params["datetime"] = kwargs["date_time"]
if "time_rate" in kwargs:
params["timerate"] = str(kwargs["time_rate"])
if "fly_to" in kwargs:
params["flyto"] = ",".join([str(i) for i in kwargs["fly_to"]])
if "instant" in kwargs:
params["instant"] = str(kwargs["instant"])
if "auto_loop" in kwargs:
params["autoloop"] = str(kwargs["auto_loop"])
|
from bs4 import BeautifulSoup
class WWTException(Exception):
pass
def handle_response(resp_str):
soup = BeautifulSoup(resp_str)
try:
success = soup.layerapi.status.string
if success != "Success":
raise WWTException(success)
except AttributeError:
error = soup.html.body.h2.string
raise WWTException(error)
def parse_kwargs(params, kwargs):
if "date_time" in kwargs:
params["datetime"] = kwargs["date_time"]
if "time_rate" in kwargs:
params["timerate"] = str(kwargs["time_rate"])
if "fly_to" in kwargs:
params["flyto"] = ",".join([str(i) for i in kwargs["fly_to"]])
if "instant" in kwargs:
params["instant"] = str(kwargs["instant"])
if "auto_loop" in kwargs:
params["autoloop"] = str(kwargs["auto_loop"])
|
Handle other kinds of errors.
|
Handle other kinds of errors.
|
Python
|
bsd-3-clause
|
jzuhone/pywwt,vga101/pywwt,vga101/pywwt,jzuhone/pywwt
|
from bs4 import BeautifulSoup
class WWTException(Exception):
pass
def handle_response(resp_str):
soup = BeautifulSoup(resp_str)
+ try:
- success = soup.layerapi.status.string
+ success = soup.layerapi.status.string
- if success != "Success":
+ if success != "Success":
- raise WWTException(success)
+ raise WWTException(success)
+ except AttributeError:
+ error = soup.html.body.h2.string
+ raise WWTException(error)
def parse_kwargs(params, kwargs):
if "date_time" in kwargs:
params["datetime"] = kwargs["date_time"]
if "time_rate" in kwargs:
params["timerate"] = str(kwargs["time_rate"])
if "fly_to" in kwargs:
params["flyto"] = ",".join([str(i) for i in kwargs["fly_to"]])
if "instant" in kwargs:
params["instant"] = str(kwargs["instant"])
if "auto_loop" in kwargs:
params["autoloop"] = str(kwargs["auto_loop"])
|
Handle other kinds of errors.
|
## Code Before:
from bs4 import BeautifulSoup
class WWTException(Exception):
pass
def handle_response(resp_str):
soup = BeautifulSoup(resp_str)
success = soup.layerapi.status.string
if success != "Success":
raise WWTException(success)
def parse_kwargs(params, kwargs):
if "date_time" in kwargs:
params["datetime"] = kwargs["date_time"]
if "time_rate" in kwargs:
params["timerate"] = str(kwargs["time_rate"])
if "fly_to" in kwargs:
params["flyto"] = ",".join([str(i) for i in kwargs["fly_to"]])
if "instant" in kwargs:
params["instant"] = str(kwargs["instant"])
if "auto_loop" in kwargs:
params["autoloop"] = str(kwargs["auto_loop"])
## Instruction:
Handle other kinds of errors.
## Code After:
from bs4 import BeautifulSoup
class WWTException(Exception):
pass
def handle_response(resp_str):
soup = BeautifulSoup(resp_str)
try:
success = soup.layerapi.status.string
if success != "Success":
raise WWTException(success)
except AttributeError:
error = soup.html.body.h2.string
raise WWTException(error)
def parse_kwargs(params, kwargs):
if "date_time" in kwargs:
params["datetime"] = kwargs["date_time"]
if "time_rate" in kwargs:
params["timerate"] = str(kwargs["time_rate"])
if "fly_to" in kwargs:
params["flyto"] = ",".join([str(i) for i in kwargs["fly_to"]])
if "instant" in kwargs:
params["instant"] = str(kwargs["instant"])
if "auto_loop" in kwargs:
params["autoloop"] = str(kwargs["auto_loop"])
|
# ... existing code ...
def handle_response(resp_str):
soup = BeautifulSoup(resp_str)
try:
success = soup.layerapi.status.string
if success != "Success":
raise WWTException(success)
except AttributeError:
error = soup.html.body.h2.string
raise WWTException(error)
def parse_kwargs(params, kwargs):
if "date_time" in kwargs:
# ... rest of the code ...
|
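Two illustrative response bodies showing both paths through the new try/except; the shapes are inferred from the parsing code and the error text is hypothetical, not taken from the WWT documentation:
ok = "<LayerApi><Status>Success</Status></LayerApi>"
handle_response(ok)     # soup.layerapi.status is "Success" -> no exception

err = "<html><body><h2>IP Not Authorized</h2></body></html>"   # hypothetical error page
handle_response(err)    # soup.layerapi is None -> AttributeError -> raises WWTException('IP Not Authorized')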
ef003a3ebf14545927d055a0deda7e1982e90e53
|
scripts/capnp_test_pycapnp.py
|
scripts/capnp_test_pycapnp.py
|
import capnp
import os
capnp.add_import_hook([os.getcwd(), "/usr/local/include/"]) # change this to be auto-detected?
import test_capnp
import sys
def decode(name):
print getattr(test_capnp, name)._short_str()
def encode(name):
val = getattr(test_capnp, name)
class_name = name[0].upper() + name[1:]
message = getattr(test_capnp, class_name).from_dict(val.to_dict())
print message.to_bytes()
if sys.argv[1] == 'decode':
decode(sys.argv[2])
else:
encode(sys.argv[2])
|
from __future__ import print_function
import capnp
import os
capnp.add_import_hook([os.getcwd(), "/usr/local/include/"]) # change this to be auto-detected?
import test_capnp
import sys
def decode(name):
class_name = name[0].upper() + name[1:]
print(getattr(test_capnp, class_name).from_bytes(sys.stdin.read())._short_str())
def encode(name):
val = getattr(test_capnp, name)
class_name = name[0].upper() + name[1:]
message = getattr(test_capnp, class_name).from_dict(val.to_dict())
print(message.to_bytes())
if sys.argv[1] == 'decode':
decode(sys.argv[2])
else:
encode(sys.argv[2])
|
Fix decode test to actually decode message from stdin
|
Fix decode test to actually decode message from stdin
|
Python
|
bsd-2-clause
|
tempbottle/pycapnp,tempbottle/pycapnp,SymbiFlow/pycapnp,jparyani/pycapnp,SymbiFlow/pycapnp,SymbiFlow/pycapnp,rcrowder/pycapnp,jparyani/pycapnp,jparyani/pycapnp,rcrowder/pycapnp,SymbiFlow/pycapnp,jparyani/pycapnp,tempbottle/pycapnp,rcrowder/pycapnp,rcrowder/pycapnp,tempbottle/pycapnp
|
+ from __future__ import print_function
import capnp
import os
capnp.add_import_hook([os.getcwd(), "/usr/local/include/"]) # change this to be auto-detected?
import test_capnp
import sys
def decode(name):
- print getattr(test_capnp, name)._short_str()
+ class_name = name[0].upper() + name[1:]
+ print(getattr(test_capnp, class_name).from_bytes(sys.stdin.read())._short_str())
def encode(name):
val = getattr(test_capnp, name)
class_name = name[0].upper() + name[1:]
message = getattr(test_capnp, class_name).from_dict(val.to_dict())
- print message.to_bytes()
+ print(message.to_bytes())
if sys.argv[1] == 'decode':
decode(sys.argv[2])
else:
encode(sys.argv[2])
|
Fix decode test to actually decode message from stdin
|
## Code Before:
import capnp
import os
capnp.add_import_hook([os.getcwd(), "/usr/local/include/"]) # change this to be auto-detected?
import test_capnp
import sys
def decode(name):
print getattr(test_capnp, name)._short_str()
def encode(name):
val = getattr(test_capnp, name)
class_name = name[0].upper() + name[1:]
message = getattr(test_capnp, class_name).from_dict(val.to_dict())
print message.to_bytes()
if sys.argv[1] == 'decode':
decode(sys.argv[2])
else:
encode(sys.argv[2])
## Instruction:
Fix decode test to actually decode message from stdin
## Code After:
from __future__ import print_function
import capnp
import os
capnp.add_import_hook([os.getcwd(), "/usr/local/include/"]) # change this to be auto-detected?
import test_capnp
import sys
def decode(name):
class_name = name[0].upper() + name[1:]
print(getattr(test_capnp, class_name).from_bytes(sys.stdin.read())._short_str())
def encode(name):
val = getattr(test_capnp, name)
class_name = name[0].upper() + name[1:]
message = getattr(test_capnp, class_name).from_dict(val.to_dict())
print(message.to_bytes())
if sys.argv[1] == 'decode':
decode(sys.argv[2])
else:
encode(sys.argv[2])
|
# ... existing code ...
from __future__ import print_function
import capnp
import os
capnp.add_import_hook([os.getcwd(), "/usr/local/include/"]) # change this to be auto-detected?
# ... modified code ...
import sys
def decode(name):
class_name = name[0].upper() + name[1:]
print(getattr(test_capnp, class_name).from_bytes(sys.stdin.read())._short_str())
def encode(name):
val = getattr(test_capnp, name)
class_name = name[0].upper() + name[1:]
message = getattr(test_capnp, class_name).from_dict(val.to_dict())
print(message.to_bytes())
if sys.argv[1] == 'decode':
decode(sys.argv[2])
# ... rest of the code ...
|
23072e882edb6da55cb12ef0591a786235249670
|
ome/__main__.py
|
ome/__main__.py
|
import sys
from .command import command_args
from .error import OmeError
from .terminal import stderr
def main():
stderr.reset()
try:
from . import compiler
target = compiler.get_target(command_args.target)
build_options = compiler.BuildOptions(target)
backend = compiler.get_backend(target, command_args.backend)
if command_args.verbose:
print('ome: using target {}'.format(target.name))
print('ome: using backend {} {}'.format(backend.name, backend.version))
for filename in command_args.filename:
if command_args.verbose:
print('ome: compiling {}'.format(filename))
if command_args.print_code:
print(compiler.compile_file(filename, target).decode(target.encoding))
else:
build_options.make_executable(filename, backend)
except OmeError as error:
error.write_ansi(stderr)
stderr.reset()
sys.exit(1)
if __name__ == '__main__':
if sys.version_info[0] < 3:
sys.exit('ome: error: please use python 3.x')
main()
|
import sys
from .command import command_args
from .error import OmeError
from .terminal import stderr
def print_verbose(*args, **kwargs):
if command_args.verbose:
print(*args, **kwargs)
def main():
stderr.reset()
try:
from . import compiler
target = compiler.get_target(command_args.target)
build_options = compiler.BuildOptions(target)
backend = compiler.get_backend(target, command_args.backend)
print_verbose('ome: using target {}'.format(target.name))
print_verbose('ome: using backend {} {}'.format(backend.name, backend.version))
for filename in command_args.filename:
print_verbose('ome: compiling {}'.format(filename))
if command_args.print_code:
print(compiler.compile_file(filename, target).decode(target.encoding))
else:
build_options.make_executable(filename, backend)
except OmeError as error:
error.write_ansi(stderr)
stderr.reset()
sys.exit(1)
if __name__ == '__main__':
if sys.version_info[0] < 3:
sys.exit('ome: error: please use python 3.x')
main()
|
Use print_verbose for conditional printing.
|
Use print_verbose for conditional printing.
|
Python
|
mit
|
shaurz/ome,shaurz/ome
|
import sys
from .command import command_args
from .error import OmeError
from .terminal import stderr
+
+ def print_verbose(*args, **kwargs):
+ if command_args.verbose:
+ print(*args, **kwargs)
def main():
stderr.reset()
try:
from . import compiler
target = compiler.get_target(command_args.target)
build_options = compiler.BuildOptions(target)
backend = compiler.get_backend(target, command_args.backend)
- if command_args.verbose:
- print('ome: using target {}'.format(target.name))
+ print_verbose('ome: using target {}'.format(target.name))
- print('ome: using backend {} {}'.format(backend.name, backend.version))
+ print_verbose('ome: using backend {} {}'.format(backend.name, backend.version))
for filename in command_args.filename:
- if command_args.verbose:
- print('ome: compiling {}'.format(filename))
+ print_verbose('ome: compiling {}'.format(filename))
if command_args.print_code:
print(compiler.compile_file(filename, target).decode(target.encoding))
else:
build_options.make_executable(filename, backend)
except OmeError as error:
error.write_ansi(stderr)
stderr.reset()
sys.exit(1)
if __name__ == '__main__':
if sys.version_info[0] < 3:
sys.exit('ome: error: please use python 3.x')
main()
|
Use print_verbose for conditional printing.
|
## Code Before:
import sys
from .command import command_args
from .error import OmeError
from .terminal import stderr
def main():
stderr.reset()
try:
from . import compiler
target = compiler.get_target(command_args.target)
build_options = compiler.BuildOptions(target)
backend = compiler.get_backend(target, command_args.backend)
if command_args.verbose:
print('ome: using target {}'.format(target.name))
print('ome: using backend {} {}'.format(backend.name, backend.version))
for filename in command_args.filename:
if command_args.verbose:
print('ome: compiling {}'.format(filename))
if command_args.print_code:
print(compiler.compile_file(filename, target).decode(target.encoding))
else:
build_options.make_executable(filename, backend)
except OmeError as error:
error.write_ansi(stderr)
stderr.reset()
sys.exit(1)
if __name__ == '__main__':
if sys.version_info[0] < 3:
sys.exit('ome: error: please use python 3.x')
main()
## Instruction:
Use print_verbose for conditional printing.
## Code After:
import sys
from .command import command_args
from .error import OmeError
from .terminal import stderr
def print_verbose(*args, **kwargs):
if command_args.verbose:
print(*args, **kwargs)
def main():
stderr.reset()
try:
from . import compiler
target = compiler.get_target(command_args.target)
build_options = compiler.BuildOptions(target)
backend = compiler.get_backend(target, command_args.backend)
print_verbose('ome: using target {}'.format(target.name))
print_verbose('ome: using backend {} {}'.format(backend.name, backend.version))
for filename in command_args.filename:
print_verbose('ome: compiling {}'.format(filename))
if command_args.print_code:
print(compiler.compile_file(filename, target).decode(target.encoding))
else:
build_options.make_executable(filename, backend)
except OmeError as error:
error.write_ansi(stderr)
stderr.reset()
sys.exit(1)
if __name__ == '__main__':
if sys.version_info[0] < 3:
sys.exit('ome: error: please use python 3.x')
main()
|
...
from .command import command_args
from .error import OmeError
from .terminal import stderr
def print_verbose(*args, **kwargs):
if command_args.verbose:
print(*args, **kwargs)
def main():
stderr.reset()
...
target = compiler.get_target(command_args.target)
build_options = compiler.BuildOptions(target)
backend = compiler.get_backend(target, command_args.backend)
print_verbose('ome: using target {}'.format(target.name))
print_verbose('ome: using backend {} {}'.format(backend.name, backend.version))
for filename in command_args.filename:
print_verbose('ome: compiling {}'.format(filename))
if command_args.print_code:
print(compiler.compile_file(filename, target).decode(target.encoding))
else:
...
|
d8f6938649acd4a72a53d47c26a1b16adb0e8fe3
|
jupyterlab_gitsync/jupyterlab_gitsync/__init__.py
|
jupyterlab_gitsync/jupyterlab_gitsync/__init__.py
|
from notebook.utils import url_path_join
from jupyterlab_gitsync.handlers import *
from jupyterlab_gitsync.version import VERSION
__version__ = VERSION
def _jupyter_server_extension_paths():
return [{'module': 'jupyterlab_gitsync'}]
def load_jupyter_server_extension(nb_server_app):
"""
Called when the extension is loaded.
Args:
nb_server_app (NotebookWebApplication): handle to the Notebook webserver instance.
"""
host_pattern = '.*$'
app = nb_server_app.web_app
gcp_v1_endpoint = url_path_join(
app.settings['base_url'], 'jupyterlab_gitsync', 'v1')
app.add_handlers(host_pattern, [
(url_path_join(gcp_v1_endpoint, 'sync') + '(.*)', SyncHandler),
(url_path_join(gcp_v1_endpoint, 'setup') + '(.*)', SetupHandler)
])
|
from notebook.utils import url_path_join
from jupyterlab_gitsync.handlers import *
from jupyterlab_gitsync.version import VERSION
__version__ = VERSION
def _jupyter_server_extension_paths():
return [{'module': 'jupyterlab_gitsync'}]
def load_jupyter_server_extension(nb_server_app):
"""
Called when the extension is loaded.
Args:
nb_server_app (NotebookWebApplication): handle to the Notebook webserver instance.
"""
host_pattern = '.*$'
app = nb_server_app.web_app
gcp_v1_endpoint = url_path_join(
app.settings['base_url'], 'jupyterlab_gitsync', 'v1')
app.add_handlers(host_pattern, [
(url_path_join(gcp_v1_endpoint, 'sync') + '(.*)', SyncHandler),
(url_path_join(gcp_v1_endpoint, 'setup') + '(.*)', SetupHandler)
])
|
Fix indentation to pass tests
|
Fix indentation to pass tests
|
Python
|
apache-2.0
|
GoogleCloudPlatform/jupyter-extensions,GoogleCloudPlatform/jupyter-extensions,GoogleCloudPlatform/jupyter-extensions,GoogleCloudPlatform/jupyter-extensions
|
from notebook.utils import url_path_join
from jupyterlab_gitsync.handlers import *
from jupyterlab_gitsync.version import VERSION
__version__ = VERSION
def _jupyter_server_extension_paths():
return [{'module': 'jupyterlab_gitsync'}]
def load_jupyter_server_extension(nb_server_app):
"""
- Called when the extension is loaded.
+ Called when the extension is loaded.
- Args:
+ Args:
- nb_server_app (NotebookWebApplication): handle to the Notebook webserver instance.
+ nb_server_app (NotebookWebApplication): handle to the Notebook webserver instance.
- """
+ """
- host_pattern = '.*$'
+ host_pattern = '.*$'
- app = nb_server_app.web_app
+ app = nb_server_app.web_app
- gcp_v1_endpoint = url_path_join(
+ gcp_v1_endpoint = url_path_join(
- app.settings['base_url'], 'jupyterlab_gitsync', 'v1')
+ app.settings['base_url'], 'jupyterlab_gitsync', 'v1')
- app.add_handlers(host_pattern, [
+ app.add_handlers(host_pattern, [
- (url_path_join(gcp_v1_endpoint, 'sync') + '(.*)', SyncHandler),
+ (url_path_join(gcp_v1_endpoint, 'sync') + '(.*)', SyncHandler),
- (url_path_join(gcp_v1_endpoint, 'setup') + '(.*)', SetupHandler)
+ (url_path_join(gcp_v1_endpoint, 'setup') + '(.*)', SetupHandler)
- ])
+ ])
|
Fix indentation to pass tests
|
## Code Before:
from notebook.utils import url_path_join
from jupyterlab_gitsync.handlers import *
from jupyterlab_gitsync.version import VERSION
__version__ = VERSION
def _jupyter_server_extension_paths():
return [{'module': 'jupyterlab_gitsync'}]
def load_jupyter_server_extension(nb_server_app):
"""
Called when the extension is loaded.
Args:
nb_server_app (NotebookWebApplication): handle to the Notebook webserver instance.
"""
host_pattern = '.*$'
app = nb_server_app.web_app
gcp_v1_endpoint = url_path_join(
app.settings['base_url'], 'jupyterlab_gitsync', 'v1')
app.add_handlers(host_pattern, [
(url_path_join(gcp_v1_endpoint, 'sync') + '(.*)', SyncHandler),
(url_path_join(gcp_v1_endpoint, 'setup') + '(.*)', SetupHandler)
])
## Instruction:
Fix indentation to pass tests
## Code After:
from notebook.utils import url_path_join
from jupyterlab_gitsync.handlers import *
from jupyterlab_gitsync.version import VERSION
__version__ = VERSION
def _jupyter_server_extension_paths():
return [{'module': 'jupyterlab_gitsync'}]
def load_jupyter_server_extension(nb_server_app):
"""
Called when the extension is loaded.
Args:
nb_server_app (NotebookWebApplication): handle to the Notebook webserver instance.
"""
host_pattern = '.*$'
app = nb_server_app.web_app
gcp_v1_endpoint = url_path_join(
app.settings['base_url'], 'jupyterlab_gitsync', 'v1')
app.add_handlers(host_pattern, [
(url_path_join(gcp_v1_endpoint, 'sync') + '(.*)', SyncHandler),
(url_path_join(gcp_v1_endpoint, 'setup') + '(.*)', SetupHandler)
])
|
# ... existing code ...
def load_jupyter_server_extension(nb_server_app):
"""
Called when the extension is loaded.
Args:
nb_server_app (NotebookWebApplication): handle to the Notebook webserver instance.
"""
host_pattern = '.*$'
app = nb_server_app.web_app
gcp_v1_endpoint = url_path_join(
app.settings['base_url'], 'jupyterlab_gitsync', 'v1')
app.add_handlers(host_pattern, [
(url_path_join(gcp_v1_endpoint, 'sync') + '(.*)', SyncHandler),
(url_path_join(gcp_v1_endpoint, 'setup') + '(.*)', SetupHandler)
])
# ... rest of the code ...
|
2c7dc769874766b230bc11c7ec6f67d3c1157005
|
duplicatefiledir/__init__.py
|
duplicatefiledir/__init__.py
|
from fman import DirectoryPaneCommand, show_alert
import distutils
from distutils import dir_util, file_util
import os.path
class DuplicateFileDir(DirectoryPaneCommand):
def __call__(self):
selected_files = self.pane.get_selected_files()
if len(selected_files) >= 1 or (len(selected_files) == 0 and self.get_chosen_files()):
if len(selected_files) == 0 and self.get_chosen_files():
selected_files.append(self.get_chosen_files()[0])
#
# Loop through each file/directory selected.
#
for filedir in selected_files:
if os.path.isdir(filedir):
#
# It is a directory. Process as a directory.
#
newDir = filedir + "-copy"
distutils.dir_util.copy_tree(filedir,newDir)
else:
#
# It is a file. Process as a file.
#
dirPath, ofilenmc = os.path.split(filedir)
ofilenm, ext = os.path.splitext(ofilenmc)
nfilenm = os.path.join(dirPath,ofilenm + "-copy" + ext)
distutils.file_util.copy_file(filedir,nfilenm)
|
from fman import DirectoryPaneCommand, show_alert
from urllib.parse import urlparse
import os.path
from shutil import copytree, copyfile
class DuplicateFileDir(DirectoryPaneCommand):
def __call__(self):
selected_files = self.pane.get_selected_files()
if len(selected_files) >= 1 or (len(selected_files) == 0 and self.get_chosen_files()):
if len(selected_files) == 0 and self.get_chosen_files():
selected_files.append(self.get_chosen_files()[0])
#
# Loop through each file/directory selected.
#
for filedir in selected_files:
p = urlparse(filedir)
filepath = os.path.abspath(os.path.join(p.netloc, p.path))
if os.path.isdir(filepath):
#
# It is a directory. Process as a directory.
#
newDir = filepath + "-copy"
copytree(filepath, newDir)
else:
if os.path.isfile(filepath):
#
# It is a file. Process as a file.
#
dirPath, ofilenmc = os.path.split(filepath)
ofilenm, ext = os.path.splitext(ofilenmc)
nfilenm = os.path.join(dirPath,ofilenm + "-copy" + ext)
copyfile(filepath, nfilenm)
else:
show_alert('Bad file path : {0}'.format(filepath))
|
Make it work with the latest fman version (0.7) on Linux
|
Make it work with the latest fman version (0.7) on Linux
|
Python
|
mit
|
raguay/DuplicateFileDir
|
from fman import DirectoryPaneCommand, show_alert
+ from urllib.parse import urlparse
- import distutils
- from distutils import dir_util, file_util
import os.path
+ from shutil import copytree, copyfile
class DuplicateFileDir(DirectoryPaneCommand):
def __call__(self):
selected_files = self.pane.get_selected_files()
if len(selected_files) >= 1 or (len(selected_files) == 0 and self.get_chosen_files()):
if len(selected_files) == 0 and self.get_chosen_files():
selected_files.append(self.get_chosen_files()[0])
#
# Loop through each file/directory selected.
#
for filedir in selected_files:
+ p = urlparse(filedir)
+ filepath = os.path.abspath(os.path.join(p.netloc, p.path))
- if os.path.isdir(filedir):
+ if os.path.isdir(filepath):
#
# It is a directory. Process as a directory.
#
- newDir = filedir + "-copy"
+ newDir = filepath + "-copy"
- distutils.dir_util.copy_tree(filedir,newDir)
+ copytree(filepath, newDir)
else:
+ if os.path.isfile(filepath):
- #
+ #
- # It is a file. Process as a file.
+ # It is a file. Process as a file.
- #
+ #
- dirPath, ofilenmc = os.path.split(filedir)
+ dirPath, ofilenmc = os.path.split(filepath)
- ofilenm, ext = os.path.splitext(ofilenmc)
+ ofilenm, ext = os.path.splitext(ofilenmc)
- nfilenm = os.path.join(dirPath,ofilenm + "-copy" + ext)
+ nfilenm = os.path.join(dirPath,ofilenm + "-copy" + ext)
- distutils.file_util.copy_file(filedir,nfilenm)
+ copyfile(filepath, nfilenm)
+ else:
+ show_alert('Bad file path : {0}'.format(filepath))
|
Make it work with the latest fman version (0.7) on Linux
|
## Code Before:
from fman import DirectoryPaneCommand, show_alert
import distutils
from distutils import dir_util, file_util
import os.path
class DuplicateFileDir(DirectoryPaneCommand):
def __call__(self):
selected_files = self.pane.get_selected_files()
if len(selected_files) >= 1 or (len(selected_files) == 0 and self.get_chosen_files()):
if len(selected_files) == 0 and self.get_chosen_files():
selected_files.append(self.get_chosen_files()[0])
#
# Loop through each file/directory selected.
#
for filedir in selected_files:
if os.path.isdir(filedir):
#
# It is a directory. Process as a directory.
#
newDir = filedir + "-copy"
distutils.dir_util.copy_tree(filedir,newDir)
else:
#
# It is a file. Process as a file.
#
dirPath, ofilenmc = os.path.split(filedir)
ofilenm, ext = os.path.splitext(ofilenmc)
nfilenm = os.path.join(dirPath,ofilenm + "-copy" + ext)
distutils.file_util.copy_file(filedir,nfilenm)
## Instruction:
Make it work with the latest fman version (0.7) on Linux
## Code After:
from fman import DirectoryPaneCommand, show_alert
from urllib.parse import urlparse
import os.path
from shutil import copytree, copyfile
class DuplicateFileDir(DirectoryPaneCommand):
def __call__(self):
selected_files = self.pane.get_selected_files()
if len(selected_files) >= 1 or (len(selected_files) == 0 and self.get_chosen_files()):
if len(selected_files) == 0 and self.get_chosen_files():
selected_files.append(self.get_chosen_files()[0])
#
# Loop through each file/directory selected.
#
for filedir in selected_files:
p = urlparse(filedir)
filepath = os.path.abspath(os.path.join(p.netloc, p.path))
if os.path.isdir(filepath):
#
# It is a directory. Process as a directory.
#
newDir = filepath + "-copy"
copytree(filepath, newDir)
else:
if os.path.isfile(filepath):
#
# It is a file. Process as a file.
#
dirPath, ofilenmc = os.path.split(filepath)
ofilenm, ext = os.path.splitext(ofilenmc)
nfilenm = os.path.join(dirPath,ofilenm + "-copy" + ext)
copyfile(filepath, nfilenm)
else:
show_alert('Bad file path : {0}'.format(filepath))
|
...
from fman import DirectoryPaneCommand, show_alert
from urllib.parse import urlparse
import os.path
from shutil import copytree, copyfile
class DuplicateFileDir(DirectoryPaneCommand):
def __call__(self):
...
# Loop through each file/directory selected.
#
for filedir in selected_files:
p = urlparse(filedir)
filepath = os.path.abspath(os.path.join(p.netloc, p.path))
if os.path.isdir(filepath):
#
# It is a directory. Process as a directory.
#
newDir = filepath + "-copy"
copytree(filepath, newDir)
else:
if os.path.isfile(filepath):
#
# It is a file. Process as a file.
#
dirPath, ofilenmc = os.path.split(filepath)
ofilenm, ext = os.path.splitext(ofilenmc)
nfilenm = os.path.join(dirPath,ofilenm + "-copy" + ext)
copyfile(filepath, nfilenm)
else:
show_alert('Bad file path : {0}'.format(filepath))
...
|
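A minimal standalone sketch of the path handling at the heart of the record above: fman 0.7 hands DirectoryPaneCommand a file:// URL rather than a bare filesystem path, and urlparse plus os.path recovers something the shutil copy helpers can use. The helper name and sample URLs here are illustrative only, not part of the repository.
from urllib.parse import urlparse
import os.path
def url_to_local_path(url):
    # Split the URL; for 'file:///home/user/x' the path lands in p.path and netloc is empty.
    p = urlparse(url)
    # Joining netloc and path also tolerates plain paths, which urlparse leaves untouched.
    return os.path.abspath(os.path.join(p.netloc, p.path))
# Illustrative inputs: a file:// URL is unwrapped, a plain path passes through unchanged.
print(url_to_local_path("file:///tmp/example.txt"))  # /tmp/example.txt
print(url_to_local_path("/tmp/example.txt"))         # /tmp/example.txt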
ddec6067054cc4408ac174e3ea4ffeca2a962201
|
regulations/views/notice_home.py
|
regulations/views/notice_home.py
|
from __future__ import unicode_literals
from operator import itemgetter
import logging
from django.http import Http404
from django.template.response import TemplateResponse
from django.views.generic.base import View
from regulations.generator.api_reader import ApiReader
from regulations.views.preamble import (
notice_data, CommentState)
logger = logging.getLogger(__name__)
class NoticeHomeView(View):
"""
Basic view that provides a list of regulations and notices to the context.
"""
template_name = None # We should probably have a default notice template.
def get(self, request, *args, **kwargs):
notices = ApiReader().notices().get("results", [])
context = {}
notices_meta = []
for notice in notices:
try:
if notice.get("document_number"):
_, meta, _ = notice_data(notice["document_number"])
notices_meta.append(meta)
except Http404:
pass
notices_meta = sorted(notices_meta, key=itemgetter("publication_date"),
reverse=True)
context["notices"] = notices_meta
# Django templates won't show contents of CommentState as an Enum, so:
context["comment_state"] = {state.name: state.value for state in
CommentState}
assert self.template_name
template = self.template_name
return TemplateResponse(request=request, template=template,
context=context)
|
from __future__ import unicode_literals
from operator import itemgetter
import logging
from django.http import Http404
from django.template.response import TemplateResponse
from django.views.generic.base import View
from regulations.generator.api_reader import ApiReader
from regulations.views.preamble import (
notice_data, CommentState)
logger = logging.getLogger(__name__)
class NoticeHomeView(View):
"""
Basic view that provides a list of regulations and notices to the context.
"""
template_name = None # We should probably have a default notice template.
def get(self, request, *args, **kwargs):
notices = ApiReader().notices().get("results", [])
context = {}
notices_meta = []
for notice in notices:
try:
if notice.get("document_number"):
_, meta, _ = notice_data(notice["document_number"])
notices_meta.append(meta)
except Http404:
pass
notices_meta = sorted(notices_meta, key=itemgetter("publication_date"),
reverse=True)
context["notices"] = notices_meta
# Django templates won't show contents of CommentState as an Enum, so:
context["comment_state"] = {state.name: state.value for state in
CommentState}
template = self.template_name
return TemplateResponse(request=request, template=template,
context=context)
|
Remove unnecessary assert from view for Notice home.
|
Remove unnecessary assert from view for Notice home.
|
Python
|
cc0-1.0
|
18F/regulations-site,18F/regulations-site,eregs/regulations-site,tadhg-ohiggins/regulations-site,tadhg-ohiggins/regulations-site,tadhg-ohiggins/regulations-site,eregs/regulations-site,eregs/regulations-site,eregs/regulations-site,tadhg-ohiggins/regulations-site,18F/regulations-site,18F/regulations-site
|
from __future__ import unicode_literals
from operator import itemgetter
import logging
from django.http import Http404
from django.template.response import TemplateResponse
from django.views.generic.base import View
from regulations.generator.api_reader import ApiReader
from regulations.views.preamble import (
notice_data, CommentState)
logger = logging.getLogger(__name__)
class NoticeHomeView(View):
"""
Basic view that provides a list of regulations and notices to the context.
"""
template_name = None # We should probably have a default notice template.
def get(self, request, *args, **kwargs):
notices = ApiReader().notices().get("results", [])
context = {}
notices_meta = []
for notice in notices:
try:
if notice.get("document_number"):
_, meta, _ = notice_data(notice["document_number"])
notices_meta.append(meta)
except Http404:
pass
notices_meta = sorted(notices_meta, key=itemgetter("publication_date"),
reverse=True)
context["notices"] = notices_meta
# Django templates won't show contents of CommentState as an Enum, so:
context["comment_state"] = {state.name: state.value for state in
CommentState}
- assert self.template_name
template = self.template_name
return TemplateResponse(request=request, template=template,
context=context)
|
Remove unnecessary assert from view for Notice home.
|
## Code Before:
from __future__ import unicode_literals
from operator import itemgetter
import logging
from django.http import Http404
from django.template.response import TemplateResponse
from django.views.generic.base import View
from regulations.generator.api_reader import ApiReader
from regulations.views.preamble import (
notice_data, CommentState)
logger = logging.getLogger(__name__)
class NoticeHomeView(View):
"""
Basic view that provides a list of regulations and notices to the context.
"""
template_name = None # We should probably have a default notice template.
def get(self, request, *args, **kwargs):
notices = ApiReader().notices().get("results", [])
context = {}
notices_meta = []
for notice in notices:
try:
if notice.get("document_number"):
_, meta, _ = notice_data(notice["document_number"])
notices_meta.append(meta)
except Http404:
pass
notices_meta = sorted(notices_meta, key=itemgetter("publication_date"),
reverse=True)
context["notices"] = notices_meta
# Django templates won't show contents of CommentState as an Enum, so:
context["comment_state"] = {state.name: state.value for state in
CommentState}
assert self.template_name
template = self.template_name
return TemplateResponse(request=request, template=template,
context=context)
## Instruction:
Remove unnecessary assert from view for Notice home.
## Code After:
from __future__ import unicode_literals
from operator import itemgetter
import logging
from django.http import Http404
from django.template.response import TemplateResponse
from django.views.generic.base import View
from regulations.generator.api_reader import ApiReader
from regulations.views.preamble import (
notice_data, CommentState)
logger = logging.getLogger(__name__)
class NoticeHomeView(View):
"""
Basic view that provides a list of regulations and notices to the context.
"""
template_name = None # We should probably have a default notice template.
def get(self, request, *args, **kwargs):
notices = ApiReader().notices().get("results", [])
context = {}
notices_meta = []
for notice in notices:
try:
if notice.get("document_number"):
_, meta, _ = notice_data(notice["document_number"])
notices_meta.append(meta)
except Http404:
pass
notices_meta = sorted(notices_meta, key=itemgetter("publication_date"),
reverse=True)
context["notices"] = notices_meta
# Django templates won't show contents of CommentState as an Enum, so:
context["comment_state"] = {state.name: state.value for state in
CommentState}
template = self.template_name
return TemplateResponse(request=request, template=template,
context=context)
|
...
context["comment_state"] = {state.name: state.value for state in
CommentState}
template = self.template_name
return TemplateResponse(request=request, template=template,
context=context)
...
|
67b243915ef95ff1b9337bc67053d18df372e79d
|
unitypack/enums.py
|
unitypack/enums.py
|
from enum import IntEnum
class RuntimePlatform(IntEnum):
OSXEditor = 0
OSXPlayer = 1
WindowsPlayer = 2
OSXWebPlayer = 3
OSXDashboardPlayer = 4
WindowsWebPlayer = 5
WindowsEditor = 7
IPhonePlayer = 8
PS3 = 9
XBOX360 = 10
Android = 11
NaCl = 12
LinuxPlayer = 13
FlashPlayer = 15
WebGLPlayer = 17
MetroPlayerX86 = 18
WSAPlayerX86 = 18
MetroPlayerX64 = 19
WSAPlayerX64 = 19
MetroPlayerARM = 20
WSAPlayerARM = 20
WP8Player = 21
BB10Player = 22
BlackBerryPlayer = 22
TizenPlayer = 23
PSP2 = 24
PS4 = 25
PSM = 26
XboxOne = 27
|
from enum import IntEnum
class RuntimePlatform(IntEnum):
OSXEditor = 0
OSXPlayer = 1
WindowsPlayer = 2
OSXWebPlayer = 3
OSXDashboardPlayer = 4
WindowsWebPlayer = 5
WindowsEditor = 7
IPhonePlayer = 8
PS3 = 9
XBOX360 = 10
Android = 11
NaCl = 12
LinuxPlayer = 13
FlashPlayer = 15
WebGLPlayer = 17
MetroPlayerX86 = 18
WSAPlayerX86 = 18
MetroPlayerX64 = 19
WSAPlayerX64 = 19
MetroPlayerARM = 20
WSAPlayerARM = 20
WP8Player = 21
BB10Player = 22
BlackBerryPlayer = 22
TizenPlayer = 23
PSP2 = 24
PS4 = 25
PSM = 26
PSMPlayer = 26
XboxOne = 27
SamsungTVPlayer = 28
|
Add PSMPlayer and SamsungTVPlayer platforms
|
Add PSMPlayer and SamsungTVPlayer platforms
|
Python
|
mit
|
andburn/python-unitypack
|
from enum import IntEnum
class RuntimePlatform(IntEnum):
OSXEditor = 0
OSXPlayer = 1
WindowsPlayer = 2
OSXWebPlayer = 3
OSXDashboardPlayer = 4
WindowsWebPlayer = 5
WindowsEditor = 7
IPhonePlayer = 8
PS3 = 9
XBOX360 = 10
Android = 11
NaCl = 12
LinuxPlayer = 13
FlashPlayer = 15
WebGLPlayer = 17
MetroPlayerX86 = 18
WSAPlayerX86 = 18
MetroPlayerX64 = 19
WSAPlayerX64 = 19
MetroPlayerARM = 20
WSAPlayerARM = 20
WP8Player = 21
BB10Player = 22
BlackBerryPlayer = 22
TizenPlayer = 23
PSP2 = 24
PS4 = 25
PSM = 26
+ PSMPlayer = 26
XboxOne = 27
+ SamsungTVPlayer = 28
|
Add PSMPlayer and SamsungTVPlayer platforms
|
## Code Before:
from enum import IntEnum
class RuntimePlatform(IntEnum):
OSXEditor = 0
OSXPlayer = 1
WindowsPlayer = 2
OSXWebPlayer = 3
OSXDashboardPlayer = 4
WindowsWebPlayer = 5
WindowsEditor = 7
IPhonePlayer = 8
PS3 = 9
XBOX360 = 10
Android = 11
NaCl = 12
LinuxPlayer = 13
FlashPlayer = 15
WebGLPlayer = 17
MetroPlayerX86 = 18
WSAPlayerX86 = 18
MetroPlayerX64 = 19
WSAPlayerX64 = 19
MetroPlayerARM = 20
WSAPlayerARM = 20
WP8Player = 21
BB10Player = 22
BlackBerryPlayer = 22
TizenPlayer = 23
PSP2 = 24
PS4 = 25
PSM = 26
XboxOne = 27
## Instruction:
Add PSMPlayer and SamsungTVPlayer platforms
## Code After:
from enum import IntEnum
class RuntimePlatform(IntEnum):
OSXEditor = 0
OSXPlayer = 1
WindowsPlayer = 2
OSXWebPlayer = 3
OSXDashboardPlayer = 4
WindowsWebPlayer = 5
WindowsEditor = 7
IPhonePlayer = 8
PS3 = 9
XBOX360 = 10
Android = 11
NaCl = 12
LinuxPlayer = 13
FlashPlayer = 15
WebGLPlayer = 17
MetroPlayerX86 = 18
WSAPlayerX86 = 18
MetroPlayerX64 = 19
WSAPlayerX64 = 19
MetroPlayerARM = 20
WSAPlayerARM = 20
WP8Player = 21
BB10Player = 22
BlackBerryPlayer = 22
TizenPlayer = 23
PSP2 = 24
PS4 = 25
PSM = 26
PSMPlayer = 26
XboxOne = 27
SamsungTVPlayer = 28
|
// ... existing code ...
PSP2 = 24
PS4 = 25
PSM = 26
PSMPlayer = 26
XboxOne = 27
SamsungTVPlayer = 28
// ... rest of the code ...
|
6b4b51a7f8e89e023c933f99aaa3a8329c05e750
|
salt/runners/ssh.py
|
salt/runners/ssh.py
|
'''
A Runner module interface on top of the salt-ssh Python API
This allows for programmatic use from salt-api, the Reactor, Orchestrate, etc.
'''
import salt.client.ssh.client
def cmd(
tgt,
fun,
arg=(),
timeout=None,
expr_form='glob',
kwarg=None):
'''
Execute a single command via the salt-ssh subsystem and return all
routines at once
.. versionaddedd:: 2015.2
A wrapper around the :py:meth:`SSHClient.cmd
<salt.client.ssh.client.SSHClient.cmd>` method.
'''
client = salt.client.ssh.client.SSHClient(mopts=__opts__)
return client.cmd(
tgt,
fun,
arg,
timeout,
expr_form,
kwarg)
|
'''
A Runner module interface on top of the salt-ssh Python API.
This allows for programmatic use from salt-api, the Reactor, Orchestrate, etc.
'''
# Import Python Libs
from __future__ import absolute_import
# Import Salt Libs
import salt.client.ssh.client
def cmd(
tgt,
fun,
arg=(),
timeout=None,
expr_form='glob',
kwarg=None):
'''
Execute a single command via the salt-ssh subsystem and return all
routines at once
.. versionaddedd:: 2015.2
A wrapper around the :py:meth:`SSHClient.cmd
<salt.client.ssh.client.SSHClient.cmd>` method.
'''
client = salt.client.ssh.client.SSHClient(mopts=__opts__)
return client.cmd(
tgt,
fun,
arg,
timeout,
expr_form,
kwarg)
|
Fix pylint errors that snuck into 2015.2
|
Fix pylint errors that snuck into 2015.2
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
'''
- A Runner module interface on top of the salt-ssh Python API
+ A Runner module interface on top of the salt-ssh Python API.
This allows for programmatic use from salt-api, the Reactor, Orchestrate, etc.
'''
+
+ # Import Python Libs
+ from __future__ import absolute_import
+
+ # Import Salt Libs
import salt.client.ssh.client
+
def cmd(
tgt,
fun,
arg=(),
timeout=None,
expr_form='glob',
kwarg=None):
'''
Execute a single command via the salt-ssh subsystem and return all
routines at once
.. versionaddedd:: 2015.2
A wrapper around the :py:meth:`SSHClient.cmd
<salt.client.ssh.client.SSHClient.cmd>` method.
'''
client = salt.client.ssh.client.SSHClient(mopts=__opts__)
return client.cmd(
tgt,
fun,
arg,
timeout,
expr_form,
kwarg)
|
Fix pylint errors that snuck into 2015.2
|
## Code Before:
'''
A Runner module interface on top of the salt-ssh Python API
This allows for programmatic use from salt-api, the Reactor, Orchestrate, etc.
'''
import salt.client.ssh.client
def cmd(
tgt,
fun,
arg=(),
timeout=None,
expr_form='glob',
kwarg=None):
'''
Execute a single command via the salt-ssh subsystem and return all
routines at once
.. versionaddedd:: 2015.2
A wrapper around the :py:meth:`SSHClient.cmd
<salt.client.ssh.client.SSHClient.cmd>` method.
'''
client = salt.client.ssh.client.SSHClient(mopts=__opts__)
return client.cmd(
tgt,
fun,
arg,
timeout,
expr_form,
kwarg)
## Instruction:
Fix pylint errors that snuck into 2015.2
## Code After:
'''
A Runner module interface on top of the salt-ssh Python API.
This allows for programmatic use from salt-api, the Reactor, Orchestrate, etc.
'''
# Import Python Libs
from __future__ import absolute_import
# Import Salt Libs
import salt.client.ssh.client
def cmd(
tgt,
fun,
arg=(),
timeout=None,
expr_form='glob',
kwarg=None):
'''
Execute a single command via the salt-ssh subsystem and return all
routines at once
.. versionaddedd:: 2015.2
A wrapper around the :py:meth:`SSHClient.cmd
<salt.client.ssh.client.SSHClient.cmd>` method.
'''
client = salt.client.ssh.client.SSHClient(mopts=__opts__)
return client.cmd(
tgt,
fun,
arg,
timeout,
expr_form,
kwarg)
|
// ... existing code ...
'''
A Runner module interface on top of the salt-ssh Python API.
This allows for programmatic use from salt-api, the Reactor, Orchestrate, etc.
'''
# Import Python Libs
from __future__ import absolute_import
# Import Salt Libs
import salt.client.ssh.client
def cmd(
tgt,
// ... rest of the code ...
|
b8770a85e11c048fb0dc6c46f799b17add07568d
|
productController.py
|
productController.py
|
from endpoints import Controller, CorsMixin
import sqlite3
from datetime import datetime
conn = sqlite3.connect('CIUK.db')
cur = conn.cursor()
class Default(Controller, CorsMixin):
def GET(self):
return "CIUK"
def POST(self, **kwargs):
return '{}, {}, {}'.format(kwargs['title'], kwargs['desc'], kwargs['price'])
class Products(Controller, CorsMixin):
def GET(self):
cur.execute("select * from products")
return cur.fetchall()
class Product(Controller, CorsMixin):
def GET(self, id):
cur.execute("select * from products where id=?", (id,))
return cur.fetchone()
def POST(self, **kwargs):
row =[kwargs['title'], kwargs['desc'], kwargs['price'], datetime.now(), datetime.now()]
cur.execute("insert into products values (null, ?, ?, ?, ?, ?);", (row))
conn.commit()
return "New product added!"
def PUT(self, id, **kwargs):
row =[kwargs['title'], kwargs['desc'], kwargs['price'], datetime.now(), id]
cur.execute("update products set title=?, description=?, price=?, created_at=? where id=?", (row))
conn.commit()
return "Product updated!"
def DELETE(self, id):
cur.execute("delete from products where id=?", (id,))
conn.commit()
return "Product deleted!"
|
from endpoints import Controller, CorsMixin
import sqlite3
from datetime import datetime
conn = sqlite3.connect('databaseForTest.db')
cur = conn.cursor()
class Default(Controller, CorsMixin):
def GET(self):
return "CIUK"
def POST(self, **kwargs):
return '{}, {}, {}'.format(kwargs['title'], kwargs['desc'], kwargs['price'])
class Products(Controller, CorsMixin):
def GET(self):
cur.execute("select * from products")
return cur.fetchall()
class Product(Controller, CorsMixin):
def GET(self, id):
cur.execute("select * from products where id=?", (id,))
return cur.fetchone()
def POST(self, **kwargs):
row =[kwargs['title'], kwargs['desc'], kwargs['price'], datetime.now(), datetime.now()]
cur.execute("insert into products values (null, ?, ?, ?, ?, ?);", (row))
conn.commit()
return "New product added!"
def PUT(self, id, **kwargs):
row =[kwargs['title'], kwargs['desc'], kwargs['price'], datetime.now(), id]
cur.execute("update products set title=?, description=?, price=?, created_at=? where id=?", (row))
conn.commit()
return "Product updated!"
def DELETE(self, id):
cur.execute("delete from products where id=?", (id,))
conn.commit()
return "Product deleted!"
|
Change name of database for test
|
Change name of database for test
|
Python
|
mit
|
joykuotw/python-endpoints,joykuotw/python-endpoints,joykuotw/python-endpoints
|
from endpoints import Controller, CorsMixin
import sqlite3
from datetime import datetime
- conn = sqlite3.connect('CIUK.db')
+ conn = sqlite3.connect('databaseForTest.db')
cur = conn.cursor()
class Default(Controller, CorsMixin):
def GET(self):
return "CIUK"
def POST(self, **kwargs):
return '{}, {}, {}'.format(kwargs['title'], kwargs['desc'], kwargs['price'])
class Products(Controller, CorsMixin):
def GET(self):
cur.execute("select * from products")
return cur.fetchall()
class Product(Controller, CorsMixin):
def GET(self, id):
cur.execute("select * from products where id=?", (id,))
return cur.fetchone()
def POST(self, **kwargs):
row =[kwargs['title'], kwargs['desc'], kwargs['price'], datetime.now(), datetime.now()]
cur.execute("insert into products values (null, ?, ?, ?, ?, ?);", (row))
conn.commit()
return "New product added!"
def PUT(self, id, **kwargs):
row =[kwargs['title'], kwargs['desc'], kwargs['price'], datetime.now(), id]
cur.execute("update products set title=?, description=?, price=?, created_at=? where id=?", (row))
conn.commit()
return "Product updated!"
def DELETE(self, id):
cur.execute("delete from products where id=?", (id,))
conn.commit()
return "Product deleted!"
|
Change name of database for test
|
## Code Before:
from endpoints import Controller, CorsMixin
import sqlite3
from datetime import datetime
conn = sqlite3.connect('CIUK.db')
cur = conn.cursor()
class Default(Controller, CorsMixin):
def GET(self):
return "CIUK"
def POST(self, **kwargs):
return '{}, {}, {}'.format(kwargs['title'], kwargs['desc'], kwargs['price'])
class Products(Controller, CorsMixin):
def GET(self):
cur.execute("select * from products")
return cur.fetchall()
class Product(Controller, CorsMixin):
def GET(self, id):
cur.execute("select * from products where id=?", (id,))
return cur.fetchone()
def POST(self, **kwargs):
row =[kwargs['title'], kwargs['desc'], kwargs['price'], datetime.now(), datetime.now()]
cur.execute("insert into products values (null, ?, ?, ?, ?, ?);", (row))
conn.commit()
return "New product added!"
def PUT(self, id, **kwargs):
row =[kwargs['title'], kwargs['desc'], kwargs['price'], datetime.now(), id]
cur.execute("update products set title=?, description=?, price=?, created_at=? where id=?", (row))
conn.commit()
return "Product updated!"
def DELETE(self, id):
cur.execute("delete from products where id=?", (id,))
conn.commit()
return "Product deleted!"
## Instruction:
Change name of database for test
## Code After:
from endpoints import Controller, CorsMixin
import sqlite3
from datetime import datetime
conn = sqlite3.connect('databaseForTest.db')
cur = conn.cursor()
class Default(Controller, CorsMixin):
def GET(self):
return "CIUK"
def POST(self, **kwargs):
return '{}, {}, {}'.format(kwargs['title'], kwargs['desc'], kwargs['price'])
class Products(Controller, CorsMixin):
def GET(self):
cur.execute("select * from products")
return cur.fetchall()
class Product(Controller, CorsMixin):
def GET(self, id):
cur.execute("select * from products where id=?", (id,))
return cur.fetchone()
def POST(self, **kwargs):
row =[kwargs['title'], kwargs['desc'], kwargs['price'], datetime.now(), datetime.now()]
cur.execute("insert into products values (null, ?, ?, ?, ?, ?);", (row))
conn.commit()
return "New product added!"
def PUT(self, id, **kwargs):
row =[kwargs['title'], kwargs['desc'], kwargs['price'], datetime.now(), id]
cur.execute("update products set title=?, description=?, price=?, created_at=? where id=?", (row))
conn.commit()
return "Product updated!"
def DELETE(self, id):
cur.execute("delete from products where id=?", (id,))
conn.commit()
return "Product deleted!"
|
# ... existing code ...
import sqlite3
from datetime import datetime
conn = sqlite3.connect('databaseForTest.db')
cur = conn.cursor()
class Default(Controller, CorsMixin):
# ... rest of the code ...
|
b1bf5dfa91f1f7b84512f72d6e5e18c2109f3239
|
addic7ed/__init__.py
|
addic7ed/__init__.py
|
from termcolor import colored
from .parser import Addic7edParser
from .file_crawler import FileCrawler
from .logger import init_logger
from .config import Config
def addic7ed():
try:
init_logger()
Config.load()
main()
except (EOFError, KeyboardInterrupt, SystemExit):
print(colored("\nBye!", "yellow"))
exit(0)
def main():
crawler = FileCrawler()
parser = Addic7edParser()
for filename, ep in crawler.episodes.items():
subs = parser.parse(**ep.infos)
print(ep)
if not subs:
print(colored("No subtitles for %s" % filename, "red"), end="\n\n")
continue
for i, sub in enumerate(subs):
print("[%s] %s" % (colored(i, "yellow"), sub))
if Config.dry_run:
print()
continue
else:
version = input('Download number? ')
if not version:
print(colored("Nothing to do!", "yellow"),
end="\n\n")
continue
try:
if Config.rename != "sub":
filename = subs[int(version)].download()
if filename and Config.rename == "video":
print(ep.rename(filename), end="\n\n")
else:
filename = subs[int(version)].download("%s.srt" %
ep.filename)
print(colored("Downloaded %s subtitle file" %
filename, "green"))
except Exception as e:
print(colored(e, "red"),
end="\n\n")
|
from termcolor import colored
from .parser import Addic7edParser
from .file_crawler import FileCrawler
from .logger import init_logger
from .config import Config
def addic7ed():
try:
init_logger()
Config.load()
main()
except (EOFError, KeyboardInterrupt, SystemExit):
print(colored("\nBye!", "yellow"))
exit(0)
def main():
crawler = FileCrawler()
parser = Addic7edParser()
for filename, ep in crawler.episodes.items():
subs = parser.parse(**ep.infos)
print(ep)
if not subs:
print(colored("No subtitles for %s" % filename, "red"), end="\n\n")
continue
for i, sub in enumerate(subs):
print("[%s] %s" % (colored(i, "yellow"), sub))
if Config.dry_run:
print()
continue
else:
version = input('Download number? ')
if not version:
print(colored("Nothing to do!", "yellow"),
end="\n\n")
continue
try:
if Config.rename != "sub":
filename = subs[int(version)].download()
if filename and Config.rename == "video":
print(ep.rename(filename))
else:
filename = subs[int(version)].download("%s.srt" %
ep.filename)
print(colored("Downloaded %s subtitle file" %
filename, "green"), end="\n\n")
except Exception as e:
print(colored(e, "red"),
end="\n\n")
|
Fix newline output of downloaded srt
|
Fix newline output of downloaded srt
|
Python
|
mit
|
Jesus-21/addic7ed
|
from termcolor import colored
from .parser import Addic7edParser
from .file_crawler import FileCrawler
from .logger import init_logger
from .config import Config
def addic7ed():
try:
init_logger()
Config.load()
main()
except (EOFError, KeyboardInterrupt, SystemExit):
print(colored("\nBye!", "yellow"))
exit(0)
def main():
crawler = FileCrawler()
parser = Addic7edParser()
for filename, ep in crawler.episodes.items():
subs = parser.parse(**ep.infos)
print(ep)
if not subs:
print(colored("No subtitles for %s" % filename, "red"), end="\n\n")
continue
for i, sub in enumerate(subs):
print("[%s] %s" % (colored(i, "yellow"), sub))
if Config.dry_run:
print()
continue
else:
version = input('Download number? ')
if not version:
print(colored("Nothing to do!", "yellow"),
end="\n\n")
continue
try:
if Config.rename != "sub":
filename = subs[int(version)].download()
if filename and Config.rename == "video":
- print(ep.rename(filename), end="\n\n")
+ print(ep.rename(filename))
else:
filename = subs[int(version)].download("%s.srt" %
ep.filename)
print(colored("Downloaded %s subtitle file" %
- filename, "green"))
+ filename, "green"), end="\n\n")
except Exception as e:
print(colored(e, "red"),
end="\n\n")
|
Fix newline output of downloaded srt
|
## Code Before:
from termcolor import colored
from .parser import Addic7edParser
from .file_crawler import FileCrawler
from .logger import init_logger
from .config import Config
def addic7ed():
try:
init_logger()
Config.load()
main()
except (EOFError, KeyboardInterrupt, SystemExit):
print(colored("\nBye!", "yellow"))
exit(0)
def main():
crawler = FileCrawler()
parser = Addic7edParser()
for filename, ep in crawler.episodes.items():
subs = parser.parse(**ep.infos)
print(ep)
if not subs:
print(colored("No subtitles for %s" % filename, "red"), end="\n\n")
continue
for i, sub in enumerate(subs):
print("[%s] %s" % (colored(i, "yellow"), sub))
if Config.dry_run:
print()
continue
else:
version = input('Download number? ')
if not version:
print(colored("Nothing to do!", "yellow"),
end="\n\n")
continue
try:
if Config.rename != "sub":
filename = subs[int(version)].download()
if filename and Config.rename == "video":
print(ep.rename(filename), end="\n\n")
else:
filename = subs[int(version)].download("%s.srt" %
ep.filename)
print(colored("Downloaded %s subtitle file" %
filename, "green"))
except Exception as e:
print(colored(e, "red"),
end="\n\n")
## Instruction:
Fix newline output of downloaded srt
## Code After:
from termcolor import colored
from .parser import Addic7edParser
from .file_crawler import FileCrawler
from .logger import init_logger
from .config import Config
def addic7ed():
try:
init_logger()
Config.load()
main()
except (EOFError, KeyboardInterrupt, SystemExit):
print(colored("\nBye!", "yellow"))
exit(0)
def main():
crawler = FileCrawler()
parser = Addic7edParser()
for filename, ep in crawler.episodes.items():
subs = parser.parse(**ep.infos)
print(ep)
if not subs:
print(colored("No subtitles for %s" % filename, "red"), end="\n\n")
continue
for i, sub in enumerate(subs):
print("[%s] %s" % (colored(i, "yellow"), sub))
if Config.dry_run:
print()
continue
else:
version = input('Download number? ')
if not version:
print(colored("Nothing to do!", "yellow"),
end="\n\n")
continue
try:
if Config.rename != "sub":
filename = subs[int(version)].download()
if filename and Config.rename == "video":
print(ep.rename(filename))
else:
filename = subs[int(version)].download("%s.srt" %
ep.filename)
print(colored("Downloaded %s subtitle file" %
filename, "green"), end="\n\n")
except Exception as e:
print(colored(e, "red"),
end="\n\n")
|
// ... existing code ...
if Config.rename != "sub":
filename = subs[int(version)].download()
if filename and Config.rename == "video":
print(ep.rename(filename))
else:
filename = subs[int(version)].download("%s.srt" %
ep.filename)
print(colored("Downloaded %s subtitle file" %
filename, "green"), end="\n\n")
except Exception as e:
print(colored(e, "red"),
end="\n\n")
// ... rest of the code ...
|
f6b40cbe9da0552b27b7c4c5d1a2d9bb0a75dafd
|
custom/icds_reports/permissions.py
|
custom/icds_reports/permissions.py
|
from functools import wraps
from django.http import HttpResponse
from corehq.apps.locations.permissions import user_can_access_location_id
from custom.icds_core.view_utils import icds_pre_release_features
def can_access_location_data(view_fn):
"""
Decorator controlling a user's access to VIEW data for a specific location.
"""
@wraps(view_fn)
def _inner(request, domain, *args, **kwargs):
def call_view(): return view_fn(request, domain, *args, **kwargs)
if icds_pre_release_features(request.couch_user):
loc_id = request.GET.get('location_id')
def return_no_location_access_response():
return HttpResponse('No access to the location {} for the logged in user'.format(loc_id),
status=403)
if not loc_id and not request.couch_user.has_permission(domain, 'access_all_locations'):
return return_no_location_access_response()
if loc_id and not user_can_access_location_id(domain, request.couch_user, loc_id):
return return_no_location_access_response()
return call_view()
return _inner
|
from functools import wraps
from django.http import HttpResponse
from corehq.apps.locations.permissions import user_can_access_location_id
from custom.icds_core.view_utils import icds_pre_release_features
def can_access_location_data(view_fn):
"""
Decorator controlling a user's access to VIEW data for a specific location.
"""
@wraps(view_fn)
def _inner(request, domain, *args, **kwargs):
def call_view(): return view_fn(request, domain, *args, **kwargs)
loc_id = request.GET.get('location_id')
def return_no_location_access_response():
return HttpResponse('No access to the location {} for the logged in user'.format(loc_id),
status=403)
if not loc_id and not request.couch_user.has_permission(domain, 'access_all_locations'):
return return_no_location_access_response()
if loc_id and not user_can_access_location_id(domain, request.couch_user, loc_id):
return return_no_location_access_response()
return call_view()
return _inner
|
Remove Feature flag from the location security check for dashboard
|
Remove Feature flag from the location security check for dashboard
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
from functools import wraps
from django.http import HttpResponse
from corehq.apps.locations.permissions import user_can_access_location_id
from custom.icds_core.view_utils import icds_pre_release_features
def can_access_location_data(view_fn):
"""
Decorator controlling a user's access to VIEW data for a specific location.
"""
@wraps(view_fn)
def _inner(request, domain, *args, **kwargs):
def call_view(): return view_fn(request, domain, *args, **kwargs)
- if icds_pre_release_features(request.couch_user):
- loc_id = request.GET.get('location_id')
+ loc_id = request.GET.get('location_id')
+
- def return_no_location_access_response():
+ def return_no_location_access_response():
- return HttpResponse('No access to the location {} for the logged in user'.format(loc_id),
+ return HttpResponse('No access to the location {} for the logged in user'.format(loc_id),
- status=403)
+ status=403)
- if not loc_id and not request.couch_user.has_permission(domain, 'access_all_locations'):
+ if not loc_id and not request.couch_user.has_permission(domain, 'access_all_locations'):
- return return_no_location_access_response()
+ return return_no_location_access_response()
- if loc_id and not user_can_access_location_id(domain, request.couch_user, loc_id):
+ if loc_id and not user_can_access_location_id(domain, request.couch_user, loc_id):
return return_no_location_access_response()
return call_view()
return _inner
|
Remove Feature flag from the location security check for dashboard
|
## Code Before:
from functools import wraps
from django.http import HttpResponse
from corehq.apps.locations.permissions import user_can_access_location_id
from custom.icds_core.view_utils import icds_pre_release_features
def can_access_location_data(view_fn):
"""
Decorator controlling a user's access to VIEW data for a specific location.
"""
@wraps(view_fn)
def _inner(request, domain, *args, **kwargs):
def call_view(): return view_fn(request, domain, *args, **kwargs)
if icds_pre_release_features(request.couch_user):
loc_id = request.GET.get('location_id')
def return_no_location_access_response():
return HttpResponse('No access to the location {} for the logged in user'.format(loc_id),
status=403)
if not loc_id and not request.couch_user.has_permission(domain, 'access_all_locations'):
return return_no_location_access_response()
if loc_id and not user_can_access_location_id(domain, request.couch_user, loc_id):
return return_no_location_access_response()
return call_view()
return _inner
## Instruction:
Remove Feature flag from the location security check for dashboard
## Code After:
from functools import wraps
from django.http import HttpResponse
from corehq.apps.locations.permissions import user_can_access_location_id
from custom.icds_core.view_utils import icds_pre_release_features
def can_access_location_data(view_fn):
"""
Decorator controlling a user's access to VIEW data for a specific location.
"""
@wraps(view_fn)
def _inner(request, domain, *args, **kwargs):
def call_view(): return view_fn(request, domain, *args, **kwargs)
loc_id = request.GET.get('location_id')
def return_no_location_access_response():
return HttpResponse('No access to the location {} for the logged in user'.format(loc_id),
status=403)
if not loc_id and not request.couch_user.has_permission(domain, 'access_all_locations'):
return return_no_location_access_response()
if loc_id and not user_can_access_location_id(domain, request.couch_user, loc_id):
return return_no_location_access_response()
return call_view()
return _inner
|
# ... existing code ...
@wraps(view_fn)
def _inner(request, domain, *args, **kwargs):
def call_view(): return view_fn(request, domain, *args, **kwargs)
loc_id = request.GET.get('location_id')
def return_no_location_access_response():
return HttpResponse('No access to the location {} for the logged in user'.format(loc_id),
status=403)
if not loc_id and not request.couch_user.has_permission(domain, 'access_all_locations'):
return return_no_location_access_response()
if loc_id and not user_can_access_location_id(domain, request.couch_user, loc_id):
return return_no_location_access_response()
return call_view()
return _inner
# ... rest of the code ...
|
4a6449b806dc755fe3f9d18966c0420da2a4d0fc
|
devito/dle/manipulation.py
|
devito/dle/manipulation.py
|
import cgen as c
from devito.codeprinter import ccode
from devito.nodes import Element, Iteration
from devito.visitors import MergeOuterIterations
__all__ = ['compose_nodes', 'copy_arrays']
def compose_nodes(nodes):
"""Build an Iteration/Expression tree by nesting the nodes in ``nodes``."""
l = list(nodes)
body = l.pop(-1)
while l:
handle = l.pop(-1)
body = handle._rebuild(body, **handle.args_frozen)
return body
def copy_arrays(mapper):
"""Build an Iteration/Expression tree performing the copy ``k = v`` for each
(k, v) in mapper. (k, v) are expected to be of type :class:`IndexedData`."""
# Build the Iteration tree for the copy
iterations = []
for k, v in mapper.items():
handle = []
indices = k.function.indices
for i, j in zip(k.shape, indices):
handle.append(Iteration([], dimension=j, limits=j.symbolic_size))
handle.append(Element(c.Assign(ccode(k[indices]), ccode(v[indices]))))
iterations.append(compose_nodes(handle))
# Maybe some Iterations are mergeable
iterations = MergeOuterIterations().visit(iterations)
return iterations
|
from sympy import Eq
from devito.codeprinter import ccode
from devito.nodes import Expression, Iteration
from devito.visitors import MergeOuterIterations
__all__ = ['compose_nodes', 'copy_arrays']
def compose_nodes(nodes):
"""Build an Iteration/Expression tree by nesting the nodes in ``nodes``."""
l = list(nodes)
body = l.pop(-1)
while l:
handle = l.pop(-1)
body = handle._rebuild(body, **handle.args_frozen)
return body
def copy_arrays(mapper):
"""Build an Iteration/Expression tree performing the copy ``k = v`` for each
(k, v) in mapper. (k, v) are expected to be of type :class:`IndexedData`."""
# Build the Iteration tree for the copy
iterations = []
for k, v in mapper.items():
handle = []
indices = k.function.indices
for i, j in zip(k.shape, indices):
handle.append(Iteration([], dimension=j, limits=j.symbolic_size))
handle.append(Expression(Eq(k[indices], v[indices]), dtype=k.function.dtype))
iterations.append(compose_nodes(handle))
# Maybe some Iterations are mergeable
iterations = MergeOuterIterations().visit(iterations)
return iterations
|
Use Expression, not Element, in copy_arrays
|
dle: Use Expression, not Element, in copy_arrays
|
Python
|
mit
|
opesci/devito,opesci/devito
|
- import cgen as c
+ from sympy import Eq
from devito.codeprinter import ccode
- from devito.nodes import Element, Iteration
+ from devito.nodes import Expression, Iteration
from devito.visitors import MergeOuterIterations
__all__ = ['compose_nodes', 'copy_arrays']
def compose_nodes(nodes):
"""Build an Iteration/Expression tree by nesting the nodes in ``nodes``."""
l = list(nodes)
body = l.pop(-1)
while l:
handle = l.pop(-1)
body = handle._rebuild(body, **handle.args_frozen)
return body
def copy_arrays(mapper):
"""Build an Iteration/Expression tree performing the copy ``k = v`` for each
(k, v) in mapper. (k, v) are expected to be of type :class:`IndexedData`."""
# Build the Iteration tree for the copy
iterations = []
for k, v in mapper.items():
handle = []
indices = k.function.indices
for i, j in zip(k.shape, indices):
handle.append(Iteration([], dimension=j, limits=j.symbolic_size))
- handle.append(Element(c.Assign(ccode(k[indices]), ccode(v[indices]))))
+ handle.append(Expression(Eq(k[indices], v[indices]), dtype=k.function.dtype))
iterations.append(compose_nodes(handle))
# Maybe some Iterations are mergeable
iterations = MergeOuterIterations().visit(iterations)
return iterations
|
Use Expression, not Element, in copy_arrays
|
## Code Before:
import cgen as c
from devito.codeprinter import ccode
from devito.nodes import Element, Iteration
from devito.visitors import MergeOuterIterations
__all__ = ['compose_nodes', 'copy_arrays']
def compose_nodes(nodes):
"""Build an Iteration/Expression tree by nesting the nodes in ``nodes``."""
l = list(nodes)
body = l.pop(-1)
while l:
handle = l.pop(-1)
body = handle._rebuild(body, **handle.args_frozen)
return body
def copy_arrays(mapper):
"""Build an Iteration/Expression tree performing the copy ``k = v`` for each
(k, v) in mapper. (k, v) are expected to be of type :class:`IndexedData`."""
# Build the Iteration tree for the copy
iterations = []
for k, v in mapper.items():
handle = []
indices = k.function.indices
for i, j in zip(k.shape, indices):
handle.append(Iteration([], dimension=j, limits=j.symbolic_size))
handle.append(Element(c.Assign(ccode(k[indices]), ccode(v[indices]))))
iterations.append(compose_nodes(handle))
# Maybe some Iterations are mergeable
iterations = MergeOuterIterations().visit(iterations)
return iterations
## Instruction:
Use Expression, not Element, in copy_arrays
## Code After:
from sympy import Eq
from devito.codeprinter import ccode
from devito.nodes import Expression, Iteration
from devito.visitors import MergeOuterIterations
__all__ = ['compose_nodes', 'copy_arrays']
def compose_nodes(nodes):
"""Build an Iteration/Expression tree by nesting the nodes in ``nodes``."""
l = list(nodes)
body = l.pop(-1)
while l:
handle = l.pop(-1)
body = handle._rebuild(body, **handle.args_frozen)
return body
def copy_arrays(mapper):
"""Build an Iteration/Expression tree performing the copy ``k = v`` for each
(k, v) in mapper. (k, v) are expected to be of type :class:`IndexedData`."""
# Build the Iteration tree for the copy
iterations = []
for k, v in mapper.items():
handle = []
indices = k.function.indices
for i, j in zip(k.shape, indices):
handle.append(Iteration([], dimension=j, limits=j.symbolic_size))
handle.append(Expression(Eq(k[indices], v[indices]), dtype=k.function.dtype))
iterations.append(compose_nodes(handle))
# Maybe some Iterations are mergeable
iterations = MergeOuterIterations().visit(iterations)
return iterations
|
# ... existing code ...
from sympy import Eq
from devito.codeprinter import ccode
from devito.nodes import Expression, Iteration
from devito.visitors import MergeOuterIterations
__all__ = ['compose_nodes', 'copy_arrays']
# ... modified code ...
indices = k.function.indices
for i, j in zip(k.shape, indices):
handle.append(Iteration([], dimension=j, limits=j.symbolic_size))
handle.append(Expression(Eq(k[indices], v[indices]), dtype=k.function.dtype))
iterations.append(compose_nodes(handle))
# Maybe some Iterations are mergeable
# ... rest of the code ...
|
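The devito record above replaces a cgen string assignment with a symbolic equation handed to devito's Expression node. As a rough illustration of the symbolic half only, using plain SymPy and made-up array names rather than devito internals, the copy ``k = v`` can be written as an Eq between indexed accesses that a code generator later lowers into loop-nested assignments.
from sympy import Eq, IndexedBase, symbols
i, j = symbols('i j')
u = IndexedBase('u')
v = IndexedBase('v')
# The copy is stated declaratively as an equality between indexed accesses;
# lowering it to an assignment inside nested loops is left to the backend.
copy_eq = Eq(u[i, j], v[i, j])
print(copy_eq)  # Eq(u[i, j], v[i, j])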
020015cccceb3c2391c4764ee2ec29dfc5c461c6
|
__init__.py
|
__init__.py
|
from . import LayerView
def getMetaData():
return { "name": "LayerView", "type": "View" }
def register(app):
app.getController().addView("LayerView", LayerView.LayerView())
|
from . import LayerView
def getMetaData():
return { "name": "LayerView", "type": "View" }
def register(app):
return LayerView.LayerView()
|
Update plugin's register functions to return the object instance instead of performing the registration themselves
|
Update plugin's register functions to return the object instance instead of performing the registration themselves
|
Python
|
agpl-3.0
|
Curahelper/Cura,bq/Ultimaker-Cura,ad1217/Cura,bq/Ultimaker-Cura,senttech/Cura,lo0ol/Ultimaker-Cura,quillford/Cura,derekhe/Cura,ynotstartups/Wanhao,markwal/Cura,lo0ol/Ultimaker-Cura,senttech/Cura,DeskboxBrazil/Cura,ynotstartups/Wanhao,totalretribution/Cura,ad1217/Cura,fieldOfView/Cura,quillford/Cura,fxtentacle/Cura,derekhe/Cura,hmflash/Cura,DeskboxBrazil/Cura,fieldOfView/Cura,totalretribution/Cura,Curahelper/Cura,markwal/Cura,fxtentacle/Cura,hmflash/Cura
|
from . import LayerView
def getMetaData():
return { "name": "LayerView", "type": "View" }
def register(app):
- app.getController().addView("LayerView", LayerView.LayerView())
+ return LayerView.LayerView()
|
Update plugin's register functions to return the object instance instead of performing the registration themselves
|
## Code Before:
from . import LayerView
def getMetaData():
return { "name": "LayerView", "type": "View" }
def register(app):
app.getController().addView("LayerView", LayerView.LayerView())
## Instruction:
Update plugin's register functions to return the object instance instead of performing the registration themselves
## Code After:
from . import LayerView
def getMetaData():
return { "name": "LayerView", "type": "View" }
def register(app):
return LayerView.LayerView()
|
# ... existing code ...
return { "name": "LayerView", "type": "View" }
def register(app):
return LayerView.LayerView()
# ... rest of the code ...
|
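The LayerView record reflects a plugin API in which register() hands back an instance and the host decides how to wire it in. A hypothetical host-side sketch of that contract (FakeApp and its attributes are invented for illustration; the real Cura/Uranium API may differ) could look like this:
class FakeApp:
    def __init__(self):
        self.views = {}
    def load_plugin(self, plugin_module):
        # The plugin only constructs and returns its object ...
        instance = plugin_module.register(self)
        meta = plugin_module.getMetaData()
        # ... and the host performs the actual registration, keeping that policy in one place.
        if meta.get("type") == "View":
            self.views[meta["name"]] = instance
        return instance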
cabc7da28989f1cc000f7219845222992846631a
|
datasets/templatetags/general_templatetags.py
|
datasets/templatetags/general_templatetags.py
|
from django import template
from django.core import urlresolvers
import datetime
import time
register = template.Library()
# Adapted from: http://blog.scur.pl/2012/09/highlighting-current-active-page-django/
@register.simple_tag(takes_context=True)
def current(context, url_name, return_value=' current', **kwargs):
matches = current_url_equals(context, url_name, **kwargs)
return return_value if matches else ''
# Adapted from: http://blog.scur.pl/2012/09/highlighting-current-active-page-django/
def current_url_equals(context, url_name, **kwargs):
resolved = False
try:
resolved = urlresolvers.resolve(context.get('request').path)
except:
pass
matches = resolved and resolved.url_name == url_name
if matches and kwargs:
for key in kwargs:
kwarg = kwargs.get(key)
resolved_kwarg = resolved.kwargs.get(key)
if not resolved_kwarg or kwarg != resolved_kwarg:
return False
return matches
@register.simple_tag(takes_context=True)
def active_if_current(context, url_name, return_value=' current', **kwargs):
if current(context, url_name, return_value, **kwargs):
return 'active '
else:
return ''
@register.filter()
def timestamp_to_datetime(value):
return datetime.datetime.fromtimestamp(value)
|
from django import template
from django.core import urlresolvers
import datetime
import time
register = template.Library()
# Adapted from: http://blog.scur.pl/2012/09/highlighting-current-active-page-django/
@register.simple_tag(takes_context=True)
def current(context, url_name, return_value=' current', **kwargs):
matches = current_url_equals(context, url_name, **kwargs)
return return_value if matches else ''
# Adapted from: http://blog.scur.pl/2012/09/highlighting-current-active-page-django/
def current_url_equals(context, url_name, **kwargs):
resolved = False
try:
resolved = urlresolvers.resolve(context.get('request').path)
except:
pass
matches = resolved and resolved.url_name == url_name
if matches and kwargs:
for key in kwargs:
kwarg = kwargs.get(key)
resolved_kwarg = resolved.kwargs.get(key)
if not resolved_kwarg or kwarg != resolved_kwarg:
return False
return matches
@register.simple_tag(takes_context=True)
def active_if_current(context, url_name, return_value=' current', **kwargs):
if current(context, url_name, return_value, **kwargs):
return 'active '
else:
return ''
@register.filter()
def timestamp_to_datetime(value):
if not value or value is None:
return None
return datetime.datetime.fromtimestamp(value)
|
Fix bug with timestamp_to_datetime when value is not a number
|
Fix bug with timestamp_to_datetime when value is not a number
|
Python
|
agpl-3.0
|
MTG/freesound-datasets,MTG/freesound-datasets,MTG/freesound-datasets,MTG/freesound-datasets
|
from django import template
from django.core import urlresolvers
import datetime
import time
register = template.Library()
# Adapted from: http://blog.scur.pl/2012/09/highlighting-current-active-page-django/
@register.simple_tag(takes_context=True)
def current(context, url_name, return_value=' current', **kwargs):
matches = current_url_equals(context, url_name, **kwargs)
return return_value if matches else ''
# Adapted from: http://blog.scur.pl/2012/09/highlighting-current-active-page-django/
def current_url_equals(context, url_name, **kwargs):
resolved = False
try:
resolved = urlresolvers.resolve(context.get('request').path)
except:
pass
matches = resolved and resolved.url_name == url_name
if matches and kwargs:
for key in kwargs:
kwarg = kwargs.get(key)
resolved_kwarg = resolved.kwargs.get(key)
if not resolved_kwarg or kwarg != resolved_kwarg:
return False
return matches
@register.simple_tag(takes_context=True)
def active_if_current(context, url_name, return_value=' current', **kwargs):
if current(context, url_name, return_value, **kwargs):
return 'active '
else:
return ''
@register.filter()
def timestamp_to_datetime(value):
+ if not value or value is None:
+ return None
return datetime.datetime.fromtimestamp(value)
|
Fix bug with timestamp_to_datetime when value is not a number
|
## Code Before:
from django import template
from django.core import urlresolvers
import datetime
import time
register = template.Library()
# Adapted from: http://blog.scur.pl/2012/09/highlighting-current-active-page-django/
@register.simple_tag(takes_context=True)
def current(context, url_name, return_value=' current', **kwargs):
matches = current_url_equals(context, url_name, **kwargs)
return return_value if matches else ''
# Adapted from: http://blog.scur.pl/2012/09/highlighting-current-active-page-django/
def current_url_equals(context, url_name, **kwargs):
resolved = False
try:
resolved = urlresolvers.resolve(context.get('request').path)
except:
pass
matches = resolved and resolved.url_name == url_name
if matches and kwargs:
for key in kwargs:
kwarg = kwargs.get(key)
resolved_kwarg = resolved.kwargs.get(key)
if not resolved_kwarg or kwarg != resolved_kwarg:
return False
return matches
@register.simple_tag(takes_context=True)
def active_if_current(context, url_name, return_value=' current', **kwargs):
if current(context, url_name, return_value, **kwargs):
return 'active '
else:
return ''
@register.filter()
def timestamp_to_datetime(value):
return datetime.datetime.fromtimestamp(value)
## Instruction:
Fix bug with timestamp_to_datetime when value is not a number
## Code After:
from django import template
from django.core import urlresolvers
import datetime
import time
register = template.Library()
# Adapted from: http://blog.scur.pl/2012/09/highlighting-current-active-page-django/
@register.simple_tag(takes_context=True)
def current(context, url_name, return_value=' current', **kwargs):
matches = current_url_equals(context, url_name, **kwargs)
return return_value if matches else ''
# Adapted from: http://blog.scur.pl/2012/09/highlighting-current-active-page-django/
def current_url_equals(context, url_name, **kwargs):
resolved = False
try:
resolved = urlresolvers.resolve(context.get('request').path)
except:
pass
matches = resolved and resolved.url_name == url_name
if matches and kwargs:
for key in kwargs:
kwarg = kwargs.get(key)
resolved_kwarg = resolved.kwargs.get(key)
if not resolved_kwarg or kwarg != resolved_kwarg:
return False
return matches
@register.simple_tag(takes_context=True)
def active_if_current(context, url_name, return_value=' current', **kwargs):
if current(context, url_name, return_value, **kwargs):
return 'active '
else:
return ''
@register.filter()
def timestamp_to_datetime(value):
if not value or value is None:
return None
return datetime.datetime.fromtimestamp(value)
|
...
@register.filter()
def timestamp_to_datetime(value):
if not value or value is None:
return None
return datetime.datetime.fromtimestamp(value)
...
|
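Stripped of the Django filter registration, the fix in the record above amounts to short-circuiting falsy inputs before calling fromtimestamp, which raises TypeError on None; note that the record's ``or value is None`` clause is redundant, since ``not value`` already covers None. The timestamp below is an arbitrary example.
import datetime
def timestamp_to_datetime(value):
    # Missing template variables arrive as None or '' here; bail out before fromtimestamp.
    if not value:
        return None
    return datetime.datetime.fromtimestamp(value)
print(timestamp_to_datetime(None))        # None
print(timestamp_to_datetime(1500000000))  # 2017-07-14 ... in the local timezone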
645cbd9a4445898f69b1ecd9f3db7d5e7b7b9dbd
|
libqtile/layout/__init__.py
|
libqtile/layout/__init__.py
|
from stack import Stack
from max import Max
from tile import Tile
from floating import Floating
from ratiotile import RatioTile
from tree import TreeTab
from slice import Slice
|
from stack import Stack
from max import Max
from tile import Tile
from floating import Floating
from ratiotile import RatioTile
from tree import TreeTab
from slice import Slice
from xmonad import MonadTall
|
Add MonadTall to layout module proper.
|
Add MonadTall to layout module proper.
Fixes #126
|
Python
|
mit
|
nxnfufunezn/qtile,qtile/qtile,de-vri-es/qtile,jdowner/qtile,tych0/qtile,w1ndy/qtile,soulchainer/qtile,rxcomm/qtile,flacjacket/qtile,w1ndy/qtile,ramnes/qtile,andrewyoung1991/qtile,frostidaho/qtile,himaaaatti/qtile,cortesi/qtile,encukou/qtile,himaaaatti/qtile,de-vri-es/qtile,kopchik/qtile,qtile/qtile,zordsdavini/qtile,StephenBarnes/qtile,flacjacket/qtile,kiniou/qtile,tych0/qtile,rxcomm/qtile,frostidaho/qtile,farebord/qtile,EndPointCorp/qtile,encukou/qtile,zordsdavini/qtile,jdowner/qtile,apinsard/qtile,aniruddhkanojia/qtile,xplv/qtile,ramnes/qtile,apinsard/qtile,kseistrup/qtile,cortesi/qtile,dequis/qtile,farebord/qtile,xplv/qtile,EndPointCorp/qtile,kopchik/qtile,nxnfufunezn/qtile,soulchainer/qtile,kseistrup/qtile,dequis/qtile,aniruddhkanojia/qtile,andrewyoung1991/qtile,StephenBarnes/qtile,kynikos/qtile,kiniou/qtile,kynikos/qtile
|
from stack import Stack
from max import Max
from tile import Tile
from floating import Floating
from ratiotile import RatioTile
from tree import TreeTab
from slice import Slice
+ from xmonad import MonadTall
|
Add MonadTall to layout module proper.
|
## Code Before:
from stack import Stack
from max import Max
from tile import Tile
from floating import Floating
from ratiotile import RatioTile
from tree import TreeTab
from slice import Slice
## Instruction:
Add MonadTall to layout module proper.
## Code After:
from stack import Stack
from max import Max
from tile import Tile
from floating import Floating
from ratiotile import RatioTile
from tree import TreeTab
from slice import Slice
from xmonad import MonadTall
|
// ... existing code ...
from ratiotile import RatioTile
from tree import TreeTab
from slice import Slice
from xmonad import MonadTall
// ... rest of the code ...
|
6c6934e8a36429e2a988835d8bd4d66fe95e306b
|
tensorflow_datasets/image/cifar_test.py
|
tensorflow_datasets/image/cifar_test.py
|
"""Tests for cifar dataset module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow_datasets.image import cifar
from tensorflow_datasets.testing import dataset_builder_testing
class Cifar10Test(dataset_builder_testing.TestCase):
DATASET_CLASS = cifar.Cifar10
SPLITS = {
"train": 10, # Number of examples.
"test": 2, # See testing/generate_cifar10_like_example.py
}
if __name__ == "__main__":
dataset_builder_testing.main()
|
"""Tests for cifar dataset module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow_datasets.image import cifar
from tensorflow_datasets.testing import dataset_builder_testing
class Cifar10Test(dataset_builder_testing.TestCase):
DATASET_CLASS = cifar.Cifar10
SPLITS = {
"train": 10, # Number of examples.
"test": 2, # See testing/cifar10.py
}
if __name__ == "__main__":
dataset_builder_testing.main()
|
Move references of deleted generate_cifar10_like_example.py to the new name cifar.py
|
Move references of deleted generate_cifar10_like_example.py to the new name cifar.py
PiperOrigin-RevId: 225386826
|
Python
|
apache-2.0
|
tensorflow/datasets,tensorflow/datasets,tensorflow/datasets,tensorflow/datasets,tensorflow/datasets
|
"""Tests for cifar dataset module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow_datasets.image import cifar
from tensorflow_datasets.testing import dataset_builder_testing
class Cifar10Test(dataset_builder_testing.TestCase):
DATASET_CLASS = cifar.Cifar10
SPLITS = {
"train": 10, # Number of examples.
- "test": 2, # See testing/generate_cifar10_like_example.py
+ "test": 2, # See testing/cifar10.py
}
if __name__ == "__main__":
dataset_builder_testing.main()
|
Move references of deleted generate_cifar10_like_example.py to the new name cifar.py
|
## Code Before:
"""Tests for cifar dataset module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow_datasets.image import cifar
from tensorflow_datasets.testing import dataset_builder_testing
class Cifar10Test(dataset_builder_testing.TestCase):
DATASET_CLASS = cifar.Cifar10
SPLITS = {
"train": 10, # Number of examples.
"test": 2, # See testing/generate_cifar10_like_example.py
}
if __name__ == "__main__":
dataset_builder_testing.main()
## Instruction:
Move references of deleted generate_cifar10_like_example.py to the new name cifar.py
## Code After:
"""Tests for cifar dataset module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow_datasets.image import cifar
from tensorflow_datasets.testing import dataset_builder_testing
class Cifar10Test(dataset_builder_testing.TestCase):
DATASET_CLASS = cifar.Cifar10
SPLITS = {
"train": 10, # Number of examples.
"test": 2, # See testing/cifar10.py
}
if __name__ == "__main__":
dataset_builder_testing.main()
|
# ... existing code ...
DATASET_CLASS = cifar.Cifar10
SPLITS = {
"train": 10, # Number of examples.
"test": 2, # See testing/cifar10.py
}
# ... rest of the code ...
|
3cfa4f48c6bf28ed4273004d9a44173ecb4b195c
|
parliament/templatetags/parliament.py
|
parliament/templatetags/parliament.py
|
from django import template
register = template.Library()
@register.filter(name='governing')
def governing(party, date):
return party.is_governing(date)
|
from django import template
from ..models import Party, Statement
register = template.Library()
@register.filter(name='governing')
def governing(obj, date=None):
if isinstance(obj, Party):
assert date is not None, "Date must be supplied when 'govern' is called with a Party object"
return obj.is_governing(date)
elif isinstance(obj, Statement):
if obj.member is None:
if 'ministeri' in obj.speaker_role:
return True
else:
return False
if date is None:
date = obj.item.plsess.date
return obj.member.party.is_governing(date)
|
Allow governing templatetag to be called with a Statement object
|
Allow governing templatetag to be called with a Statement object
|
Python
|
agpl-3.0
|
kansanmuisti/kamu,kansanmuisti/kamu,kansanmuisti/kamu,kansanmuisti/kamu,kansanmuisti/kamu
|
from django import template
+ from ..models import Party, Statement
register = template.Library()
@register.filter(name='governing')
- def governing(party, date):
+ def governing(obj, date=None):
+ if isinstance(obj, Party):
+ assert date is not None, "Date must be supplied when 'govern' is called with a Party object"
- return party.is_governing(date)
+ return obj.is_governing(date)
+ elif isinstance(obj, Statement):
+ if obj.member is None:
+ if 'ministeri' in obj.speaker_role:
+ return True
+ else:
+ return False
+ if date is None:
+ date = obj.item.plsess.date
+ return obj.member.party.is_governing(date)
-
|
Allow governing templatetag to be called with a Statement object
|
## Code Before:
from django import template
register = template.Library()
@register.filter(name='governing')
def governing(party, date):
return party.is_governing(date)
## Instruction:
Allow governing templatetag to be called with a Statement object
## Code After:
from django import template
from ..models import Party, Statement
register = template.Library()
@register.filter(name='governing')
def governing(obj, date=None):
if isinstance(obj, Party):
assert date is not None, "Date must be supplied when 'govern' is called with a Party object"
return obj.is_governing(date)
elif isinstance(obj, Statement):
if obj.member is None:
if 'ministeri' in obj.speaker_role:
return True
else:
return False
if date is None:
date = obj.item.plsess.date
return obj.member.party.is_governing(date)
|
...
from django import template
from ..models import Party, Statement
register = template.Library()
@register.filter(name='governing')
def governing(obj, date=None):
if isinstance(obj, Party):
assert date is not None, "Date must be supplied when 'govern' is called with a Party object"
return obj.is_governing(date)
elif isinstance(obj, Statement):
if obj.member is None:
if 'ministeri' in obj.speaker_role:
return True
else:
return False
if date is None:
date = obj.item.plsess.date
return obj.member.party.is_governing(date)
...
|
abfe0538769145ac83031062ce3b22d2622f18bf
|
opwen_email_server/utils/temporary.py
|
opwen_email_server/utils/temporary.py
|
from contextlib import contextmanager
from contextlib import suppress
from os import close
from os import remove
from tempfile import mkstemp
def create_tempfilename() -> str:
file_descriptor, filename = mkstemp()
close(file_descriptor)
return filename
@contextmanager
def removing(path: str) -> str:
try:
yield path
finally:
_remove_if_exists(path)
def _remove_if_exists(path: str):
with suppress(FileNotFoundError):
remove(path)
|
from contextlib import contextmanager
from contextlib import suppress
from os import close
from os import remove
from tempfile import mkstemp
from typing import Generator
def create_tempfilename() -> str:
file_descriptor, filename = mkstemp()
close(file_descriptor)
return filename
@contextmanager
def removing(path: str) -> Generator[str, None, None]:
try:
yield path
finally:
_remove_if_exists(path)
def _remove_if_exists(path: str):
with suppress(FileNotFoundError):
remove(path)
|
Fix type annotation for context manager
|
Fix type annotation for context manager
|
Python
|
apache-2.0
|
ascoderu/opwen-cloudserver,ascoderu/opwen-cloudserver
|
from contextlib import contextmanager
from contextlib import suppress
from os import close
from os import remove
from tempfile import mkstemp
+ from typing import Generator
def create_tempfilename() -> str:
file_descriptor, filename = mkstemp()
close(file_descriptor)
return filename
@contextmanager
- def removing(path: str) -> str:
+ def removing(path: str) -> Generator[str, None, None]:
try:
yield path
finally:
_remove_if_exists(path)
def _remove_if_exists(path: str):
with suppress(FileNotFoundError):
remove(path)
|
Fix type annotation for context manager
|
## Code Before:
from contextlib import contextmanager
from contextlib import suppress
from os import close
from os import remove
from tempfile import mkstemp
def create_tempfilename() -> str:
file_descriptor, filename = mkstemp()
close(file_descriptor)
return filename
@contextmanager
def removing(path: str) -> str:
try:
yield path
finally:
_remove_if_exists(path)
def _remove_if_exists(path: str):
with suppress(FileNotFoundError):
remove(path)
## Instruction:
Fix type annotation for context manager
## Code After:
from contextlib import contextmanager
from contextlib import suppress
from os import close
from os import remove
from tempfile import mkstemp
from typing import Generator
def create_tempfilename() -> str:
file_descriptor, filename = mkstemp()
close(file_descriptor)
return filename
@contextmanager
def removing(path: str) -> Generator[str, None, None]:
try:
yield path
finally:
_remove_if_exists(path)
def _remove_if_exists(path: str):
with suppress(FileNotFoundError):
remove(path)
|
// ... existing code ...
from os import close
from os import remove
from tempfile import mkstemp
from typing import Generator
def create_tempfilename() -> str:
// ... modified code ...
@contextmanager
def removing(path: str) -> Generator[str, None, None]:
try:
yield path
finally:
// ... rest of the code ...
|
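A minimal standalone sketch of the corrected annotation, kept separate from the record above; the path used here is made up. The point is that a @contextmanager function yields its value, so type checkers expect Generator[str, None, None] rather than a plain str return:
from contextlib import contextmanager
from typing import Generator
@contextmanager
def removing(path: str) -> Generator[str, None, None]:
    try:
        yield path
    finally:
        pass  # cleanup elided in this sketch
with removing('/tmp/example.txt') as path:
    print(path)  # /tmp/example.txt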
c5ef250240cbaa894ee84615c5d07a383bd16962
|
fluent_contents/plugins/oembeditem/content_plugins.py
|
fluent_contents/plugins/oembeditem/content_plugins.py
|
from django.utils.translation import ugettext_lazy as _
from fluent_contents.extensions import ContentPlugin, plugin_pool
from fluent_contents.plugins.oembeditem.forms import OEmbedItemForm
from fluent_contents.plugins.oembeditem.models import OEmbedItem
@plugin_pool.register
class OEmbedPlugin(ContentPlugin):
model = OEmbedItem
category = _('Online content')
form = OEmbedItemForm
render_template = "fluent_contents/plugins/oembed/default.html"
class Media:
css = {
'screen': (
'fluent_contents/plugins/oembed/oembed_admin.css',
)
}
def get_render_template(self, request, instance, **kwargs):
"""
Allow to style the item based on the type.
"""
return ["fluent_contents/plugins/oembed/{type}.html".format(type=instance.type or 'default'), self.render_template]
|
from django.utils.translation import ugettext_lazy as _
from fluent_contents.extensions import ContentPlugin, plugin_pool
from fluent_contents.plugins.oembeditem.forms import OEmbedItemForm
from fluent_contents.plugins.oembeditem.models import OEmbedItem
import re
re_safe = re.compile(r'[^\w_-]')
@plugin_pool.register
class OEmbedPlugin(ContentPlugin):
model = OEmbedItem
category = _('Online content')
form = OEmbedItemForm
render_template = "fluent_contents/plugins/oembed/default.html"
class Media:
css = {
'screen': (
'fluent_contents/plugins/oembed/oembed_admin.css',
)
}
def get_render_template(self, request, instance, **kwargs):
"""
Allow to style the item based on the type.
"""
safe_filename = re_safe.sub('', instance.type or 'default')
return [
"fluent_contents/plugins/oembed/{type}.html".format(type=safe_filename),
self.render_template
]
|
Make sure the OEmbed type can never be used to control filenames.
|
Make sure the OEmbed type can never be used to control filenames.
Minor risk, as it's still a template path, but better be safe than sorry.
|
Python
|
apache-2.0
|
pombredanne/django-fluent-contents,edoburu/django-fluent-contents,jpotterm/django-fluent-contents,ixc/django-fluent-contents,pombredanne/django-fluent-contents,ixc/django-fluent-contents,jpotterm/django-fluent-contents,edoburu/django-fluent-contents,django-fluent/django-fluent-contents,pombredanne/django-fluent-contents,ixc/django-fluent-contents,django-fluent/django-fluent-contents,django-fluent/django-fluent-contents,edoburu/django-fluent-contents,jpotterm/django-fluent-contents
|
from django.utils.translation import ugettext_lazy as _
from fluent_contents.extensions import ContentPlugin, plugin_pool
from fluent_contents.plugins.oembeditem.forms import OEmbedItemForm
from fluent_contents.plugins.oembeditem.models import OEmbedItem
+ import re
+
+ re_safe = re.compile(r'[^\w_-]')
@plugin_pool.register
class OEmbedPlugin(ContentPlugin):
model = OEmbedItem
category = _('Online content')
form = OEmbedItemForm
render_template = "fluent_contents/plugins/oembed/default.html"
class Media:
css = {
'screen': (
'fluent_contents/plugins/oembed/oembed_admin.css',
)
}
def get_render_template(self, request, instance, **kwargs):
"""
Allow to style the item based on the type.
"""
- return ["fluent_contents/plugins/oembed/{type}.html".format(type=instance.type or 'default'), self.render_template]
+ safe_filename = re_safe.sub('', instance.type or 'default')
+ return [
+ "fluent_contents/plugins/oembed/{type}.html".format(type=safe_filename),
+ self.render_template
+ ]
|
Make sure the OEmbed type can never be used to control filenames.
|
## Code Before:
from django.utils.translation import ugettext_lazy as _
from fluent_contents.extensions import ContentPlugin, plugin_pool
from fluent_contents.plugins.oembeditem.forms import OEmbedItemForm
from fluent_contents.plugins.oembeditem.models import OEmbedItem
@plugin_pool.register
class OEmbedPlugin(ContentPlugin):
model = OEmbedItem
category = _('Online content')
form = OEmbedItemForm
render_template = "fluent_contents/plugins/oembed/default.html"
class Media:
css = {
'screen': (
'fluent_contents/plugins/oembed/oembed_admin.css',
)
}
def get_render_template(self, request, instance, **kwargs):
"""
Allow to style the item based on the type.
"""
return ["fluent_contents/plugins/oembed/{type}.html".format(type=instance.type or 'default'), self.render_template]
## Instruction:
Make sure the OEmbed type can never be used to control filenames.
## Code After:
from django.utils.translation import ugettext_lazy as _
from fluent_contents.extensions import ContentPlugin, plugin_pool
from fluent_contents.plugins.oembeditem.forms import OEmbedItemForm
from fluent_contents.plugins.oembeditem.models import OEmbedItem
import re
re_safe = re.compile(r'[^\w_-]')
@plugin_pool.register
class OEmbedPlugin(ContentPlugin):
model = OEmbedItem
category = _('Online content')
form = OEmbedItemForm
render_template = "fluent_contents/plugins/oembed/default.html"
class Media:
css = {
'screen': (
'fluent_contents/plugins/oembed/oembed_admin.css',
)
}
def get_render_template(self, request, instance, **kwargs):
"""
Allow to style the item based on the type.
"""
safe_filename = re_safe.sub('', instance.type or 'default')
return [
"fluent_contents/plugins/oembed/{type}.html".format(type=safe_filename),
self.render_template
]
|
# ... existing code ...
from fluent_contents.extensions import ContentPlugin, plugin_pool
from fluent_contents.plugins.oembeditem.forms import OEmbedItemForm
from fluent_contents.plugins.oembeditem.models import OEmbedItem
import re
re_safe = re.compile(r'[^\w_-]')
@plugin_pool.register
# ... modified code ...
"""
Allow to style the item based on the type.
"""
safe_filename = re_safe.sub('', instance.type or 'default')
return [
"fluent_contents/plugins/oembed/{type}.html".format(type=safe_filename),
self.render_template
]
# ... rest of the code ...
|
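A small self-contained check of the sanitizing pattern above; the sample type strings are invented for illustration:
import re
re_safe = re.compile(r'[^\w_-]')
for oembed_type in ('video', '../secret', ''):
    safe = re_safe.sub('', oembed_type or 'default')
    print('fluent_contents/plugins/oembed/{type}.html'.format(type=safe))
# fluent_contents/plugins/oembed/video.html
# fluent_contents/plugins/oembed/secret.html -- path characters are stripped
# fluent_contents/plugins/oembed/default.html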
89b23ce8abd259ace055c35b0da47428bdcbc37a
|
scripts/server/client_example.py
|
scripts/server/client_example.py
|
from __future__ import print_function, unicode_literals, division
import sys
import time
import argparse
from websocket import create_connection
def translate(batch, port=8080):
ws = create_connection("ws://localhost:{}/translate".format(port))
#print(batch.rstrip())
ws.send(batch)
result = ws.recv()
print(result.rstrip())
ws.close()
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument("-b", "--batch-size", type=int, default=1)
parser.add_argument("-p", "--port", type=int, default=8080)
return parser.parse_args()
if __name__ == "__main__":
args = parse_args()
count = 0
batch = ""
for line in sys.stdin:
count += 1
batch += line
if count == args.batch_size:
translate(batch, port=args.port)
count = 0
batch = ""
if count:
translate(batch, port=args.port)
|
from __future__ import print_function, unicode_literals, division
import sys
import time
import argparse
from websocket import create_connection
def translate(batch, port=8080):
ws = create_connection("ws://localhost:{}/translate".format(port))
#print(batch.rstrip())
ws.send(batch)
result = ws.recv()
print(result.rstrip())
ws.close()
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument("-b", "--batch-size", type=int, default=1)
parser.add_argument("-p", "--port", type=int, default=8080)
return parser.parse_args()
if __name__ == "__main__":
args = parse_args()
count = 0
batch = ""
for line in sys.stdin:
count += 1
batch += line.decode('utf-8') if sys.version_info < (3, 0) else line
if count == args.batch_size:
translate(batch, port=args.port)
count = 0
batch = ""
if count:
translate(batch, port=args.port)
|
Fix decoding error with python2
|
Fix decoding error with python2
|
Python
|
mit
|
marian-nmt/marian-train,emjotde/amunn,amunmt/marian,emjotde/amunmt,emjotde/amunmt,emjotde/amunmt,marian-nmt/marian-train,marian-nmt/marian-train,marian-nmt/marian-train,amunmt/marian,amunmt/marian,emjotde/amunn,emjotde/amunn,marian-nmt/marian-train,emjotde/amunmt,emjotde/amunn,emjotde/Marian,emjotde/Marian
|
from __future__ import print_function, unicode_literals, division
import sys
import time
import argparse
from websocket import create_connection
def translate(batch, port=8080):
ws = create_connection("ws://localhost:{}/translate".format(port))
#print(batch.rstrip())
ws.send(batch)
result = ws.recv()
print(result.rstrip())
ws.close()
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument("-b", "--batch-size", type=int, default=1)
parser.add_argument("-p", "--port", type=int, default=8080)
return parser.parse_args()
if __name__ == "__main__":
args = parse_args()
count = 0
batch = ""
for line in sys.stdin:
count += 1
- batch += line
+ batch += line.decode('utf-8') if sys.version_info < (3, 0) else line
if count == args.batch_size:
translate(batch, port=args.port)
count = 0
batch = ""
if count:
translate(batch, port=args.port)
|
Fix decoding error with python2
|
## Code Before:
from __future__ import print_function, unicode_literals, division
import sys
import time
import argparse
from websocket import create_connection
def translate(batch, port=8080):
ws = create_connection("ws://localhost:{}/translate".format(port))
#print(batch.rstrip())
ws.send(batch)
result = ws.recv()
print(result.rstrip())
ws.close()
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument("-b", "--batch-size", type=int, default=1)
parser.add_argument("-p", "--port", type=int, default=8080)
return parser.parse_args()
if __name__ == "__main__":
args = parse_args()
count = 0
batch = ""
for line in sys.stdin:
count += 1
batch += line
if count == args.batch_size:
translate(batch, port=args.port)
count = 0
batch = ""
if count:
translate(batch, port=args.port)
## Instruction:
Fix decoding error with python2
## Code After:
from __future__ import print_function, unicode_literals, division
import sys
import time
import argparse
from websocket import create_connection
def translate(batch, port=8080):
ws = create_connection("ws://localhost:{}/translate".format(port))
#print(batch.rstrip())
ws.send(batch)
result = ws.recv()
print(result.rstrip())
ws.close()
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument("-b", "--batch-size", type=int, default=1)
parser.add_argument("-p", "--port", type=int, default=8080)
return parser.parse_args()
if __name__ == "__main__":
args = parse_args()
count = 0
batch = ""
for line in sys.stdin:
count += 1
batch += line.decode('utf-8') if sys.version_info < (3, 0) else line
if count == args.batch_size:
translate(batch, port=args.port)
count = 0
batch = ""
if count:
translate(batch, port=args.port)
|
# ... existing code ...
batch = ""
for line in sys.stdin:
count += 1
batch += line.decode('utf-8') if sys.version_info < (3, 0) else line
if count == args.batch_size:
translate(batch, port=args.port)
count = 0
# ... rest of the code ...
|
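A quick standalone check of the version guard used above; the byte string stands in for a line read from sys.stdin under Python 2 and is illustrative only:
import sys
raw = b"caf\xc3\xa9\n"
line = raw if sys.version_info < (3, 0) else raw.decode('utf-8')
batch = ""
batch += line.decode('utf-8') if sys.version_info < (3, 0) else line
print(type(batch).__name__)  # unicode on Python 2, str on Python 3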
749aa35a85b6482cfba9dec7d37473a787d73c32
|
integration-test/1106-merge-ocean-earth.py
|
integration-test/1106-merge-ocean-earth.py
|
assert_less_than_n_features(9, 167, 186, 'water', {'kind': 'ocean'}, 2)
# There should be a single (merged) earth feature in this tile
assert_less_than_n_features(9, 170, 186, 'earth', {'kind': 'earth'}, 2)
|
assert_less_than_n_features(5, 11, 11, 'water', {'kind': 'ocean'}, 2)
assert_less_than_n_features(5, 8, 11, 'earth', {'kind': 'earth'}, 2)
# OpenStreetMap
assert_less_than_n_features(9, 167, 186, 'water', {'kind': 'ocean'}, 2)
assert_less_than_n_features(9, 170, 186, 'earth', {'kind': 'earth'}, 2)
|
Add lowzoom tests for polygon merging
|
Add lowzoom tests for polygon merging
|
Python
|
mit
|
mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource
|
+ assert_less_than_n_features(5, 11, 11, 'water', {'kind': 'ocean'}, 2)
+ assert_less_than_n_features(5, 8, 11, 'earth', {'kind': 'earth'}, 2)
+
+ # OpenStreetMap
assert_less_than_n_features(9, 167, 186, 'water', {'kind': 'ocean'}, 2)
- # There should be a single (merged) earth feature in this tile
assert_less_than_n_features(9, 170, 186, 'earth', {'kind': 'earth'}, 2)
|
Add lowzoom tests for polygon merging
|
## Code Before:
assert_less_than_n_features(9, 167, 186, 'water', {'kind': 'ocean'}, 2)
# There should be a single (merged) earth feature in this tile
assert_less_than_n_features(9, 170, 186, 'earth', {'kind': 'earth'}, 2)
## Instruction:
Add lowzoom tests for polygon merging
## Code After:
assert_less_than_n_features(5, 11, 11, 'water', {'kind': 'ocean'}, 2)
assert_less_than_n_features(5, 8, 11, 'earth', {'kind': 'earth'}, 2)
# OpenStreetMap
assert_less_than_n_features(9, 167, 186, 'water', {'kind': 'ocean'}, 2)
assert_less_than_n_features(9, 170, 186, 'earth', {'kind': 'earth'}, 2)
|
# ... existing code ...
assert_less_than_n_features(5, 11, 11, 'water', {'kind': 'ocean'}, 2)
assert_less_than_n_features(5, 8, 11, 'earth', {'kind': 'earth'}, 2)
# OpenStreetMap
assert_less_than_n_features(9, 167, 186, 'water', {'kind': 'ocean'}, 2)
assert_less_than_n_features(9, 170, 186, 'earth', {'kind': 'earth'}, 2)
# ... rest of the code ...
|
f29a6b205a872d7df63e8c45b5829959c98de227
|
comics/comics/pcweenies.py
|
comics/comics/pcweenies.py
|
from comics.aggregator.crawler import CrawlerBase, CrawlerResult
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = 'The PC Weenies'
language = 'en'
url = 'http://www.pcweenies.com/'
start_date = '1998-10-21'
rights = 'Krishna M. Sadasivam'
class Crawler(CrawlerBase):
history_capable_days = 10
schedule = 'Mo,We,Fr'
time_zone = -8
def crawl(self, pub_date):
feed = self.parse_feed('http://www.pcweenies.com/feed/')
for entry in feed.for_date(pub_date):
if 'Comic' in entry.tags:
title = entry.title
url = entry.content0.src(u'img')
return CrawlerResult(url, title)
|
from comics.aggregator.crawler import CrawlerBase, CrawlerResult
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = 'The PC Weenies'
language = 'en'
url = 'http://www.pcweenies.com/'
start_date = '1998-10-21'
rights = 'Krishna M. Sadasivam'
class Crawler(CrawlerBase):
history_capable_days = 10
schedule = 'Mo,We,Fr'
time_zone = -8
def crawl(self, pub_date):
feed = self.parse_feed('http://www.pcweenies.com/feed/')
for entry in feed.for_date(pub_date):
if 'Comic' in entry.tags:
title = entry.title
url = entry.content0.src(u'img[src*="/comics/"]')
return CrawlerResult(url, title)
|
Update CSS selector which matched two img elements
|
Update CSS selector which matched two img elements
|
Python
|
agpl-3.0
|
klette/comics,jodal/comics,jodal/comics,datagutten/comics,jodal/comics,datagutten/comics,jodal/comics,klette/comics,klette/comics,datagutten/comics,datagutten/comics
|
from comics.aggregator.crawler import CrawlerBase, CrawlerResult
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = 'The PC Weenies'
language = 'en'
url = 'http://www.pcweenies.com/'
start_date = '1998-10-21'
rights = 'Krishna M. Sadasivam'
class Crawler(CrawlerBase):
history_capable_days = 10
schedule = 'Mo,We,Fr'
time_zone = -8
def crawl(self, pub_date):
feed = self.parse_feed('http://www.pcweenies.com/feed/')
for entry in feed.for_date(pub_date):
if 'Comic' in entry.tags:
title = entry.title
- url = entry.content0.src(u'img')
+ url = entry.content0.src(u'img[src*="/comics/"]')
return CrawlerResult(url, title)
|
Update CSS selector which matched two img elements
|
## Code Before:
from comics.aggregator.crawler import CrawlerBase, CrawlerResult
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = 'The PC Weenies'
language = 'en'
url = 'http://www.pcweenies.com/'
start_date = '1998-10-21'
rights = 'Krishna M. Sadasivam'
class Crawler(CrawlerBase):
history_capable_days = 10
schedule = 'Mo,We,Fr'
time_zone = -8
def crawl(self, pub_date):
feed = self.parse_feed('http://www.pcweenies.com/feed/')
for entry in feed.for_date(pub_date):
if 'Comic' in entry.tags:
title = entry.title
url = entry.content0.src(u'img')
return CrawlerResult(url, title)
## Instruction:
Update CSS selector which matched two img elements
## Code After:
from comics.aggregator.crawler import CrawlerBase, CrawlerResult
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = 'The PC Weenies'
language = 'en'
url = 'http://www.pcweenies.com/'
start_date = '1998-10-21'
rights = 'Krishna M. Sadasivam'
class Crawler(CrawlerBase):
history_capable_days = 10
schedule = 'Mo,We,Fr'
time_zone = -8
def crawl(self, pub_date):
feed = self.parse_feed('http://www.pcweenies.com/feed/')
for entry in feed.for_date(pub_date):
if 'Comic' in entry.tags:
title = entry.title
url = entry.content0.src(u'img[src*="/comics/"]')
return CrawlerResult(url, title)
|
// ... existing code ...
for entry in feed.for_date(pub_date):
if 'Comic' in entry.tags:
title = entry.title
url = entry.content0.src(u'img[src*="/comics/"]')
return CrawlerResult(url, title)
// ... rest of the code ...
|
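For illustration, the narrowed attribute selector can be tried outside the crawler with BeautifulSoup (assuming bs4 is installed); the HTML sample is made up:
from bs4 import BeautifulSoup
html = '<img src="/ads/banner.png"><img src="/comics/2013-01-07-strip.jpg">'
soup = BeautifulSoup(html, 'html.parser')
print([img['src'] for img in soup.select('img[src*="/comics/"]')])
# ['/comics/2013-01-07-strip.jpg'] -- the banner image no longer matches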
a40a925c29b04b1b6822566e72db4afa5552479c
|
pygame/_error.py
|
pygame/_error.py
|
from pygame._sdl import sdl, ffi
class SDLError(Exception):
"""SDL error."""
@classmethod
def from_sdl_error(cls):
return cls(ffi.string(sdl.SDL_GetError()))
def unpack_rect(rect):
"""Unpack the size and raise a type error if needed."""
if (not hasattr(rect, '__iter__') or
len(rect) != 2 or
not isinstance(rect[0], int) or
not isinstance(rect[1], int)):
raise TypeError("expected tuple of two integers but got %r"
% type(rect))
return rect
def get_error():
err = ffi.string(sdl.SDL_GetError())
if not isinstance(err, str):
return err.decode('utf8')
return err
def set_error(errmsg):
if not isinstance(errmsg, bytes):
errmsg = errmsg.encode('utf8')
sdl.SDL_SetError(errmsg)
|
from pygame._sdl import sdl, ffi
from numbers import Number
class SDLError(Exception):
"""SDL error."""
@classmethod
def from_sdl_error(cls):
return cls(ffi.string(sdl.SDL_GetError()))
def unpack_rect(rect):
"""Unpack the size and raise a type error if needed."""
# This is as liberal as pygame when used for pygame.surface, but
# more liberal for pygame.display. I don't think the inconsistency
# matters
if (not hasattr(rect, '__iter__') or
len(rect) != 2 or
not isinstance(rect[0], Number) or
not isinstance(rect[1], Number)):
raise TypeError("expected tuple of two integers but got %r"
% type(rect))
# We'll throw a conversion TypeError here if someone is using a
# complex number, but so does pygame.
return int(rect[0]), int(rect[1])
def get_error():
err = ffi.string(sdl.SDL_GetError())
if not isinstance(err, str):
return err.decode('utf8')
return err
def set_error(errmsg):
if not isinstance(errmsg, bytes):
errmsg = errmsg.encode('utf8')
sdl.SDL_SetError(errmsg)
|
Support arbitrary numeric types for creating pygame surfaces
|
Support arbitrary numeric types for creating pygame surfaces
|
Python
|
lgpl-2.1
|
CTPUG/pygame_cffi,CTPUG/pygame_cffi,CTPUG/pygame_cffi
|
from pygame._sdl import sdl, ffi
+ from numbers import Number
class SDLError(Exception):
"""SDL error."""
@classmethod
def from_sdl_error(cls):
return cls(ffi.string(sdl.SDL_GetError()))
def unpack_rect(rect):
"""Unpack the size and raise a type error if needed."""
+ # This is as liberal as pygame when used for pygame.surface, but
+ # more liberal for pygame.display. I don't think the inconsistency
+ # matters
if (not hasattr(rect, '__iter__') or
len(rect) != 2 or
- not isinstance(rect[0], int) or
+ not isinstance(rect[0], Number) or
- not isinstance(rect[1], int)):
+ not isinstance(rect[1], Number)):
raise TypeError("expected tuple of two integers but got %r"
% type(rect))
- return rect
+ # We'll throw a conversion TypeError here if someone is using a
+ # complex number, but so does pygame.
+ return int(rect[0]), int(rect[1])
def get_error():
err = ffi.string(sdl.SDL_GetError())
if not isinstance(err, str):
return err.decode('utf8')
return err
def set_error(errmsg):
if not isinstance(errmsg, bytes):
errmsg = errmsg.encode('utf8')
sdl.SDL_SetError(errmsg)
|
Support arbitrary numeric types for creating pygame surfaces
|
## Code Before:
from pygame._sdl import sdl, ffi
class SDLError(Exception):
"""SDL error."""
@classmethod
def from_sdl_error(cls):
return cls(ffi.string(sdl.SDL_GetError()))
def unpack_rect(rect):
"""Unpack the size and raise a type error if needed."""
if (not hasattr(rect, '__iter__') or
len(rect) != 2 or
not isinstance(rect[0], int) or
not isinstance(rect[1], int)):
raise TypeError("expected tuple of two integers but got %r"
% type(rect))
return rect
def get_error():
err = ffi.string(sdl.SDL_GetError())
if not isinstance(err, str):
return err.decode('utf8')
return err
def set_error(errmsg):
if not isinstance(errmsg, bytes):
errmsg = errmsg.encode('utf8')
sdl.SDL_SetError(errmsg)
## Instruction:
Support arbitrary numeric types for creating pygame surfaces
## Code After:
from pygame._sdl import sdl, ffi
from numbers import Number
class SDLError(Exception):
"""SDL error."""
@classmethod
def from_sdl_error(cls):
return cls(ffi.string(sdl.SDL_GetError()))
def unpack_rect(rect):
"""Unpack the size and raise a type error if needed."""
# This is as liberal as pygame when used for pygame.surface, but
# more liberal for pygame.display. I don't think the inconsistency
# matters
if (not hasattr(rect, '__iter__') or
len(rect) != 2 or
not isinstance(rect[0], Number) or
not isinstance(rect[1], Number)):
raise TypeError("expected tuple of two integers but got %r"
% type(rect))
# We'll throw a conversion TypeError here if someone is using a
# complex number, but so does pygame.
return int(rect[0]), int(rect[1])
def get_error():
err = ffi.string(sdl.SDL_GetError())
if not isinstance(err, str):
return err.decode('utf8')
return err
def set_error(errmsg):
if not isinstance(errmsg, bytes):
errmsg = errmsg.encode('utf8')
sdl.SDL_SetError(errmsg)
|
# ... existing code ...
from pygame._sdl import sdl, ffi
from numbers import Number
class SDLError(Exception):
# ... modified code ...
def unpack_rect(rect):
"""Unpack the size and raise a type error if needed."""
# This is as liberal as pygame when used for pygame.surface, but
# more liberal for pygame.display. I don't think the inconsistency
# matters
if (not hasattr(rect, '__iter__') or
len(rect) != 2 or
not isinstance(rect[0], Number) or
not isinstance(rect[1], Number)):
raise TypeError("expected tuple of two integers but got %r"
% type(rect))
# We'll throw a conversion TypeError here if someone is using a
# complex number, but so does pygame.
return int(rect[0]), int(rect[1])
def get_error():
# ... rest of the code ...
|
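A standalone look at why the isinstance check was widened; the sample values are illustrative only. Anything registered with numbers.Number (ints, floats, Decimal) now passes the size check, while strings still fail it:
from decimal import Decimal
from numbers import Number
for value in (640, 640.0, Decimal('480'), '640'):
    print(repr(value), isinstance(value, int), isinstance(value, Number))
# 640 True True
# 640.0 False True      -- rejected by the old int-only check, accepted now
# Decimal('480') False True
# '640' False False     -- still raises TypeError in unpack_rect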
c5eb64cda6972df0e96a9f3dc9e776386ef50a78
|
examples/hello_world.py
|
examples/hello_world.py
|
from __future__ import print_function
import os
from binascii import hexlify
import cantools
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
MOTOHAWK_PATH = os.path.join(SCRIPT_DIR,
'..',
'tests',
'files',
'motohawk.dbc')
database = cantools.db.load_file('tests/files/motohawk.dbc')
message = {
'Temperature': 250.1,
'AverageRadius': 3.2,
'Enable': 'Enabled'
}
encoded = database.encode_message('ExampleMessage', message)
decoded = database.decode_message('ExampleMessage', encoded)
print('Message:', message)
print('Encoded:', hexlify(encoded).decode('ascii'))
print('Decoded:', decoded)
|
from __future__ import print_function
import os
from binascii import hexlify
import cantools
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
MOTOHAWK_PATH = os.path.join(SCRIPT_DIR,
'..',
'tests',
'files',
'motohawk.dbc')
database = cantools.db.load_file(MOTOHAWK_PATH)
message = {
'Temperature': 250.1,
'AverageRadius': 3.2,
'Enable': 'Enabled'
}
encoded = database.encode_message('ExampleMessage', message)
decoded = database.decode_message('ExampleMessage', encoded)
print('Message:', message)
print('Encoded:', hexlify(encoded).decode('ascii'))
print('Decoded:', decoded)
|
Correct DBC file path in hello world example.
|
Correct DBC file path in hello world example.
|
Python
|
mit
|
cantools/cantools,eerimoq/cantools
|
from __future__ import print_function
import os
from binascii import hexlify
import cantools
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
MOTOHAWK_PATH = os.path.join(SCRIPT_DIR,
'..',
'tests',
'files',
'motohawk.dbc')
- database = cantools.db.load_file('tests/files/motohawk.dbc')
+ database = cantools.db.load_file(MOTOHAWK_PATH)
message = {
'Temperature': 250.1,
'AverageRadius': 3.2,
'Enable': 'Enabled'
}
encoded = database.encode_message('ExampleMessage', message)
decoded = database.decode_message('ExampleMessage', encoded)
print('Message:', message)
print('Encoded:', hexlify(encoded).decode('ascii'))
print('Decoded:', decoded)
|
Correct DBC file path in hello world example.
|
## Code Before:
from __future__ import print_function
import os
from binascii import hexlify
import cantools
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
MOTOHAWK_PATH = os.path.join(SCRIPT_DIR,
'..',
'tests',
'files',
'motohawk.dbc')
database = cantools.db.load_file('tests/files/motohawk.dbc')
message = {
'Temperature': 250.1,
'AverageRadius': 3.2,
'Enable': 'Enabled'
}
encoded = database.encode_message('ExampleMessage', message)
decoded = database.decode_message('ExampleMessage', encoded)
print('Message:', message)
print('Encoded:', hexlify(encoded).decode('ascii'))
print('Decoded:', decoded)
## Instruction:
Correct DBC file path in hello world example.
## Code After:
from __future__ import print_function
import os
from binascii import hexlify
import cantools
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
MOTOHAWK_PATH = os.path.join(SCRIPT_DIR,
'..',
'tests',
'files',
'motohawk.dbc')
database = cantools.db.load_file(MOTOHAWK_PATH)
message = {
'Temperature': 250.1,
'AverageRadius': 3.2,
'Enable': 'Enabled'
}
encoded = database.encode_message('ExampleMessage', message)
decoded = database.decode_message('ExampleMessage', encoded)
print('Message:', message)
print('Encoded:', hexlify(encoded).decode('ascii'))
print('Decoded:', decoded)
|
...
'files',
'motohawk.dbc')
database = cantools.db.load_file(MOTOHAWK_PATH)
message = {
'Temperature': 250.1,
...
|
ad6d981cfbb9af0b02b40346548eb37631538016
|
poradnia/users/migrations/0007_migrate_avatars.py
|
poradnia/users/migrations/0007_migrate_avatars.py
|
from __future__ import unicode_literals
from django.db import models, migrations
def migrate_avatar(apps, schema_editor):
Avatar = apps.get_model("avatar", "Avatar")
for avatar in Avatar.objects.filter(primary=True).all():
avatar.user.picture = avatar.avatar
avatar.user.save()
avatar.save()
class Migration(migrations.Migration):
dependencies = [
('users', '0006_user_picture'),
('avatar', '0001_initial')
]
operations = [
migrations.RunPython(migrate_avatar)
]
|
from __future__ import unicode_literals
from django.conf import settings
from django.db import models, migrations
if 'avatar' in settings.INSTALLED_APPS:
def migrate_avatar(apps, schema_editor):
Avatar = apps.get_model("avatar", "Avatar")
for avatar in Avatar.objects.filter(primary=True).all():
avatar.user.picture = avatar.avatar
avatar.user.save()
avatar.save()
class Migration(migrations.Migration):
dependencies = [
('users', '0006_user_picture'),
('avatar', '0001_initial')
]
operations = [
migrations.RunPython(migrate_avatar)
]
else:
class Migration(migrations.Migration):
dependencies = [('users', '0006_user_picture'), ]
operations = []
|
Fix migrations after django-avatar drop
|
Fix migrations after django-avatar drop
|
Python
|
mit
|
watchdogpolska/poradnia.siecobywatelska.pl,watchdogpolska/poradnia.siecobywatelska.pl,rwakulszowa/poradnia,rwakulszowa/poradnia,watchdogpolska/poradnia.siecobywatelska.pl,watchdogpolska/poradnia,watchdogpolska/poradnia,rwakulszowa/poradnia,rwakulszowa/poradnia,watchdogpolska/poradnia,watchdogpolska/poradnia
|
from __future__ import unicode_literals
-
+ from django.conf import settings
from django.db import models, migrations
+ if 'avatar' in settings.INSTALLED_APPS:
- def migrate_avatar(apps, schema_editor):
+ def migrate_avatar(apps, schema_editor):
- Avatar = apps.get_model("avatar", "Avatar")
+ Avatar = apps.get_model("avatar", "Avatar")
- for avatar in Avatar.objects.filter(primary=True).all():
+ for avatar in Avatar.objects.filter(primary=True).all():
- avatar.user.picture = avatar.avatar
+ avatar.user.picture = avatar.avatar
- avatar.user.save()
+ avatar.user.save()
- avatar.save()
+ avatar.save()
+ class Migration(migrations.Migration):
+ dependencies = [
+ ('users', '0006_user_picture'),
+ ('avatar', '0001_initial')
+ ]
+ operations = [
+ migrations.RunPython(migrate_avatar)
- class Migration(migrations.Migration):
- dependencies = [
- ('users', '0006_user_picture'),
- ('avatar', '0001_initial')
- ]
+ ]
+ else:
+ class Migration(migrations.Migration):
+ dependencies = [('users', '0006_user_picture'), ]
- operations = [
+ operations = []
- migrations.RunPython(migrate_avatar)
- ]
-
|
Fix migrations after django-avatar drop
|
## Code Before:
from __future__ import unicode_literals
from django.db import models, migrations
def migrate_avatar(apps, schema_editor):
Avatar = apps.get_model("avatar", "Avatar")
for avatar in Avatar.objects.filter(primary=True).all():
avatar.user.picture = avatar.avatar
avatar.user.save()
avatar.save()
class Migration(migrations.Migration):
dependencies = [
('users', '0006_user_picture'),
('avatar', '0001_initial')
]
operations = [
migrations.RunPython(migrate_avatar)
]
## Instruction:
Fix migrations after django-avatar drop
## Code After:
from __future__ import unicode_literals
from django.conf import settings
from django.db import models, migrations
if 'avatar' in settings.INSTALLED_APPS:
def migrate_avatar(apps, schema_editor):
Avatar = apps.get_model("avatar", "Avatar")
for avatar in Avatar.objects.filter(primary=True).all():
avatar.user.picture = avatar.avatar
avatar.user.save()
avatar.save()
class Migration(migrations.Migration):
dependencies = [
('users', '0006_user_picture'),
('avatar', '0001_initial')
]
operations = [
migrations.RunPython(migrate_avatar)
]
else:
class Migration(migrations.Migration):
dependencies = [('users', '0006_user_picture'), ]
operations = []
|
...
from __future__ import unicode_literals
from django.conf import settings
from django.db import models, migrations
if 'avatar' in settings.INSTALLED_APPS:
def migrate_avatar(apps, schema_editor):
Avatar = apps.get_model("avatar", "Avatar")
for avatar in Avatar.objects.filter(primary=True).all():
avatar.user.picture = avatar.avatar
avatar.user.save()
avatar.save()
class Migration(migrations.Migration):
dependencies = [
('users', '0006_user_picture'),
('avatar', '0001_initial')
]
operations = [
migrations.RunPython(migrate_avatar)
]
else:
class Migration(migrations.Migration):
dependencies = [('users', '0006_user_picture'), ]
operations = []
...
|
7bee444eeb17ec956478e999db47338fdf201411
|
querylist/tests/querylist_list_tests.py
|
querylist/tests/querylist_list_tests.py
|
import unittest2
from querylist import QueryList
class QueryListActsAsList(unittest2.TestCase):
"""QueryList should behave as lists behave"""
def setUp(self):
self.src_list = [{'foo': 1}, {'foo': 2}, {'foo': 3}]
self.query_list = QueryList(self.src_list)
def test_QueryList_items_are_equal_to_its_source_lists_items(self):
self.assertEqual(self.src_list, self.query_list)
def test_QueryList_length_is_equal_to_its_source_lists_length(self):
self.assertEqual(len(self.src_list), len(self.query_list))
def test_QueryLists_can_append_like_lists(self):
dbl_list = self.src_list + self.src_list
dbl_query_list = self.query_list + self.query_list
self.assertEqual(dbl_query_list, dbl_list)
|
import unittest2
from querylist import QueryList
class QueryListActsAsList(unittest2.TestCase):
"""QueryLists should act just like lists"""
def setUp(self):
self.src_list = [{'foo': 1}, {'foo': 2}, {'foo': 3}]
self.query_list = QueryList(self.src_list)
def test_QueryList_items_are_equal_to_its_source_lists_items(self):
self.assertEqual(self.src_list, self.query_list)
def test_QueryList_length_is_equal_to_its_source_lists_length(self):
self.assertEqual(len(self.src_list), len(self.query_list))
def test_QueryLists_can_append_like_lists(self):
dbl_list = self.src_list + self.src_list
dbl_query_list = self.query_list + self.query_list
self.assertEqual(dbl_query_list, dbl_list)
def test_QueryList_slicing_works_like_list_slicing(self):
self.assertEqual(self.query_list[:2], self.src_list[:2])
def test_QueryList_indexing_works_like_list_indexing(self):
self.assertEqual(self.query_list[1], self.src_list[1])
|
Add some more acts as list tests.
|
Add some more acts as list tests.
|
Python
|
mit
|
thomasw/querylist,zoidbergwill/querylist
|
import unittest2
from querylist import QueryList
class QueryListActsAsList(unittest2.TestCase):
- """QueryList should behave as lists behave"""
+ """QueryLists should act just like lists"""
def setUp(self):
self.src_list = [{'foo': 1}, {'foo': 2}, {'foo': 3}]
self.query_list = QueryList(self.src_list)
def test_QueryList_items_are_equal_to_its_source_lists_items(self):
self.assertEqual(self.src_list, self.query_list)
def test_QueryList_length_is_equal_to_its_source_lists_length(self):
self.assertEqual(len(self.src_list), len(self.query_list))
def test_QueryLists_can_append_like_lists(self):
dbl_list = self.src_list + self.src_list
dbl_query_list = self.query_list + self.query_list
self.assertEqual(dbl_query_list, dbl_list)
+
+ def test_QueryList_slicing_works_like_list_slicing(self):
+ self.assertEqual(self.query_list[:2], self.src_list[:2])
+
+ def test_QueryList_indexing_works_like_list_indexing(self):
+ self.assertEqual(self.query_list[1], self.src_list[1])
+
|
Add some more acts as list tests.
|
## Code Before:
import unittest2
from querylist import QueryList
class QueryListActsAsList(unittest2.TestCase):
"""QueryList should behave as lists behave"""
def setUp(self):
self.src_list = [{'foo': 1}, {'foo': 2}, {'foo': 3}]
self.query_list = QueryList(self.src_list)
def test_QueryList_items_are_equal_to_its_source_lists_items(self):
self.assertEqual(self.src_list, self.query_list)
def test_QueryList_length_is_equal_to_its_source_lists_length(self):
self.assertEqual(len(self.src_list), len(self.query_list))
def test_QueryLists_can_append_like_lists(self):
dbl_list = self.src_list + self.src_list
dbl_query_list = self.query_list + self.query_list
self.assertEqual(dbl_query_list, dbl_list)
## Instruction:
Add some more acts as list tests.
## Code After:
import unittest2
from querylist import QueryList
class QueryListActsAsList(unittest2.TestCase):
"""QueryLists should act just like lists"""
def setUp(self):
self.src_list = [{'foo': 1}, {'foo': 2}, {'foo': 3}]
self.query_list = QueryList(self.src_list)
def test_QueryList_items_are_equal_to_its_source_lists_items(self):
self.assertEqual(self.src_list, self.query_list)
def test_QueryList_length_is_equal_to_its_source_lists_length(self):
self.assertEqual(len(self.src_list), len(self.query_list))
def test_QueryLists_can_append_like_lists(self):
dbl_list = self.src_list + self.src_list
dbl_query_list = self.query_list + self.query_list
self.assertEqual(dbl_query_list, dbl_list)
def test_QueryList_slicing_works_like_list_slicing(self):
self.assertEqual(self.query_list[:2], self.src_list[:2])
def test_QueryList_indexing_works_like_list_indexing(self):
self.assertEqual(self.query_list[1], self.src_list[1])
|
...
from querylist import QueryList
class QueryListActsAsList(unittest2.TestCase):
"""QueryLists should act just like lists"""
def setUp(self):
self.src_list = [{'foo': 1}, {'foo': 2}, {'foo': 3}]
self.query_list = QueryList(self.src_list)
...
dbl_query_list = self.query_list + self.query_list
self.assertEqual(dbl_query_list, dbl_list)
def test_QueryList_slicing_works_like_list_slicing(self):
self.assertEqual(self.query_list[:2], self.src_list[:2])
def test_QueryList_indexing_works_like_list_indexing(self):
self.assertEqual(self.query_list[1], self.src_list[1])
...
|
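A hedged usage sketch of the behaviour the new tests pin down, assuming the querylist package is installed; the data is illustrative:
from querylist import QueryList
src = [{'foo': 1}, {'foo': 2}, {'foo': 3}]
ql = QueryList(src)
print(ql[1] == src[1])    # True -- indexing matches the plain list
print(ql[:2] == src[:2])  # True -- slicing matches the plain list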
d9b46a4d06bf6832aa5dbb394ae97325e0578400
|
survey/tests/test_default_settings.py
|
survey/tests/test_default_settings.py
|
from survey.tests import BaseTest
from django.test import override_settings
from django.conf import settings
from django.test import tag
from survey import set_default_settings
@tag("set")
@override_settings()
class TestDefaultSettings(BaseTest):
def test_set_choices_separator(self):
url = "/admin/survey/survey/1/change/"
del settings.CHOICES_SEPARATOR
self.login()
with self.assertRaises(AttributeError):
self.client.get(url)
set_default_settings()
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
|
from survey.tests import BaseTest
from django.test import override_settings
from django.conf import settings
from survey import set_default_settings
from survey.exporter.tex.survey2tex import Survey2Tex
@override_settings()
class TestDefaultSettings(BaseTest):
def test_set_choices_separator(self):
url = "/admin/survey/survey/1/change/"
del settings.CHOICES_SEPARATOR
self.login()
set_default_settings()
try:
self.client.get(url)
except AttributeError:
self.fail("AttributeError: survey failed to set CHOICES_SEPARATOR")
def test_set_root(self):
del settings.ROOT
set_default_settings()
try:
Survey2Tex.generate(self, "/")
except AttributeError:
self.fail("AttributeError: survey failed to set ROOT")
|
Add - Test for setting ROOT
|
Add - Test for setting ROOT
|
Python
|
agpl-3.0
|
Pierre-Sassoulas/django-survey,Pierre-Sassoulas/django-survey,Pierre-Sassoulas/django-survey
|
from survey.tests import BaseTest
from django.test import override_settings
from django.conf import settings
- from django.test import tag
from survey import set_default_settings
+ from survey.exporter.tex.survey2tex import Survey2Tex
- @tag("set")
@override_settings()
class TestDefaultSettings(BaseTest):
def test_set_choices_separator(self):
url = "/admin/survey/survey/1/change/"
del settings.CHOICES_SEPARATOR
self.login()
- with self.assertRaises(AttributeError):
+ set_default_settings()
+ try:
self.client.get(url)
+ except AttributeError:
+ self.fail("AttributeError: survey failed to set CHOICES_SEPARATOR")
+
+ def test_set_root(self):
+ del settings.ROOT
set_default_settings()
- response = self.client.get(url)
- self.assertEqual(response.status_code, 200)
+ try:
+ Survey2Tex.generate(self, "/")
+ except AttributeError:
+ self.fail("AttributeError: survey failed to set ROOT")
|
Add - Test for setting ROOT
|
## Code Before:
from survey.tests import BaseTest
from django.test import override_settings
from django.conf import settings
from django.test import tag
from survey import set_default_settings
@tag("set")
@override_settings()
class TestDefaultSettings(BaseTest):
def test_set_choices_separator(self):
url = "/admin/survey/survey/1/change/"
del settings.CHOICES_SEPARATOR
self.login()
with self.assertRaises(AttributeError):
self.client.get(url)
set_default_settings()
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
## Instruction:
Add - Test for setting ROOT
## Code After:
from survey.tests import BaseTest
from django.test import override_settings
from django.conf import settings
from survey import set_default_settings
from survey.exporter.tex.survey2tex import Survey2Tex
@override_settings()
class TestDefaultSettings(BaseTest):
def test_set_choices_separator(self):
url = "/admin/survey/survey/1/change/"
del settings.CHOICES_SEPARATOR
self.login()
set_default_settings()
try:
self.client.get(url)
except AttributeError:
self.fail("AttributeError: survey failed to set CHOICES_SEPARATOR")
def test_set_root(self):
del settings.ROOT
set_default_settings()
try:
Survey2Tex.generate(self, "/")
except AttributeError:
self.fail("AttributeError: survey failed to set ROOT")
|
# ... existing code ...
from survey.tests import BaseTest
from django.test import override_settings
from django.conf import settings
from survey import set_default_settings
from survey.exporter.tex.survey2tex import Survey2Tex
@override_settings()
class TestDefaultSettings(BaseTest):
def test_set_choices_separator(self):
# ... modified code ...
url = "/admin/survey/survey/1/change/"
del settings.CHOICES_SEPARATOR
self.login()
set_default_settings()
try:
self.client.get(url)
except AttributeError:
self.fail("AttributeError: survey failed to set CHOICES_SEPARATOR")
def test_set_root(self):
del settings.ROOT
set_default_settings()
try:
Survey2Tex.generate(self, "/")
except AttributeError:
self.fail("AttributeError: survey failed to set ROOT")
# ... rest of the code ...
|
962fc49f734b04e717bf936745013ab0c19c4ee1
|
utils.py
|
utils.py
|
import cv2
import itertools
import numpy as np
def partition(pred, iterable):
"""
Partition the iterable into two disjoint entries based
on the predicate.
@return: Tuple (iterable1, iterable2)
"""
iter1, iter2 = itertools.tee(iterable)
return itertools.filterfalse(pred, iter1), filter(pred, iter2)
def decay(val, min_val, decay_rate):
return max(val * decay_rate, min_val)
def one_hot(i, n):
"""
One-hot encoder. Returns a numpy array of length n with i-th entry
set to 1, and all others set to 0."
@return: numpy.array
"""
assert i < n, "Invalid args to one_hot"
enc = np.zeros(n)
enc[i] = 1
return enc
def resize_image(image, width, height):
"""
Resize the image screen to the configured width and height and
convert it to grayscale.
"""
grayscale = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)
return cv2.resize(grayscale, (width, height))
|
from six.moves import filterfalse
import cv2
import itertools
import numpy as np
def partition(pred, iterable):
"""
Partition the iterable into two disjoint entries based
on the predicate.
@return: Tuple (iterable1, iterable2)
"""
iter1, iter2 = itertools.tee(iterable)
return filterfalse(pred, iter1), filter(pred, iter2)
def decay(val, min_val, decay_rate):
return max(val * decay_rate, min_val)
def one_hot(i, n):
"""
One-hot encoder. Returns a numpy array of length n with i-th entry
set to 1, and all others set to 0."
@return: numpy.array
"""
assert i < n, "Invalid args to one_hot"
enc = np.zeros(n)
enc[i] = 1
return enc
def resize_image(image, width, height):
"""
Resize the image screen to the configured width and height and
convert it to grayscale.
"""
grayscale = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)
return cv2.resize(grayscale, (width, height))
|
Fix python 2.7 compatibility issue
|
Fix python 2.7 compatibility issue
|
Python
|
mit
|
viswanathgs/dist-dqn,viswanathgs/dist-dqn
|
+ from six.moves import filterfalse
+
import cv2
import itertools
import numpy as np
def partition(pred, iterable):
"""
Partition the iterable into two disjoint entries based
on the predicate.
@return: Tuple (iterable1, iterable2)
"""
iter1, iter2 = itertools.tee(iterable)
- return itertools.filterfalse(pred, iter1), filter(pred, iter2)
+ return filterfalse(pred, iter1), filter(pred, iter2)
def decay(val, min_val, decay_rate):
return max(val * decay_rate, min_val)
def one_hot(i, n):
"""
One-hot encoder. Returns a numpy array of length n with i-th entry
set to 1, and all others set to 0."
@return: numpy.array
"""
assert i < n, "Invalid args to one_hot"
enc = np.zeros(n)
enc[i] = 1
return enc
def resize_image(image, width, height):
"""
Resize the image screen to the configured width and height and
convert it to grayscale.
"""
grayscale = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)
return cv2.resize(grayscale, (width, height))
|
Fix python 2.7 compatibility issue
|
## Code Before:
import cv2
import itertools
import numpy as np
def partition(pred, iterable):
"""
Partition the iterable into two disjoint entries based
on the predicate.
@return: Tuple (iterable1, iterable2)
"""
iter1, iter2 = itertools.tee(iterable)
return itertools.filterfalse(pred, iter1), filter(pred, iter2)
def decay(val, min_val, decay_rate):
return max(val * decay_rate, min_val)
def one_hot(i, n):
"""
One-hot encoder. Returns a numpy array of length n with i-th entry
set to 1, and all others set to 0."
@return: numpy.array
"""
assert i < n, "Invalid args to one_hot"
enc = np.zeros(n)
enc[i] = 1
return enc
def resize_image(image, width, height):
"""
Resize the image screen to the configured width and height and
convert it to grayscale.
"""
grayscale = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)
return cv2.resize(grayscale, (width, height))
## Instruction:
Fix python 2.7 compatibility issue
## Code After:
from six.moves import filterfalse
import cv2
import itertools
import numpy as np
def partition(pred, iterable):
"""
Partition the iterable into two disjoint entries based
on the predicate.
@return: Tuple (iterable1, iterable2)
"""
iter1, iter2 = itertools.tee(iterable)
return filterfalse(pred, iter1), filter(pred, iter2)
def decay(val, min_val, decay_rate):
return max(val * decay_rate, min_val)
def one_hot(i, n):
"""
One-hot encoder. Returns a numpy array of length n with i-th entry
set to 1, and all others set to 0."
@return: numpy.array
"""
assert i < n, "Invalid args to one_hot"
enc = np.zeros(n)
enc[i] = 1
return enc
def resize_image(image, width, height):
"""
Resize the image screen to the configured width and height and
convert it to grayscale.
"""
grayscale = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)
return cv2.resize(grayscale, (width, height))
|
...
from six.moves import filterfalse
import cv2
import itertools
import numpy as np
...
@return: Tuple (iterable1, iterable2)
"""
iter1, iter2 = itertools.tee(iterable)
return filterfalse(pred, iter1), filter(pred, iter2)
def decay(val, min_val, decay_rate):
return max(val * decay_rate, min_val)
...
|
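A standalone check that the six.moves import keeps partition working on both Python 2.7 and 3.x (assuming six is installed); the sample data is illustrative:
from itertools import tee
from six.moves import filterfalse
def partition(pred, iterable):
    iter1, iter2 = tee(iterable)
    return filterfalse(pred, iter1), filter(pred, iter2)
evens, odds = partition(lambda n: n % 2, range(6))
print(list(evens))  # [0, 2, 4]
print(list(odds))   # [1, 3, 5]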
d5a1bfbff18cf129550c2c423beb8db9302c0736
|
tests/redisdl_test.py
|
tests/redisdl_test.py
|
import redisdl
import unittest
import json
import os.path
class RedisdlTest(unittest.TestCase):
def test_roundtrip(self):
path = os.path.join(os.path.dirname(__file__), 'fixtures', 'dump.json')
with open(path) as f:
dump = f.read()
redisdl.loads(dump)
redump = redisdl.dumps()
expected = json.loads(dump)
actual = json.loads(redump)
self.assertEqual(expected, actual)
|
import redisdl
import unittest
import json
import os.path
class RedisdlTest(unittest.TestCase):
def setUp(self):
import redis
self.r = redis.Redis()
for key in self.r.keys('*'):
self.r.delete(key)
def test_roundtrip(self):
path = os.path.join(os.path.dirname(__file__), 'fixtures', 'dump.json')
with open(path) as f:
dump = f.read()
redisdl.loads(dump)
redump = redisdl.dumps()
expected = json.loads(dump)
actual = json.loads(redump)
self.assertEqual(expected, actual)
|
Clear redis data store before running tests
|
Clear redis data store before running tests
|
Python
|
bsd-2-clause
|
hyunchel/redis-dump-load,p/redis-dump-load,hyunchel/redis-dump-load,p/redis-dump-load
|
import redisdl
import unittest
import json
import os.path
class RedisdlTest(unittest.TestCase):
+ def setUp(self):
+ import redis
+ self.r = redis.Redis()
+ for key in self.r.keys('*'):
+ self.r.delete(key)
+
def test_roundtrip(self):
path = os.path.join(os.path.dirname(__file__), 'fixtures', 'dump.json')
with open(path) as f:
dump = f.read()
redisdl.loads(dump)
redump = redisdl.dumps()
expected = json.loads(dump)
actual = json.loads(redump)
self.assertEqual(expected, actual)
|
Clear redis data store before running tests
|
## Code Before:
import redisdl
import unittest
import json
import os.path
class RedisdlTest(unittest.TestCase):
def test_roundtrip(self):
path = os.path.join(os.path.dirname(__file__), 'fixtures', 'dump.json')
with open(path) as f:
dump = f.read()
redisdl.loads(dump)
redump = redisdl.dumps()
expected = json.loads(dump)
actual = json.loads(redump)
self.assertEqual(expected, actual)
## Instruction:
Clear redis data store before running tests
## Code After:
import redisdl
import unittest
import json
import os.path
class RedisdlTest(unittest.TestCase):
def setUp(self):
import redis
self.r = redis.Redis()
for key in self.r.keys('*'):
self.r.delete(key)
def test_roundtrip(self):
path = os.path.join(os.path.dirname(__file__), 'fixtures', 'dump.json')
with open(path) as f:
dump = f.read()
redisdl.loads(dump)
redump = redisdl.dumps()
expected = json.loads(dump)
actual = json.loads(redump)
self.assertEqual(expected, actual)
|
# ... existing code ...
import os.path
class RedisdlTest(unittest.TestCase):
def setUp(self):
import redis
self.r = redis.Redis()
for key in self.r.keys('*'):
self.r.delete(key)
def test_roundtrip(self):
path = os.path.join(os.path.dirname(__file__), 'fixtures', 'dump.json')
with open(path) as f:
# ... rest of the code ...
|
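A minimal sketch of the clean-slate idea outside the test class, assuming a local Redis server is reachable on the default port; the key name is made up:
import redis
r = redis.Redis()
r.set('leftover-key', '1')
for key in r.keys('*'):
    r.delete(key)
print(r.keys('*'))  # []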
8a34e665539b10a8e90c86f89a7e2d5881b36519
|
functional_tests.py
|
functional_tests.py
|
from selenium import webdriver
browser = webdriver.Firefox()
browser.get('http://localhost:8000')
assert 'Django' in browser.title
|
from selenium import webdriver
import unittest
class NewVisitorTest(unittest.TestCase):
def setUp(self):
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(3)
def tearDown(self):
self.browser.quit()
def test_can_start_a_list_and_retrieve_it_later(self):
self.browser.get('http://localhost:8000')
# User notices the page title and header mention to-do lists
self.assertIn('To-Do', self.browser.title)
self.fail('Finish the test!')
# User is invited to enter a to-do item straight away
# User types "Buy peacock feathers"
# When user hits enter, the page updates, and now the page lists
# "1: Buy peacock feathers" as an item in a to-do list
# There is still a text box inviting the user to add another item.
# User enters "Use peacock feathers to make a fly"
# The page updates again, and now shows both items on their list
# User wonders whether the site will remember their list.
# Then user sees that the site has generated a unique URL for them
# -- there is some explanatory text to that effect.
# User visits that URL - their to-do list is still there.
# Satisfied, user goes back to sleep
if __name__ == '__main__':
unittest.main(warnings='ignore')
|
Add first FT spec comments
|
Add first FT spec comments
|
Python
|
mit
|
rodowi/remember-the-beer
|
from selenium import webdriver
+ import unittest
+ class NewVisitorTest(unittest.TestCase):
- browser = webdriver.Firefox()
- browser.get('http://localhost:8000')
- assert 'Django' in browser.title
+ def setUp(self):
+ self.browser = webdriver.Firefox()
+ self.browser.implicitly_wait(3)
+
+ def tearDown(self):
+ self.browser.quit()
+
+ def test_can_start_a_list_and_retrieve_it_later(self):
+ self.browser.get('http://localhost:8000')
+
+ # User notices the page title and header mention to-do lists
+ self.assertIn('To-Do', self.browser.title)
+ self.fail('Finish the test!')
+
+ # User is invited to enter a to-do item straight away
+
+ # User types "Buy peacock feathers"
+
+ # When user hits enter, the page updates, and now the page lists
+ # "1: Buy peacock feathers" as an item in a to-do list
+
+ # There is still a text box inviting the user to add another item.
+ # User enters "Use peacock feathers to make a fly"
+
+ # The page updates again, and now shows both items on their list
+
+ # User wonders whether the site will remember their list.
+ # Then user sees that the site has generated a unique URL for them
+ # -- there is some explanatory text to that effect.
+
+ # User visits that URL - their to-do list is still there.
+
+ # Satisfied, user goes back to sleep
+
+ if __name__ == '__main__':
+ unittest.main(warnings='ignore')
|
Add first FT spec comments
|
## Code Before:
from selenium import webdriver
browser = webdriver.Firefox()
browser.get('http://localhost:8000')
assert 'Django' in browser.title
## Instruction:
Add first FT spec comments
## Code After:
from selenium import webdriver
import unittest
class NewVisitorTest(unittest.TestCase):
def setUp(self):
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(3)
def tearDown(self):
self.browser.quit()
def test_can_start_a_list_and_retrieve_it_later(self):
self.browser.get('http://localhost:8000')
# User notices the page title and header mention to-do lists
self.assertIn('To-Do', self.browser.title)
self.fail('Finish the test!')
# User is invited to enter a to-do item straight away
# User types "Buy peacock feathers"
# When user hits enter, the page updates, and now the page lists
# "1: Buy peacock feathers" as an item in a to-do list
# There is still a text box inviting the user to add another item.
# User enters "Use peacock feathers to make a fly"
# The page updates again, and now shows both items on their list
# User wonders whether the site will remember their list.
# Then user sees that the site has generated a unique URL for them
# -- there is some explanatory text to that effect.
# User visits that URL - their to-do list is still there.
# Satisfied, user goes back to sleep
if __name__ == '__main__':
unittest.main(warnings='ignore')
|
// ... existing code ...
from selenium import webdriver
import unittest
class NewVisitorTest(unittest.TestCase):
def setUp(self):
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(3)
def tearDown(self):
self.browser.quit()
def test_can_start_a_list_and_retrieve_it_later(self):
self.browser.get('http://localhost:8000')
# User notices the page title and header mention to-do lists
self.assertIn('To-Do', self.browser.title)
self.fail('Finish the test!')
# User is invited to enter a to-do item straight away
# User types "Buy peacock feathers"
# When user hits enter, the page updates, and now the page lists
# "1: Buy peacock feathers" as an item in a to-do list
# There is still a text box inviting the user to add another item.
# User enters "Use peacock feathers to make a fly"
# The page updates again, and now shows both items on their list
# User wonders whether the site will remember their list.
# Then user sees that the site has generated a unique URL for them
# -- there is some explanatory text to that effect.
# User visits that URL - their to-do list is still there.
# Satisfied, user goes back to sleep
if __name__ == '__main__':
unittest.main(warnings='ignore')
// ... rest of the code ...
|
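For readers unfamiliar with the implicitly_wait(3) call used in the test above: it makes every element lookup poll the page for up to three seconds before raising, which absorbs slow page loads. The snippet below is a standalone sketch, not part of the record; it assumes Firefox and its webdriver are installed and that a server is running on localhost:8000.

```python
from selenium import webdriver

browser = webdriver.Firefox()
browser.implicitly_wait(3)  # element lookups retry for up to 3 seconds
try:
    browser.get('http://localhost:8000')
    print(browser.title)  # the record's test asserts 'To-Do' appears in this title
finally:
    browser.quit()
```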
257addd4403ae17a79c955d9751d5f3072c2a020
|
nightreads/emails/views.py
|
nightreads/emails/views.py
|
from django.shortcuts import render, redirect
from django.views.generic import View
from django.core.urlresolvers import reverse
from django.contrib import messages
from .models import Email
from .forms import EmailAdminForm
from nightreads.user_manager.models import Subscription
class SendEmailAdminView(View):
template = 'admin/emails/email/send_email.html'
form_class = EmailAdminForm
def get(self, request, pk):
email_obj = Email.objects.get(pk=pk)
return render(request, self.template, {'email_obj': email_obj})
def post(self, request, pk):
email_type = request.POST.get('type', '').lower()
email_obj = Email.objects.get(pk=pk)
if email_type == 'preview':
# send preview email
m = 'Preview email has been sent!'
else:
# send email
m = 'Email has been sent!'
email_obj.is_sent = True
messages.add_message(request, messages.INFO, m)
return redirect(reverse(
'admin:emails_email_change', args=(email_obj.id,)))
class UpdateTargetCountView(View):
def get(self, request, pk):
email_obj = Email.objects.get(pk=pk)
email_obj.targetted_users = Subscription.objects.filter(
tags__in=email_obj.tags.all()).count()
email_obj.save()
return redirect(reverse(
'admin:emails_email_change', args=(email_obj.id,)))
|
from django.shortcuts import render, redirect
from django.views.generic import View
from django.core.urlresolvers import reverse
from django.contrib import messages
from django.db.models import Q
from .models import Email
from .forms import EmailAdminForm
from nightreads.user_manager.models import Subscription
class SendEmailAdminView(View):
template = 'admin/emails/email/send_email.html'
form_class = EmailAdminForm
def get(self, request, pk):
email_obj = Email.objects.get(pk=pk)
return render(request, self.template, {'email_obj': email_obj})
def post(self, request, pk):
email_type = request.POST.get('type', '').lower()
email_obj = Email.objects.get(pk=pk)
if email_type == 'preview':
# send preview email
m = 'Preview email has been sent!'
else:
# send email
m = 'Email has been sent!'
email_obj.is_sent = True
messages.add_message(request, messages.INFO, m)
return redirect(reverse(
'admin:emails_email_change', args=(email_obj.id,)))
class UpdateTargetCountView(View):
def get(self, request, pk):
email_obj = Email.objects.get(pk=pk)
email_obj.targetted_users = Subscription.objects.filter(
Q(tags__in=email_obj.tags.all()) | Q(tags__name='all')).count()
email_obj.save()
return redirect(reverse(
'admin:emails_email_change', args=(email_obj.id,)))
|
Update target count to consider users who subscribed to `all`
|
Update target count to consider users who subscribed to `all`
|
Python
|
mit
|
avinassh/nightreads,avinassh/nightreads
|
from django.shortcuts import render, redirect
from django.views.generic import View
from django.core.urlresolvers import reverse
from django.contrib import messages
+ from django.db.models import Q
from .models import Email
from .forms import EmailAdminForm
from nightreads.user_manager.models import Subscription
class SendEmailAdminView(View):
template = 'admin/emails/email/send_email.html'
form_class = EmailAdminForm
def get(self, request, pk):
email_obj = Email.objects.get(pk=pk)
return render(request, self.template, {'email_obj': email_obj})
def post(self, request, pk):
email_type = request.POST.get('type', '').lower()
email_obj = Email.objects.get(pk=pk)
if email_type == 'preview':
# send preview email
m = 'Preview email has been sent!'
else:
# send email
m = 'Email has been sent!'
email_obj.is_sent = True
messages.add_message(request, messages.INFO, m)
return redirect(reverse(
'admin:emails_email_change', args=(email_obj.id,)))
class UpdateTargetCountView(View):
def get(self, request, pk):
email_obj = Email.objects.get(pk=pk)
email_obj.targetted_users = Subscription.objects.filter(
- tags__in=email_obj.tags.all()).count()
+ Q(tags__in=email_obj.tags.all()) | Q(tags__name='all')).count()
email_obj.save()
return redirect(reverse(
'admin:emails_email_change', args=(email_obj.id,)))
|
Update target count to consider users who subscribed to `all`
|
## Code Before:
from django.shortcuts import render, redirect
from django.views.generic import View
from django.core.urlresolvers import reverse
from django.contrib import messages
from .models import Email
from .forms import EmailAdminForm
from nightreads.user_manager.models import Subscription
class SendEmailAdminView(View):
template = 'admin/emails/email/send_email.html'
form_class = EmailAdminForm
def get(self, request, pk):
email_obj = Email.objects.get(pk=pk)
return render(request, self.template, {'email_obj': email_obj})
def post(self, request, pk):
email_type = request.POST.get('type', '').lower()
email_obj = Email.objects.get(pk=pk)
if email_type == 'preview':
# send preview email
m = 'Preview email has been sent!'
else:
# send email
m = 'Email has been sent!'
email_obj.is_sent = True
messages.add_message(request, messages.INFO, m)
return redirect(reverse(
'admin:emails_email_change', args=(email_obj.id,)))
class UpdateTargetCountView(View):
def get(self, request, pk):
email_obj = Email.objects.get(pk=pk)
email_obj.targetted_users = Subscription.objects.filter(
tags__in=email_obj.tags.all()).count()
email_obj.save()
return redirect(reverse(
'admin:emails_email_change', args=(email_obj.id,)))
## Instruction:
Update target count to consider users who subscribed to `all`
## Code After:
from django.shortcuts import render, redirect
from django.views.generic import View
from django.core.urlresolvers import reverse
from django.contrib import messages
from django.db.models import Q
from .models import Email
from .forms import EmailAdminForm
from nightreads.user_manager.models import Subscription
class SendEmailAdminView(View):
template = 'admin/emails/email/send_email.html'
form_class = EmailAdminForm
def get(self, request, pk):
email_obj = Email.objects.get(pk=pk)
return render(request, self.template, {'email_obj': email_obj})
def post(self, request, pk):
email_type = request.POST.get('type', '').lower()
email_obj = Email.objects.get(pk=pk)
if email_type == 'preview':
# send preview email
m = 'Preview email has been sent!'
else:
# send email
m = 'Email has been sent!'
email_obj.is_sent = True
messages.add_message(request, messages.INFO, m)
return redirect(reverse(
'admin:emails_email_change', args=(email_obj.id,)))
class UpdateTargetCountView(View):
def get(self, request, pk):
email_obj = Email.objects.get(pk=pk)
email_obj.targetted_users = Subscription.objects.filter(
Q(tags__in=email_obj.tags.all()) | Q(tags__name='all')).count()
email_obj.save()
return redirect(reverse(
'admin:emails_email_change', args=(email_obj.id,)))
|
# ... existing code ...
from django.contrib import messages
from django.db.models import Q
from .models import Email
# ... modified code ...
def get(self, request, pk):
email_obj = Email.objects.get(pk=pk)
email_obj.targetted_users = Subscription.objects.filter(
Q(tags__in=email_obj.tags.all()) | Q(tags__name='all')).count()
email_obj.save()
return redirect(reverse(
'admin:emails_email_change', args=(email_obj.id,)))
# ... rest of the code ...
|
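One detail worth noting in this record: the updated filter relies on Q objects, which come from django.db.models (the import is included above so the new code runs). As a standalone illustration, assuming only that Django is installed, Q nodes can be OR-combined with | before they ever touch a queryset:

```python
from django.db.models import Q

# The OR of two Q nodes is itself a Q node; filter() turns it into SQL later.
combined = Q(tags__in=['python', 'django']) | Q(tags__name='all')
print(combined)
# Roughly: <Q: (OR: ('tags__in', ['python', 'django']), ('tags__name', 'all'))>
```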
cc92b1770acdc5a34eb32c596c0b2ece6bf32b0f
|
qiprofile_rest/server/settings.py
|
qiprofile_rest/server/settings.py
|
import os
# The run environment default is production.
# Modify this by setting the NODE_ENV environment variable.
env = os.getenv('NODE_ENV') or 'production'
# The MongoDB database.
if env == 'production':
MONGO_DBNAME = 'qiprofile'
else:
MONGO_DBNAME = 'qiprofile_test'
# Even though the domain is defined by the Eve MongoEngine
# adapter, a DOMAIN setting is required by Eve. This setting
# is only used to avoid an Eve complaint about a missing domain.
DOMAIN = {'eve-mongoengine': {}}
|
"""This ``settings`` file specifies the Eve configuration."""
import os
# The run environment default is production.
# Modify this by setting the NODE_ENV environment variable.
env = os.getenv('NODE_ENV') or 'production'
# The MongoDB database.
if env == 'production':
MONGO_DBNAME = 'qiprofile'
else:
MONGO_DBNAME = 'qiprofile_test'
# The MongoDB host default is localhost, but can be reset
# by the MONGO_HOST environment variable.
host = os.getenv('MONGO_HOST')
if host:
MONGO_HOST = host
# Even though the domain is defined by the Eve MongoEngine
# adapter, a DOMAIN setting is required by Eve. This setting
# is only used to avoid an Eve complaint about a missing domain.
DOMAIN = {'eve-mongoengine': {}}
|
Allow MONGO_HOST env var override.
|
Allow MONGO_HOST env var override.
|
Python
|
bsd-2-clause
|
ohsu-qin/qirest,ohsu-qin/qiprofile-rest
|
+ """This ``settings`` file specifies the Eve configuration."""
+
import os
# The run environment default is production.
# Modify this by setting the NODE_ENV environment variable.
env = os.getenv('NODE_ENV') or 'production'
# The MongoDB database.
if env == 'production':
- MONGO_DBNAME = 'qiprofile'
+ MONGO_DBNAME = 'qiprofile'
else:
- MONGO_DBNAME = 'qiprofile_test'
+ MONGO_DBNAME = 'qiprofile_test'
+
+ # The MongoDB host default is localhost, but can be reset
+ # by the MONGO_HOST environment variable.
+ host = os.getenv('MONGO_HOST')
+ if host:
+ MONGO_HOST = host
# Even though the domain is defined by the Eve MongoEngine
# adapter, a DOMAIN setting is required by Eve. This setting
# is only used to avoid an Eve complaint about a missing domain.
DOMAIN = {'eve-mongoengine': {}}
|
Allow MONGO_HOST env var override.
|
## Code Before:
import os
# The run environment default is production.
# Modify this by setting the NODE_ENV environment variable.
env = os.getenv('NODE_ENV') or 'production'
# The MongoDB database.
if env == 'production':
MONGO_DBNAME = 'qiprofile'
else:
MONGO_DBNAME = 'qiprofile_test'
# Even though the domain is defined by the Eve MongoEngine
# adapter, a DOMAIN setting is required by Eve. This setting
# is only used to avoid an Eve complaint about a missing domain.
DOMAIN = {'eve-mongoengine': {}}
## Instruction:
Allow MONGO_HOST env var override.
## Code After:
"""This ``settings`` file specifies the Eve configuration."""
import os
# The run environment default is production.
# Modify this by setting the NODE_ENV environment variable.
env = os.getenv('NODE_ENV') or 'production'
# The MongoDB database.
if env == 'production':
MONGO_DBNAME = 'qiprofile'
else:
MONGO_DBNAME = 'qiprofile_test'
# The MongoDB host default is localhost, but can be reset
# by the MONGO_HOST environment variable.
host = os.getenv('MONGO_HOST')
if host:
MONGO_HOST = host
# Even though the domain is defined by the Eve MongoEngine
# adapter, a DOMAIN setting is required by Eve. This setting
# is only used to avoid an Eve complaint about a missing domain.
DOMAIN = {'eve-mongoengine': {}}
|
# ... existing code ...
"""This ``settings`` file specifies the Eve configuration."""
import os
# The run environment default is production.
# ... modified code ...
env = os.getenv('NODE_ENV') or 'production'
# The MongoDB database.
if env == 'production':
MONGO_DBNAME = 'qiprofile'
else:
MONGO_DBNAME = 'qiprofile_test'
# The MongoDB host default is localhost, but can be reset
# by the MONGO_HOST environment variable.
host = os.getenv('MONGO_HOST')
if host:
MONGO_HOST = host
# Even though the domain is defined by the Eve MongoEngine
# adapter, a DOMAIN setting is required by Eve. This setting
# ... rest of the code ...
|
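The pattern in this record is worth spelling out: MONGO_HOST is only defined when the environment variable is present, so Eve's own default (localhost) keeps applying when it is unset. A minimal standalone sketch of the same conditional override, in plain Python:

```python
import os

# Only define the setting when the environment provides a value; otherwise
# leave it unset so the consuming framework's own default takes over.
host = os.getenv('MONGO_HOST')
if host:
    MONGO_HOST = host

print('override:', host if host else '<unset, framework default applies>')
```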
1525d327adf76a37bdbd6b0b9f63308ad55c5dbc
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name='django-databrowse',
version='1.3',
packages=['django_databrowse', 'django_databrowse.plugins'],
package_dir={'django_databrowse': 'django_databrowse'},
package_data={
'django_databrowse': [
'templates/databrowse/*.html',
'templates/databrowse/include/*.html'
]
},
provides=['django_databrowse'],
include_package_data=True,
url='http://pypi.python.org/pypi/django-databrowse',
license=open('LICENSE').read(),
author='Alireza Savand',
author_email='[email protected]',
description='Databrowse is a Django application that lets you browse your data.',
long_description=open('README.rst').read(),
install_requires=['django', ],
keywords=[
'django',
'web',
'databrowse',
'data'
],
platforms='OS Independent',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python',
'Framework :: Django',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development'
],
)
|
from distutils.core import setup
setup(
name='django-databrowse',
version='1.3',
packages=['django_databrowse', 'django_databrowse.plugins'],
package_dir={'django_databrowse': 'django_databrowse'},
package_data={
'django_databrowse': [
'templates/databrowse/*.html',
'templates/databrowse/include/*.html'
]
},
provides=['django_databrowse'],
include_package_data=True,
url='https://github.com/Alir3z4/django-databrowse',
license=open('LICENSE').read(),
author='Alireza Savand',
author_email='[email protected]',
description='Databrowse is a Django application that lets you browse your data.',
long_description=open('README.rst').read(),
install_requires=['django', ],
keywords=[
'django',
'web',
'databrowse',
'data'
],
platforms='OS Independent',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python',
'Framework :: Django',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development'
],
)
|
Change the pkg url to its github repo
|
Change the pkg url to its github repo
|
Python
|
bsd-3-clause
|
Alir3z4/django-databrowse,Alir3z4/django-databrowse
|
from distutils.core import setup
setup(
name='django-databrowse',
version='1.3',
packages=['django_databrowse', 'django_databrowse.plugins'],
package_dir={'django_databrowse': 'django_databrowse'},
package_data={
'django_databrowse': [
'templates/databrowse/*.html',
'templates/databrowse/include/*.html'
]
},
provides=['django_databrowse'],
include_package_data=True,
- url='http://pypi.python.org/pypi/django-databrowse',
+ url='https://github.com/Alir3z4/django-databrowse',
license=open('LICENSE').read(),
author='Alireza Savand',
author_email='[email protected]',
description='Databrowse is a Django application that lets you browse your data.',
long_description=open('README.rst').read(),
install_requires=['django', ],
keywords=[
'django',
'web',
'databrowse',
'data'
],
platforms='OS Independent',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python',
'Framework :: Django',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development'
],
)
|
Change the pkg url to its github repo
|
## Code Before:
from distutils.core import setup
setup(
name='django-databrowse',
version='1.3',
packages=['django_databrowse', 'django_databrowse.plugins'],
package_dir={'django_databrowse': 'django_databrowse'},
package_data={
'django_databrowse': [
'templates/databrowse/*.html',
'templates/databrowse/include/*.html'
]
},
provides=['django_databrowse'],
include_package_data=True,
url='http://pypi.python.org/pypi/django-databrowse',
license=open('LICENSE').read(),
author='Alireza Savand',
author_email='[email protected]',
description='Databrowse is a Django application that lets you browse your data.',
long_description=open('README.rst').read(),
install_requires=['django', ],
keywords=[
'django',
'web',
'databrowse',
'data'
],
platforms='OS Independent',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python',
'Framework :: Django',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development'
],
)
## Instruction:
Change the pkg url to its github repo
## Code After:
from distutils.core import setup
setup(
name='django-databrowse',
version='1.3',
packages=['django_databrowse', 'django_databrowse.plugins'],
package_dir={'django_databrowse': 'django_databrowse'},
package_data={
'django_databrowse': [
'templates/databrowse/*.html',
'templates/databrowse/include/*.html'
]
},
provides=['django_databrowse'],
include_package_data=True,
url='https://github.com/Alir3z4/django-databrowse',
license=open('LICENSE').read(),
author='Alireza Savand',
author_email='[email protected]',
description='Databrowse is a Django application that lets you browse your data.',
long_description=open('README.rst').read(),
install_requires=['django', ],
keywords=[
'django',
'web',
'databrowse',
'data'
],
platforms='OS Independent',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python',
'Framework :: Django',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development'
],
)
|
# ... existing code ...
},
provides=['django_databrowse'],
include_package_data=True,
url='https://github.com/Alir3z4/django-databrowse',
license=open('LICENSE').read(),
author='Alireza Savand',
author_email='[email protected]',
# ... rest of the code ...
|
4965511fdb9843233e84a8aa9aa0414bf1c02133
|
mail/views.py
|
mail/views.py
|
from django.shortcuts import redirect
from django.http import JsonResponse
from django.core.mail import EmailMessage
from django.middleware import csrf
from rest_framework.decorators import api_view
@api_view(['POST', 'GET'])
def send_contact_message(request):
if request.method == 'POST':
to_address = request.POST.get("to_address", "").split(',')
from_name = request.POST.get("from_name", "")
from_address = request.POST.get("from_address", "")
from_string = '{} <{}>'.format(from_name, from_address)
subject = request.POST.get("subject", "")
message_body = request.POST.get("message_body", "")
csrf_token = request.POST.get("csrfmiddlewaretoken", "")
email = EmailMessage(subject,
message_body,
'[email protected]',
to_address,
reply_to=[from_string])
email.send(fail_silently=False)
#return redirect('/contact-thank-you')
data = {'subject': subject,
'message_body': message_body,
'to_address': to_address,
'reply_to': [from_string],
'from_address': '[email protected]',
'csrf_token': csrf_token,
}
return JsonResponse(data)
    # if this is not posting a message, let's send the csrf token back
else:
csrf_token = csrf.get_token(request)
data = {'csrf_token': csrf_token}
return JsonResponse(data)
|
from django.shortcuts import redirect
from django.http import JsonResponse
from django.core.mail import EmailMessage
from django.middleware import csrf
from rest_framework.decorators import api_view
@api_view(['POST', 'GET'])
def send_contact_message(request):
if request.method == 'POST':
to_address = request.POST.get("to_address", "").split(',')
from_name = request.POST.get("from_name", "")
from_address = request.POST.get("from_address", "")
from_string = '{} <{}>'.format(from_name, from_address)
subject = request.POST.get("subject", "")
message_body = request.POST.get("message_body", "")
email = EmailMessage(subject,
message_body,
'[email protected]',
to_address,
reply_to=[from_string])
email.send()
return redirect('/contact-thank-you')
    # if this is not posting a message, let's send the csrf token back
else:
csrf_token = csrf.get_token(request)
data = {'csrf_token': csrf_token}
return JsonResponse(data)
|
Revert "return json of message being sent to debug mail issue"
|
Revert "return json of message being sent to debug mail issue"
|
Python
|
agpl-3.0
|
openstax/openstax-cms,Connexions/openstax-cms,openstax/openstax-cms,openstax/openstax-cms,openstax/openstax-cms,Connexions/openstax-cms
|
from django.shortcuts import redirect
from django.http import JsonResponse
from django.core.mail import EmailMessage
from django.middleware import csrf
from rest_framework.decorators import api_view
@api_view(['POST', 'GET'])
def send_contact_message(request):
if request.method == 'POST':
to_address = request.POST.get("to_address", "").split(',')
from_name = request.POST.get("from_name", "")
from_address = request.POST.get("from_address", "")
from_string = '{} <{}>'.format(from_name, from_address)
subject = request.POST.get("subject", "")
message_body = request.POST.get("message_body", "")
- csrf_token = request.POST.get("csrfmiddlewaretoken", "")
email = EmailMessage(subject,
message_body,
'[email protected]',
to_address,
reply_to=[from_string])
- email.send(fail_silently=False)
+ email.send()
- #return redirect('/contact-thank-you')
+ return redirect('/contact-thank-you')
- data = {'subject': subject,
- 'message_body': message_body,
- 'to_address': to_address,
- 'reply_to': [from_string],
- 'from_address': '[email protected]',
- 'csrf_token': csrf_token,
- }
-
- return JsonResponse(data)
    # if this is not posting a message, let's send the csrf token back
else:
csrf_token = csrf.get_token(request)
data = {'csrf_token': csrf_token}
return JsonResponse(data)
|
Revert "return json of message being sent to debug mail issue"
|
## Code Before:
from django.shortcuts import redirect
from django.http import JsonResponse
from django.core.mail import EmailMessage
from django.middleware import csrf
from rest_framework.decorators import api_view
@api_view(['POST', 'GET'])
def send_contact_message(request):
if request.method == 'POST':
to_address = request.POST.get("to_address", "").split(',')
from_name = request.POST.get("from_name", "")
from_address = request.POST.get("from_address", "")
from_string = '{} <{}>'.format(from_name, from_address)
subject = request.POST.get("subject", "")
message_body = request.POST.get("message_body", "")
csrf_token = request.POST.get("csrfmiddlewaretoken", "")
email = EmailMessage(subject,
message_body,
'[email protected]',
to_address,
reply_to=[from_string])
email.send(fail_silently=False)
#return redirect('/contact-thank-you')
data = {'subject': subject,
'message_body': message_body,
'to_address': to_address,
'reply_to': [from_string],
'from_address': '[email protected]',
'csrf_token': csrf_token,
}
return JsonResponse(data)
    # if this is not posting a message, let's send the csrf token back
else:
csrf_token = csrf.get_token(request)
data = {'csrf_token': csrf_token}
return JsonResponse(data)
## Instruction:
Revert "return json of message being sent to debug mail issue"
## Code After:
from django.shortcuts import redirect
from django.http import JsonResponse
from django.core.mail import EmailMessage
from django.middleware import csrf
from rest_framework.decorators import api_view
@api_view(['POST', 'GET'])
def send_contact_message(request):
if request.method == 'POST':
to_address = request.POST.get("to_address", "").split(',')
from_name = request.POST.get("from_name", "")
from_address = request.POST.get("from_address", "")
from_string = '{} <{}>'.format(from_name, from_address)
subject = request.POST.get("subject", "")
message_body = request.POST.get("message_body", "")
email = EmailMessage(subject,
message_body,
'[email protected]',
to_address,
reply_to=[from_string])
email.send()
return redirect('/contact-thank-you')
    # if this is not posting a message, let's send the csrf token back
else:
csrf_token = csrf.get_token(request)
data = {'csrf_token': csrf_token}
return JsonResponse(data)
|
...
from_string = '{} <{}>'.format(from_name, from_address)
subject = request.POST.get("subject", "")
message_body = request.POST.get("message_body", "")
email = EmailMessage(subject,
message_body,
...
'[email protected]',
to_address,
reply_to=[from_string])
email.send()
return redirect('/contact-thank-you')
    # if this is not posting a message, let's send the csrf token back
else:
csrf_token = csrf.get_token(request)
...
|
a9de2f3c9a05236c7254a2b1b03049b034fd555e
|
elections/bf_elections_2015/lib.py
|
elections/bf_elections_2015/lib.py
|
from candidates.static_data import (
BaseMapItData, BasePartyData, BaseAreaPostData
)
class MapItData(BaseMapItData):
pass
class PartyData(BasePartyData):
def __init__(self):
super(PartyData, self).__init__()
self.ALL_PARTY_SETS = (
{'slug': 'national', 'name': 'National'},
)
def party_data_to_party_sets(self, party_data):
return ['national']
class AreaPostData(BaseAreaPostData):
def area_to_post_group(self, area_data):
return None
def shorten_post_label(self, election, post_label):
return post_label
def post_id_to_post_group(self, election, post_id):
return None
def post_id_to_party_set(self, post_id):
return 'national'
|
from candidates.static_data import (
BaseMapItData, BasePartyData, BaseAreaPostData
)
class MapItData(BaseMapItData):
pass
class PartyData(BasePartyData):
def __init__(self):
super(PartyData, self).__init__()
self.ALL_PARTY_SETS = (
{'slug': 'national', 'name': 'National'},
)
def party_data_to_party_sets(self, party_data):
return ['national']
class AreaPostData(BaseAreaPostData):
def __init__(self, *args, **kwargs):
super(AreaPostData, self).__init__(*args, **kwargs)
self.ALL_POSSIBLE_POST_GROUPS = [None]
def area_to_post_group(self, area_data):
return None
def shorten_post_label(self, election, post_label):
return post_label
def post_id_to_post_group(self, election, post_id):
return None
def post_id_to_party_set(self, post_id):
return 'national'
def party_to_possible_post_groups(self, party_data):
return (None,)
|
Fix missing post group defaults for Burkina Faso
|
Fix missing post group defaults for Burkina Faso
|
Python
|
agpl-3.0
|
neavouli/yournextrepresentative,datamade/yournextmp-popit,mysociety/yournextrepresentative,neavouli/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,datamade/yournextmp-popit,mysociety/yournextmp-popit,datamade/yournextmp-popit,mysociety/yournextmp-popit,datamade/yournextmp-popit,mysociety/yournextmp-popit,neavouli/yournextrepresentative,mysociety/yournextrepresentative,mysociety/yournextmp-popit,mysociety/yournextrepresentative,neavouli/yournextrepresentative,neavouli/yournextrepresentative,mysociety/yournextrepresentative,datamade/yournextmp-popit,mysociety/yournextrepresentative,mysociety/yournextmp-popit,DemocracyClub/yournextrepresentative
|
from candidates.static_data import (
BaseMapItData, BasePartyData, BaseAreaPostData
)
class MapItData(BaseMapItData):
pass
class PartyData(BasePartyData):
def __init__(self):
super(PartyData, self).__init__()
self.ALL_PARTY_SETS = (
{'slug': 'national', 'name': 'National'},
)
def party_data_to_party_sets(self, party_data):
return ['national']
class AreaPostData(BaseAreaPostData):
+ def __init__(self, *args, **kwargs):
+ super(AreaPostData, self).__init__(*args, **kwargs)
+ self.ALL_POSSIBLE_POST_GROUPS = [None]
+
def area_to_post_group(self, area_data):
return None
def shorten_post_label(self, election, post_label):
return post_label
def post_id_to_post_group(self, election, post_id):
return None
def post_id_to_party_set(self, post_id):
return 'national'
+ def party_to_possible_post_groups(self, party_data):
+ return (None,)
+
|
Fix missing post group defaults for Burkina Faso
|
## Code Before:
from candidates.static_data import (
BaseMapItData, BasePartyData, BaseAreaPostData
)
class MapItData(BaseMapItData):
pass
class PartyData(BasePartyData):
def __init__(self):
super(PartyData, self).__init__()
self.ALL_PARTY_SETS = (
{'slug': 'national', 'name': 'National'},
)
def party_data_to_party_sets(self, party_data):
return ['national']
class AreaPostData(BaseAreaPostData):
def area_to_post_group(self, area_data):
return None
def shorten_post_label(self, election, post_label):
return post_label
def post_id_to_post_group(self, election, post_id):
return None
def post_id_to_party_set(self, post_id):
return 'national'
## Instruction:
Fix missing post group defaults for Burkina Faso
## Code After:
from candidates.static_data import (
BaseMapItData, BasePartyData, BaseAreaPostData
)
class MapItData(BaseMapItData):
pass
class PartyData(BasePartyData):
def __init__(self):
super(PartyData, self).__init__()
self.ALL_PARTY_SETS = (
{'slug': 'national', 'name': 'National'},
)
def party_data_to_party_sets(self, party_data):
return ['national']
class AreaPostData(BaseAreaPostData):
def __init__(self, *args, **kwargs):
super(AreaPostData, self).__init__(*args, **kwargs)
self.ALL_POSSIBLE_POST_GROUPS = [None]
def area_to_post_group(self, area_data):
return None
def shorten_post_label(self, election, post_label):
return post_label
def post_id_to_post_group(self, election, post_id):
return None
def post_id_to_party_set(self, post_id):
return 'national'
def party_to_possible_post_groups(self, party_data):
return (None,)
|
...
class AreaPostData(BaseAreaPostData):
def __init__(self, *args, **kwargs):
super(AreaPostData, self).__init__(*args, **kwargs)
self.ALL_POSSIBLE_POST_GROUPS = [None]
def area_to_post_group(self, area_data):
return None
...
def post_id_to_party_set(self, post_id):
return 'national'
def party_to_possible_post_groups(self, party_data):
return (None,)
...
|
30c97e8a377b40f42855d38167768f9eb8e374fc
|
base/views.py
|
base/views.py
|
from .utils import SESSION_KEY_CURRENT_OS
from .forms import OSForm
class CurrentOSMixin(object):
allowed_oses = OSForm.OS_CHOICES
def get_context_data(self, **kwargs):
"""Inject current active OS key and the choice form into context.
"""
# Zip the 2-tuple into a [keys, values] generator, and use next() to
# get its first item (i.e. keys).
allowed_os_keys = next(zip(*self.allowed_oses))
os = self.request.session.get(SESSION_KEY_CURRENT_OS)
if os not in allowed_os_keys:
os = 'windows'
os_form = OSForm(initial={'os': os})
kwargs.update({'current_os': os, 'os_form': os_form})
return super().get_context_data(**kwargs)
|
from .utils import SESSION_KEY_CURRENT_OS
from .forms import OSForm
class CurrentOSMixin(object):
allowed_oses = OSForm.OS_CHOICES
def get_context_data(self, **kwargs):
"""Inject current active OS key and the choice form into context.
"""
# Zip the 2-tuple into a [keys, values] generator, and use next() to
# get its first item (i.e. keys).
allowed_os_keys = next(zip(*self.allowed_oses))
os = self.request.session.get(SESSION_KEY_CURRENT_OS)
if os not in allowed_os_keys:
os = OSForm.OS_CHOICES[0][0]
os_form = OSForm(initial={'os': os})
kwargs.update({'current_os': os, 'os_form': os_form})
return super().get_context_data(**kwargs)
|
Use form variable instead of hard-coding
|
Use form variable instead of hard-coding
|
Python
|
mit
|
djangogirlstaipei/djangogirlstaipei,djangogirlstaipei/djangogirlstaipei,djangogirlstaipei/djangogirlstaipei
|
from .utils import SESSION_KEY_CURRENT_OS
from .forms import OSForm
class CurrentOSMixin(object):
allowed_oses = OSForm.OS_CHOICES
def get_context_data(self, **kwargs):
"""Inject current active OS key and the choice form into context.
"""
# Zip the 2-tuple into a [keys, values] generator, and use next() to
# get its first item (i.e. keys).
allowed_os_keys = next(zip(*self.allowed_oses))
os = self.request.session.get(SESSION_KEY_CURRENT_OS)
if os not in allowed_os_keys:
- os = 'windows'
+ os = OSForm.OS_CHOICES[0][0]
os_form = OSForm(initial={'os': os})
kwargs.update({'current_os': os, 'os_form': os_form})
return super().get_context_data(**kwargs)
|
Use form variable instead of hard-coding
|
## Code Before:
from .utils import SESSION_KEY_CURRENT_OS
from .forms import OSForm
class CurrentOSMixin(object):
allowed_oses = OSForm.OS_CHOICES
def get_context_data(self, **kwargs):
"""Inject current active OS key and the choice form into context.
"""
# Zip the 2-tuple into a [keys, values] generator, and use next() to
# get its first item (i.e. keys).
allowed_os_keys = next(zip(*self.allowed_oses))
os = self.request.session.get(SESSION_KEY_CURRENT_OS)
if os not in allowed_os_keys:
os = 'windows'
os_form = OSForm(initial={'os': os})
kwargs.update({'current_os': os, 'os_form': os_form})
return super().get_context_data(**kwargs)
## Instruction:
Use form variable instead of hard-coding
## Code After:
from .utils import SESSION_KEY_CURRENT_OS
from .forms import OSForm
class CurrentOSMixin(object):
allowed_oses = OSForm.OS_CHOICES
def get_context_data(self, **kwargs):
"""Inject current active OS key and the choice form into context.
"""
# Zip the 2-tuple into a [keys, values] generator, and use next() to
# get its first item (i.e. keys).
allowed_os_keys = next(zip(*self.allowed_oses))
os = self.request.session.get(SESSION_KEY_CURRENT_OS)
if os not in allowed_os_keys:
os = OSForm.OS_CHOICES[0][0]
os_form = OSForm(initial={'os': os})
kwargs.update({'current_os': os, 'os_form': os_form})
return super().get_context_data(**kwargs)
|
// ... existing code ...
os = self.request.session.get(SESSION_KEY_CURRENT_OS)
if os not in allowed_os_keys:
os = OSForm.OS_CHOICES[0][0]
os_form = OSForm(initial={'os': os})
kwargs.update({'current_os': os, 'os_form': os_form})
return super().get_context_data(**kwargs)
// ... rest of the code ...
|
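The next(zip(*self.allowed_oses)) idiom in this record is terse enough to merit a standalone illustration (not part of the original data): zip(*pairs) transposes a sequence of (key, label) choices, and next() pulls out the first row, which is just the keys.

```python
OS_CHOICES = [('windows', 'Windows'), ('mac', 'macOS'), ('linux', 'Linux')]

allowed_os_keys = next(zip(*OS_CHOICES))
print(allowed_os_keys)   # ('windows', 'mac', 'linux')
print(OS_CHOICES[0][0])  # 'windows', the fallback key the updated view uses
```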
4eb043cfb0f2535a1dca37927323155b7d3f363e
|
dynamic_rest/links.py
|
dynamic_rest/links.py
|
"""This module contains utilities to support API links."""
from django.utils import six
from dynamic_rest.conf import settings
from .routers import DynamicRouter
def merge_link_object(serializer, data, instance):
"""Add a 'links' attribute to the data that maps field names to URLs.
NOTE: This is the format that Ember Data supports, but alternative
implementations are possible to support other formats.
"""
link_object = {}
if not getattr(instance, 'pk', None):
# If instance doesn't have a `pk` field, we'll assume it doesn't
# have a canonical resource URL to hang a link off of.
        # This generally only affects Ephemeral Objects.
return data
link_fields = serializer.get_link_fields()
for name, field in six.iteritems(link_fields):
# For included fields, omit link if there's no data.
if name in data and not data[name]:
continue
link = getattr(field, 'link', None)
if link is None:
base_url = ''
if settings.ENABLE_HOST_RELATIVE_LINKS:
# if the resource isn't registered, this will default back to
# using resource-relative urls for links.
base_url = DynamicRouter.get_canonical_path(
serializer.get_resource_key(),
instance.pk
) or ''
link = '%s%s/' % (base_url, name)
# Default to DREST-generated relation endpoints.
elif callable(link):
link = link(name, field, data, instance)
link_object[name] = link
if link_object:
data['links'] = link_object
return data
|
"""This module contains utilities to support API links."""
from django.utils import six
from dynamic_rest.conf import settings
from dynamic_rest.routers import DynamicRouter
def merge_link_object(serializer, data, instance):
"""Add a 'links' attribute to the data that maps field names to URLs.
NOTE: This is the format that Ember Data supports, but alternative
implementations are possible to support other formats.
"""
link_object = {}
if not getattr(instance, 'pk', None):
# If instance doesn't have a `pk` field, we'll assume it doesn't
# have a canonical resource URL to hang a link off of.
        # This generally only affects Ephemeral Objects.
return data
link_fields = serializer.get_link_fields()
for name, field in six.iteritems(link_fields):
# For included fields, omit link if there's no data.
if name in data and not data[name]:
continue
link = getattr(field, 'link', None)
if link is None:
base_url = ''
if settings.ENABLE_HOST_RELATIVE_LINKS:
# if the resource isn't registered, this will default back to
# using resource-relative urls for links.
base_url = DynamicRouter.get_canonical_path(
serializer.get_resource_key(),
instance.pk
) or ''
link = '%s%s/' % (base_url, name)
# Default to DREST-generated relation endpoints.
elif callable(link):
link = link(name, field, data, instance)
link_object[name] = link
if link_object:
data['links'] = link_object
return data
|
Fix some sorting thing for isort
|
Fix some sorting thing for isort
|
Python
|
mit
|
sanoma/dynamic-rest,AltSchool/dynamic-rest,sanoma/dynamic-rest,AltSchool/dynamic-rest
|
"""This module contains utilities to support API links."""
from django.utils import six
+
from dynamic_rest.conf import settings
- from .routers import DynamicRouter
+ from dynamic_rest.routers import DynamicRouter
def merge_link_object(serializer, data, instance):
"""Add a 'links' attribute to the data that maps field names to URLs.
NOTE: This is the format that Ember Data supports, but alternative
implementations are possible to support other formats.
"""
link_object = {}
if not getattr(instance, 'pk', None):
# If instance doesn't have a `pk` field, we'll assume it doesn't
# have a canonical resource URL to hang a link off of.
        # This generally only affects Ephemeral Objects.
return data
link_fields = serializer.get_link_fields()
for name, field in six.iteritems(link_fields):
# For included fields, omit link if there's no data.
if name in data and not data[name]:
continue
link = getattr(field, 'link', None)
if link is None:
base_url = ''
if settings.ENABLE_HOST_RELATIVE_LINKS:
# if the resource isn't registered, this will default back to
# using resource-relative urls for links.
base_url = DynamicRouter.get_canonical_path(
serializer.get_resource_key(),
instance.pk
) or ''
link = '%s%s/' % (base_url, name)
# Default to DREST-generated relation endpoints.
elif callable(link):
link = link(name, field, data, instance)
link_object[name] = link
if link_object:
data['links'] = link_object
return data
|
Fix some sorting thing for isort
|
## Code Before:
"""This module contains utilities to support API links."""
from django.utils import six
from dynamic_rest.conf import settings
from .routers import DynamicRouter
def merge_link_object(serializer, data, instance):
"""Add a 'links' attribute to the data that maps field names to URLs.
NOTE: This is the format that Ember Data supports, but alternative
implementations are possible to support other formats.
"""
link_object = {}
if not getattr(instance, 'pk', None):
# If instance doesn't have a `pk` field, we'll assume it doesn't
# have a canonical resource URL to hang a link off of.
        # This generally only affects Ephemeral Objects.
return data
link_fields = serializer.get_link_fields()
for name, field in six.iteritems(link_fields):
# For included fields, omit link if there's no data.
if name in data and not data[name]:
continue
link = getattr(field, 'link', None)
if link is None:
base_url = ''
if settings.ENABLE_HOST_RELATIVE_LINKS:
# if the resource isn't registered, this will default back to
# using resource-relative urls for links.
base_url = DynamicRouter.get_canonical_path(
serializer.get_resource_key(),
instance.pk
) or ''
link = '%s%s/' % (base_url, name)
# Default to DREST-generated relation endpoints.
elif callable(link):
link = link(name, field, data, instance)
link_object[name] = link
if link_object:
data['links'] = link_object
return data
## Instruction:
Fix some sorting thing for isort
## Code After:
"""This module contains utilities to support API links."""
from django.utils import six
from dynamic_rest.conf import settings
from dynamic_rest.routers import DynamicRouter
def merge_link_object(serializer, data, instance):
"""Add a 'links' attribute to the data that maps field names to URLs.
NOTE: This is the format that Ember Data supports, but alternative
implementations are possible to support other formats.
"""
link_object = {}
if not getattr(instance, 'pk', None):
# If instance doesn't have a `pk` field, we'll assume it doesn't
# have a canonical resource URL to hang a link off of.
        # This generally only affects Ephemeral Objects.
return data
link_fields = serializer.get_link_fields()
for name, field in six.iteritems(link_fields):
# For included fields, omit link if there's no data.
if name in data and not data[name]:
continue
link = getattr(field, 'link', None)
if link is None:
base_url = ''
if settings.ENABLE_HOST_RELATIVE_LINKS:
# if the resource isn't registered, this will default back to
# using resource-relative urls for links.
base_url = DynamicRouter.get_canonical_path(
serializer.get_resource_key(),
instance.pk
) or ''
link = '%s%s/' % (base_url, name)
# Default to DREST-generated relation endpoints.
elif callable(link):
link = link(name, field, data, instance)
link_object[name] = link
if link_object:
data['links'] = link_object
return data
|
// ... existing code ...
"""This module contains utilities to support API links."""
from django.utils import six
from dynamic_rest.conf import settings
from dynamic_rest.routers import DynamicRouter
def merge_link_object(serializer, data, instance):
// ... rest of the code ...
|
daf6468079e7ff3e00550db0f3a16bc109184027
|
osgtest/tests/test_49_jobs.py
|
osgtest/tests/test_49_jobs.py
|
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.osgunittest as osgunittest
class TestCleanupJobs(osgunittest.OSGTestCase):
"""Clean any configuration we touched for running jobs"""
def test_01_restore_job_env(self):
core.skip_ok_unless_installed('osg-configure')
core.skip_ok_unless_one_installed(['htcondor-ce', 'globus-gatekeeper', 'condor'])
files.restore(core.config['osg.job-environment'], owner='pbs')
files.restore(core.config['osg.local-job-environment'], owner='pbs')
|
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.osgunittest as osgunittest
class TestCleanupJobs(osgunittest.OSGTestCase):
"""Clean any configuration we touched for running jobs"""
def test_01_restore_job_env(self):
core.skip_ok_unless_one_installed(['htcondor-ce', 'globus-gatekeeper', 'condor'])
files.restore(core.config['osg.job-environment'], owner='pbs')
files.restore(core.config['osg.local-job-environment'], owner='pbs')
|
Drop job env backup cleanup dependence on osg-configure
|
Drop job env backup cleanup dependence on osg-configure
We already dropped the creation of the job env files in 840ea8
|
Python
|
apache-2.0
|
efajardo/osg-test,efajardo/osg-test
|
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.osgunittest as osgunittest
class TestCleanupJobs(osgunittest.OSGTestCase):
"""Clean any configuration we touched for running jobs"""
def test_01_restore_job_env(self):
- core.skip_ok_unless_installed('osg-configure')
core.skip_ok_unless_one_installed(['htcondor-ce', 'globus-gatekeeper', 'condor'])
files.restore(core.config['osg.job-environment'], owner='pbs')
files.restore(core.config['osg.local-job-environment'], owner='pbs')
|
Drop job env backup cleanup dependence on osg-configure
|
## Code Before:
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.osgunittest as osgunittest
class TestCleanupJobs(osgunittest.OSGTestCase):
"""Clean any configuration we touched for running jobs"""
def test_01_restore_job_env(self):
core.skip_ok_unless_installed('osg-configure')
core.skip_ok_unless_one_installed(['htcondor-ce', 'globus-gatekeeper', 'condor'])
files.restore(core.config['osg.job-environment'], owner='pbs')
files.restore(core.config['osg.local-job-environment'], owner='pbs')
## Instruction:
Drop job env backup cleanup dependence on osg-configure
## Code After:
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.osgunittest as osgunittest
class TestCleanupJobs(osgunittest.OSGTestCase):
"""Clean any configuration we touched for running jobs"""
def test_01_restore_job_env(self):
core.skip_ok_unless_one_installed(['htcondor-ce', 'globus-gatekeeper', 'condor'])
files.restore(core.config['osg.job-environment'], owner='pbs')
files.restore(core.config['osg.local-job-environment'], owner='pbs')
|
// ... existing code ...
"""Clean any configuration we touched for running jobs"""
def test_01_restore_job_env(self):
core.skip_ok_unless_one_installed(['htcondor-ce', 'globus-gatekeeper', 'condor'])
files.restore(core.config['osg.job-environment'], owner='pbs')
// ... rest of the code ...
|
ded80de3c276b57cd36d94ab393937289f772a25
|
django_prometheus/db/backends/postgresql/base.py
|
django_prometheus/db/backends/postgresql/base.py
|
import django
import psycopg2.extensions
from django_prometheus.db.common import DatabaseWrapperMixin, \
ExportingCursorWrapper
if django.VERSION >= (1, 9):
from django.db.backends.postgresql import base
else:
from django.db.backends.postgresql_psycopg2 import base
class DatabaseFeatures(base.DatabaseFeatures):
"""Our database has the exact same features as the base one."""
pass
class DatabaseWrapper(DatabaseWrapperMixin, base.DatabaseWrapper):
def get_connection_params(self):
conn_params = super(DatabaseWrapper, self).get_connection_params()
conn_params['cursor_factory'] = ExportingCursorWrapper(
psycopg2.extensions.cursor,
self.alias,
self.vendor,
)
return conn_params
def create_cursor(self, name=None):
# cursor_factory is a kwarg to connect() so restore create_cursor()'s
# default behavior
return base.DatabaseWrapper.create_cursor(self, name=name)
|
import django
import psycopg2.extensions
from django_prometheus.db.common import DatabaseWrapperMixin, \
ExportingCursorWrapper
if django.VERSION >= (1, 9):
from django.db.backends.postgresql import base
else:
from django.db.backends.postgresql_psycopg2 import base
class DatabaseFeatures(base.DatabaseFeatures):
"""Our database has the exact same features as the base one."""
pass
class DatabaseWrapper(DatabaseWrapperMixin, base.DatabaseWrapper):
def get_connection_params(self):
conn_params = super(DatabaseWrapper, self).get_connection_params()
conn_params['cursor_factory'] = ExportingCursorWrapper(
psycopg2.extensions.cursor,
self.alias,
self.vendor,
)
return conn_params
def create_cursor(self, name=None):
# cursor_factory is a kwarg to connect() so restore create_cursor()'s
# default behavior
if django.VERSION >= (1, 11, 0):
return base.DatabaseWrapper.create_cursor(self, name=name)
else:
return base.DatabaseWrapper.create_cursor(self)
|
Fix backwards compatibility for postgresql backend on Django 1.10 and earlier
|
Fix backwards compatibility for postgresql backend on Django 1.10 and earlier
|
Python
|
apache-2.0
|
korfuri/django-prometheus,obytes/django-prometheus,korfuri/django-prometheus,obytes/django-prometheus
|
import django
import psycopg2.extensions
from django_prometheus.db.common import DatabaseWrapperMixin, \
ExportingCursorWrapper
if django.VERSION >= (1, 9):
from django.db.backends.postgresql import base
else:
from django.db.backends.postgresql_psycopg2 import base
class DatabaseFeatures(base.DatabaseFeatures):
"""Our database has the exact same features as the base one."""
pass
class DatabaseWrapper(DatabaseWrapperMixin, base.DatabaseWrapper):
def get_connection_params(self):
conn_params = super(DatabaseWrapper, self).get_connection_params()
conn_params['cursor_factory'] = ExportingCursorWrapper(
psycopg2.extensions.cursor,
self.alias,
self.vendor,
)
return conn_params
def create_cursor(self, name=None):
# cursor_factory is a kwarg to connect() so restore create_cursor()'s
# default behavior
+ if django.VERSION >= (1, 11, 0):
- return base.DatabaseWrapper.create_cursor(self, name=name)
+ return base.DatabaseWrapper.create_cursor(self, name=name)
+ else:
+ return base.DatabaseWrapper.create_cursor(self)
|
Fix backwards compatibility for postgresql backend on Django 1.10 and earlier
|
## Code Before:
import django
import psycopg2.extensions
from django_prometheus.db.common import DatabaseWrapperMixin, \
ExportingCursorWrapper
if django.VERSION >= (1, 9):
from django.db.backends.postgresql import base
else:
from django.db.backends.postgresql_psycopg2 import base
class DatabaseFeatures(base.DatabaseFeatures):
"""Our database has the exact same features as the base one."""
pass
class DatabaseWrapper(DatabaseWrapperMixin, base.DatabaseWrapper):
def get_connection_params(self):
conn_params = super(DatabaseWrapper, self).get_connection_params()
conn_params['cursor_factory'] = ExportingCursorWrapper(
psycopg2.extensions.cursor,
self.alias,
self.vendor,
)
return conn_params
def create_cursor(self, name=None):
# cursor_factory is a kwarg to connect() so restore create_cursor()'s
# default behavior
return base.DatabaseWrapper.create_cursor(self, name=name)
## Instruction:
Fix backwards compatibility for postgresql backend on Django 1.10 and earlier
## Code After:
import django
import psycopg2.extensions
from django_prometheus.db.common import DatabaseWrapperMixin, \
ExportingCursorWrapper
if django.VERSION >= (1, 9):
from django.db.backends.postgresql import base
else:
from django.db.backends.postgresql_psycopg2 import base
class DatabaseFeatures(base.DatabaseFeatures):
"""Our database has the exact same features as the base one."""
pass
class DatabaseWrapper(DatabaseWrapperMixin, base.DatabaseWrapper):
def get_connection_params(self):
conn_params = super(DatabaseWrapper, self).get_connection_params()
conn_params['cursor_factory'] = ExportingCursorWrapper(
psycopg2.extensions.cursor,
self.alias,
self.vendor,
)
return conn_params
def create_cursor(self, name=None):
# cursor_factory is a kwarg to connect() so restore create_cursor()'s
# default behavior
if django.VERSION >= (1, 11, 0):
return base.DatabaseWrapper.create_cursor(self, name=name)
else:
return base.DatabaseWrapper.create_cursor(self)
|
# ... existing code ...
def create_cursor(self, name=None):
# cursor_factory is a kwarg to connect() so restore create_cursor()'s
# default behavior
if django.VERSION >= (1, 11, 0):
return base.DatabaseWrapper.create_cursor(self, name=name)
else:
return base.DatabaseWrapper.create_cursor(self)
# ... rest of the code ...
|
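The version gate in this record works because django.VERSION is a tuple such as (1, 11, 7, 'final', 0) and Python compares tuples element by element. A standalone sketch with plain tuples, no Django required:

```python
required = (1, 11, 0)

# 1 == 1, then 10 < 11, so the comparison is decided at the second element.
print((1, 10, 8, 'final', 0) >= required)  # False -> call create_cursor(self)
print((1, 11, 4, 'final', 0) >= required)  # True  -> pass the name kwarg
```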
359c563e200431e7da13766cf106f14f36b29bd4
|
shuup_workbench/urls.py
|
shuup_workbench/urls.py
|
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^sa/', include('shuup.admin.urls', namespace="shuup_admin", app_name="shuup_admin")),
url(r'^api/', include('shuup.api.urls')),
url(r'^', include('shuup.front.urls', namespace="shuup", app_name="shuup")),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
urlpatterns = [
url(r'^sa/', include('shuup.admin.urls', namespace="shuup_admin", app_name="shuup_admin")),
url(r'^api/', include('shuup.api.urls')),
url(r'^', include('shuup.front.urls', namespace="shuup", app_name="shuup")),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
Hide Django admin URLs from the workbench
|
Hide Django admin URLs from the workbench
Django admin shouldn't be used by default with Shuup. Enabling
this would require some attention towards Django filer in multi
shop situations.
|
Python
|
agpl-3.0
|
shoopio/shoop,shoopio/shoop,shoopio/shoop
|
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
- from django.contrib import admin
urlpatterns = [
- url(r'^admin/', include(admin.site.urls)),
url(r'^sa/', include('shuup.admin.urls', namespace="shuup_admin", app_name="shuup_admin")),
url(r'^api/', include('shuup.api.urls')),
url(r'^', include('shuup.front.urls', namespace="shuup", app_name="shuup")),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
Hide Django admin URLs from the workbench
|
## Code Before:
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^sa/', include('shuup.admin.urls', namespace="shuup_admin", app_name="shuup_admin")),
url(r'^api/', include('shuup.api.urls')),
url(r'^', include('shuup.front.urls', namespace="shuup", app_name="shuup")),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
## Instruction:
Hide Django admin URLs from the workbench
## Code After:
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
urlpatterns = [
url(r'^sa/', include('shuup.admin.urls', namespace="shuup_admin", app_name="shuup_admin")),
url(r'^api/', include('shuup.api.urls')),
url(r'^', include('shuup.front.urls', namespace="shuup", app_name="shuup")),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
# ... existing code ...
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
urlpatterns = [
url(r'^sa/', include('shuup.admin.urls', namespace="shuup_admin", app_name="shuup_admin")),
url(r'^api/', include('shuup.api.urls')),
url(r'^', include('shuup.front.urls', namespace="shuup", app_name="shuup")),
# ... rest of the code ...
|
ed8b6b615bc8d006e3e31843fa31f0bda09109ed
|
spec/puzzle/examples/public_domain/zebra_puzzle_spec.py
|
spec/puzzle/examples/public_domain/zebra_puzzle_spec.py
|
import astor
from data import warehouse
from puzzle.examples.public_domain import zebra_puzzle
from puzzle.problems import logic_problem
from puzzle.puzzlepedia import prod_config
from spec.mamba import *
with _description('zebra_puzzle'):
with description('solution'):
with before.all:
warehouse.save()
prod_config.init()
self.subject = zebra_puzzle.get()
with after.all:
prod_config.reset()
warehouse.restore()
with it('identifies puzzle type'):
problems = self.subject.problems()
expect(problems).to(have_len(1))
problem = problems[0]
expect(problem).to(be_a(logic_problem.LogicProblem))
with it('parses expressions'):
problem = self.subject.problems()[0]
expect(astor.to_source(problem._parse())).to(
look_like(zebra_puzzle.PARSED))
with it('exports a model'):
problem = self.subject.problems()[0]
expect(problem.solution).to(look_like(zebra_puzzle.SOLUTION))
|
import astor
from data import warehouse
from puzzle.examples.public_domain import zebra_puzzle
from puzzle.problems import logic_problem
from puzzle.puzzlepedia import prod_config
from spec.mamba import *
with _description('zebra_puzzle'):
with description('solution'):
with before.all:
warehouse.save()
prod_config.init()
self.subject = zebra_puzzle.get()
with after.all:
prod_config.reset()
warehouse.restore()
with it('identifies puzzle type'):
problems = self.subject.problems()
expect(problems).to(have_len(1))
problem = problems[0]
expect(problem).to(be_a(logic_problem.LogicProblem))
with it('parses expressions'):
parsed = logic_problem._parse(zebra_puzzle.SOURCE.split('\n'))
expect(astor.to_source(parsed)).to(look_like(zebra_puzzle.PARSED))
with it('models puzzle'):
model = logic_problem._model(zebra_puzzle.SOURCE.split('\n'))
print(str(model))
with it('exports a solution'):
problem = self.subject.problems()[0]
expect(problem.solution).to(look_like(zebra_puzzle.SOLUTION))
|
Update zebra puzzle to reflect LogicProblem changes.
|
Update zebra puzzle to reflect LogicProblem changes.
|
Python
|
mit
|
PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge
|
import astor
from data import warehouse
from puzzle.examples.public_domain import zebra_puzzle
from puzzle.problems import logic_problem
from puzzle.puzzlepedia import prod_config
from spec.mamba import *
with _description('zebra_puzzle'):
with description('solution'):
with before.all:
warehouse.save()
prod_config.init()
self.subject = zebra_puzzle.get()
with after.all:
prod_config.reset()
warehouse.restore()
with it('identifies puzzle type'):
problems = self.subject.problems()
expect(problems).to(have_len(1))
problem = problems[0]
expect(problem).to(be_a(logic_problem.LogicProblem))
with it('parses expressions'):
+ parsed = logic_problem._parse(zebra_puzzle.SOURCE.split('\n'))
+ expect(astor.to_source(parsed)).to(look_like(zebra_puzzle.PARSED))
- problem = self.subject.problems()[0]
- expect(astor.to_source(problem._parse())).to(
- look_like(zebra_puzzle.PARSED))
+ with it('models puzzle'):
+ model = logic_problem._model(zebra_puzzle.SOURCE.split('\n'))
+ print(str(model))
+
- with it('exports a model'):
+ with it('exports a solution'):
problem = self.subject.problems()[0]
expect(problem.solution).to(look_like(zebra_puzzle.SOLUTION))
|
Update zebra puzzle to reflect LogicProblem changes.
|
## Code Before:
import astor
from data import warehouse
from puzzle.examples.public_domain import zebra_puzzle
from puzzle.problems import logic_problem
from puzzle.puzzlepedia import prod_config
from spec.mamba import *
with _description('zebra_puzzle'):
with description('solution'):
with before.all:
warehouse.save()
prod_config.init()
self.subject = zebra_puzzle.get()
with after.all:
prod_config.reset()
warehouse.restore()
with it('identifies puzzle type'):
problems = self.subject.problems()
expect(problems).to(have_len(1))
problem = problems[0]
expect(problem).to(be_a(logic_problem.LogicProblem))
with it('parses expressions'):
problem = self.subject.problems()[0]
expect(astor.to_source(problem._parse())).to(
look_like(zebra_puzzle.PARSED))
with it('exports a model'):
problem = self.subject.problems()[0]
expect(problem.solution).to(look_like(zebra_puzzle.SOLUTION))
## Instruction:
Update zebra puzzle to reflect LogicProblem changes.
## Code After:
import astor
from data import warehouse
from puzzle.examples.public_domain import zebra_puzzle
from puzzle.problems import logic_problem
from puzzle.puzzlepedia import prod_config
from spec.mamba import *
with _description('zebra_puzzle'):
with description('solution'):
with before.all:
warehouse.save()
prod_config.init()
self.subject = zebra_puzzle.get()
with after.all:
prod_config.reset()
warehouse.restore()
with it('identifies puzzle type'):
problems = self.subject.problems()
expect(problems).to(have_len(1))
problem = problems[0]
expect(problem).to(be_a(logic_problem.LogicProblem))
with it('parses expressions'):
parsed = logic_problem._parse(zebra_puzzle.SOURCE.split('\n'))
expect(astor.to_source(parsed)).to(look_like(zebra_puzzle.PARSED))
with it('models puzzle'):
model = logic_problem._model(zebra_puzzle.SOURCE.split('\n'))
print(str(model))
with it('exports a solution'):
problem = self.subject.problems()[0]
expect(problem.solution).to(look_like(zebra_puzzle.SOLUTION))
|
...
expect(problem).to(be_a(logic_problem.LogicProblem))
with it('parses expressions'):
parsed = logic_problem._parse(zebra_puzzle.SOURCE.split('\n'))
expect(astor.to_source(parsed)).to(look_like(zebra_puzzle.PARSED))
with it('models puzzle'):
model = logic_problem._model(zebra_puzzle.SOURCE.split('\n'))
print(str(model))
with it('exports a solution'):
problem = self.subject.problems()[0]
expect(problem.solution).to(look_like(zebra_puzzle.SOLUTION))
...
|
0ebf51994a73fdc7c4f13b274fc41bef541eea52
|
deflect/widgets.py
|
deflect/widgets.py
|
from __future__ import unicode_literals
from itertools import chain
from django import forms
from django.utils.encoding import force_text
from django.utils.html import format_html
from django.utils.safestring import mark_safe
class DataListInput(forms.TextInput):
"""
A form widget that displays a standard ``TextInput`` field, as well
as an HTML5 datalist element. This provides a set of options that
the user can select from, along with the ability to enter a custom
value. Suggested options are matched as the user begins typing.
"""
def __init__(self, attrs=None, choices=()):
super(DataListInput, self).__init__(attrs)
self.choices = list(chain.from_iterable(choices))
def render(self, name, value, attrs={}, choices=()):
attrs['list'] = 'id_%s_list' % name
output = [super(DataListInput, self).render(name, value, attrs)]
options = self.render_options(name, choices)
if options:
output.append(options)
return mark_safe('\n'.join(output))
def render_options(self, name, choices):
output = []
output.append('<datalist id="id_%s_list">' % name)
output.append('<select>')
for option in chain(self.choices, choices):
output.append(format_html('<option value="{0}" />', force_text(option)))
output.append('</select>')
output.append('</datalist>')
return '\n'.join(output)
|
from __future__ import unicode_literals
from itertools import chain
from django import forms
from django.utils.encoding import force_text
from django.utils.html import format_html
from django.utils.safestring import mark_safe
class DataListInput(forms.TextInput):
"""
A form widget that displays a standard ``TextInput`` field, as well
as an HTML5 datalist element. This provides a set of options that
the user can select from, along with the ability to enter a custom
value. Suggested options are matched as the user begins typing.
"""
def __init__(self, attrs=None, choices=()):
super(DataListInput, self).__init__(attrs)
self.choices = list(chain.from_iterable(choices))
def render(self, name, value, attrs={}, choices=()):
attrs['list'] = 'id_%s_list' % name
output = [super(DataListInput, self).render(name, value, attrs)]
options = self.render_options(name, choices)
if options:
output.append(options)
return mark_safe('\n'.join(output))
def render_options(self, name, choices):
output = []
output.append('<datalist id="id_%s_list">' % name)
output.append('<select style="display:none">')
for option in chain(self.choices, choices):
output.append(format_html('<option value="{0}" />', force_text(option)))
output.append('</select>')
output.append('</datalist>')
return '\n'.join(output)
|
Hide the option set from incompatible browsers
|
Hide the option set from incompatible browsers
|
Python
|
bsd-3-clause
|
jbittel/django-deflect
|
from __future__ import unicode_literals
from itertools import chain
from django import forms
from django.utils.encoding import force_text
from django.utils.html import format_html
from django.utils.safestring import mark_safe
class DataListInput(forms.TextInput):
"""
A form widget that displays a standard ``TextInput`` field, as well
as an HTML5 datalist element. This provides a set of options that
the user can select from, along with the ability to enter a custom
value. Suggested options are matched as the user begins typing.
"""
def __init__(self, attrs=None, choices=()):
super(DataListInput, self).__init__(attrs)
self.choices = list(chain.from_iterable(choices))
def render(self, name, value, attrs={}, choices=()):
attrs['list'] = 'id_%s_list' % name
output = [super(DataListInput, self).render(name, value, attrs)]
options = self.render_options(name, choices)
if options:
output.append(options)
return mark_safe('\n'.join(output))
def render_options(self, name, choices):
output = []
output.append('<datalist id="id_%s_list">' % name)
- output.append('<select>')
+ output.append('<select style="display:none">')
for option in chain(self.choices, choices):
output.append(format_html('<option value="{0}" />', force_text(option)))
output.append('</select>')
output.append('</datalist>')
return '\n'.join(output)
|
Hide the option set from incompatible browsers
|
## Code Before:
from __future__ import unicode_literals
from itertools import chain
from django import forms
from django.utils.encoding import force_text
from django.utils.html import format_html
from django.utils.safestring import mark_safe
class DataListInput(forms.TextInput):
"""
A form widget that displays a standard ``TextInput`` field, as well
as an HTML5 datalist element. This provides a set of options that
the user can select from, along with the ability to enter a custom
value. Suggested options are matched as the user begins typing.
"""
def __init__(self, attrs=None, choices=()):
super(DataListInput, self).__init__(attrs)
self.choices = list(chain.from_iterable(choices))
def render(self, name, value, attrs={}, choices=()):
attrs['list'] = 'id_%s_list' % name
output = [super(DataListInput, self).render(name, value, attrs)]
options = self.render_options(name, choices)
if options:
output.append(options)
return mark_safe('\n'.join(output))
def render_options(self, name, choices):
output = []
output.append('<datalist id="id_%s_list">' % name)
output.append('<select>')
for option in chain(self.choices, choices):
output.append(format_html('<option value="{0}" />', force_text(option)))
output.append('</select>')
output.append('</datalist>')
return '\n'.join(output)
## Instruction:
Hide the option set from incompatible browsers
## Code After:
from __future__ import unicode_literals
from itertools import chain
from django import forms
from django.utils.encoding import force_text
from django.utils.html import format_html
from django.utils.safestring import mark_safe
class DataListInput(forms.TextInput):
"""
A form widget that displays a standard ``TextInput`` field, as well
as an HTML5 datalist element. This provides a set of options that
the user can select from, along with the ability to enter a custom
value. Suggested options are matched as the user begins typing.
"""
def __init__(self, attrs=None, choices=()):
super(DataListInput, self).__init__(attrs)
self.choices = list(chain.from_iterable(choices))
def render(self, name, value, attrs={}, choices=()):
attrs['list'] = 'id_%s_list' % name
output = [super(DataListInput, self).render(name, value, attrs)]
options = self.render_options(name, choices)
if options:
output.append(options)
return mark_safe('\n'.join(output))
def render_options(self, name, choices):
output = []
output.append('<datalist id="id_%s_list">' % name)
output.append('<select style="display:none">')
for option in chain(self.choices, choices):
output.append(format_html('<option value="{0}" />', force_text(option)))
output.append('</select>')
output.append('</datalist>')
return '\n'.join(output)
|
// ... existing code ...
def render_options(self, name, choices):
output = []
output.append('<datalist id="id_%s_list">' % name)
output.append('<select style="display:none">')
for option in chain(self.choices, choices):
output.append(format_html('<option value="{0}" />', force_text(option)))
output.append('</select>')
// ... rest of the code ...
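For orientation, here is a hedged sketch of how a widget like DataListInput is typically attached to a form field. The form and field names are invented; only the import path follows the file location above. Note that choices is flattened with chain.from_iterable, so it expects an iterable of iterables.

# forms.py -- hypothetical usage, not part of django-deflect itself
from django import forms
from deflect.widgets import DataListInput

class RedirectForm(forms.Form):
    # the datalist offers suggestions, but any free-form value is still accepted
    campaign = forms.CharField(
        required=False,
        widget=DataListInput(choices=[['email', 'social', 'print']]),
    )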
|
61b38528b60203003b9595f7ba2204c287dc6970
|
string/compress.py
|
string/compress.py
|
def compress_str(str):
output = ""
curr_char = ""
char_count = ""
for i in str:
if curr_char != str[i]:
output = output + curr_char + char_count # add new unique character and its count to our output
curr_char = str[i] # move on to the next character in string
char_count = 1 # reset count to 1
|
def compress_str(str):
output = ""
curr_char = ""
char_count = ""
for i in str:
if curr_char != str[i]:
output = output + curr_char + char_count # add new unique character and its count to our output
curr_char = str[i] # move on to the next character in string
char_count = 1 # reset count to 1
else: # add to repeated count if there is a match
char_count += 1
|
Add to current count if there is a match
|
Add to current count if there is a match
|
Python
|
mit
|
derekmpham/interview-prep,derekmpham/interview-prep
|
def compress_str(str):
output = ""
curr_char = ""
char_count = ""
for i in str:
if curr_char != str[i]:
output = output + curr_char + char_count # add new unique character and its count to our output
curr_char = str[i] # move on to the next character in string
char_count = 1 # reset count to 1
+ else: # add to repeated count if there is a match
+ char_count += 1
|
Add to current count if there is a match
|
## Code Before:
def compress_str(str):
output = ""
curr_char = ""
char_count = ""
for i in str:
if curr_char != str[i]:
output = output + curr_char + char_count # add new unique character and its count to our output
curr_char = str[i] # move on to the next character in string
char_count = 1 # reset count to 1
## Instruction:
Add to current count if there is a match
## Code After:
def compress_str(str):
output = ""
curr_char = ""
char_count = ""
for i in str:
if curr_char != str[i]:
output = output + curr_char + char_count # add new unique character and its count to our output
curr_char = str[i] # move on to the next character in string
char_count = 1 # reset count to 1
else: # add to repeated count if there is a match
char_count += 1
|
...
output = output + curr_char + char_count # add new unique character and its count to our output
curr_char = str[i] # move on to the next character in string
char_count = 1 # reset count to 1
else: # add to repeated count if there is a match
char_count += 1
...
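Because the snippet iterates `for i in str` and then indexes with `str[i]` (so `i` is a character, not a position), it helps to see the same run-length idea written out end to end. The following is an independent, hedged sketch for comparison only, not the repository's eventual implementation:

def compress_str_sketch(text):
    """Run-length encode a string: "aaabcc" -> "a3b1c2"."""
    if not text:
        return ""
    output = []
    curr_char = text[0]
    char_count = 1
    for ch in text[1:]:
        if ch == curr_char:
            char_count += 1                               # extend the current run
        else:
            output.append(curr_char + str(char_count))    # flush the finished run
            curr_char = ch
            char_count = 1
    output.append(curr_char + str(char_count))            # flush the final run
    return "".join(output)

assert compress_str_sketch("aaabcc") == "a3b1c2"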
|
87771bda7fbf46519097ba433a7b4fd3f2cbaa7e
|
office_lunch_order/office_lunch_order_app/tests.py
|
office_lunch_order/office_lunch_order_app/tests.py
|
from django.test import TestCase, Client
c = Client()
response = c.get('/officelunchorder/')
response.status_code # 200
response.content
response = c.post('/officelunchorder/login/')
response.status_code # 200
response.content
response = c.get('/officelunchorder/logout/')
response.status_code # 200
response.content
response = c.get('/officelunchorder/orders/')
response.status_code # 302 found
response = c.get('/officelunchorder/new_order/')
response.status_code # 302 found
response = c.post('/officelunchorder/close_order/')
print(response.status_code) # 302 found
|
from django.test import TestCase, Client
c = Client()
response = c.get('/officelunchorder/')
response.status_code # 200
response.content
response = c.post('/officelunchorder/login/')
response.status_code # 200
response.content
response = c.get('/officelunchorder/logout/')
response.status_code # 200
response.content
response = c.get('/officelunchorder/orders/')
response.status_code # 302 found
response = c.get('/officelunchorder/new_order/')
response.status_code # 302 found
response = c.get('/officelunchorder/add_order/25/') # existing order_id
response.status_code # 302 found
response = c.get('/officelunchorder/order_details/25/') # existing order_id
response.status_code # 302 found
response = c.post('/officelunchorder/close_order/')
response.status_code # 302 found
|
Test add_order and order details with existing order_id url
|
Test add_order and order details with existing order_id url
|
Python
|
epl-1.0
|
MariuszKorotko/Office_Lunch_Order,MariuszKorotko/Office_Lunch_Order
|
from django.test import TestCase, Client
c = Client()
response = c.get('/officelunchorder/')
response.status_code # 200
response.content
response = c.post('/officelunchorder/login/')
response.status_code # 200
response.content
response = c.get('/officelunchorder/logout/')
response.status_code # 200
response.content
response = c.get('/officelunchorder/orders/')
response.status_code # 302 found
response = c.get('/officelunchorder/new_order/')
response.status_code # 302 found
+ response = c.get('/officelunchorder/add_order/25/') # existing order_id
+ response.status_code # 302 found
+ response = c.get('/officelunchorder/order_details/25/') # existing order_id
+ response.status_code # 302 found
response = c.post('/officelunchorder/close_order/')
- print(response.status_code) # 302 found
+ response.status_code # 302 found
+
+
|
Test add_order and order details with existing order_id url
|
## Code Before:
from django.test import TestCase, Client
c = Client()
response = c.get('/officelunchorder/')
response.status_code # 200
response.content
response = c.post('/officelunchorder/login/')
response.status_code # 200
response.content
response = c.get('/officelunchorder/logout/')
response.status_code # 200
response.content
response = c.get('/officelunchorder/orders/')
response.status_code # 302 found
response = c.get('/officelunchorder/new_order/')
response.status_code # 302 found
response = c.post('/officelunchorder/close_order/')
print(response.status_code) # 302 found
## Instruction:
Test add_order and order details with existing order_id url
## Code After:
from django.test import TestCase, Client
c = Client()
response = c.get('/officelunchorder/')
response.status_code # 200
response.content
response = c.post('/officelunchorder/login/')
response.status_code # 200
response.content
response = c.get('/officelunchorder/logout/')
response.status_code # 200
response.content
response = c.get('/officelunchorder/orders/')
response.status_code # 302 found
response = c.get('/officelunchorder/new_order/')
response.status_code # 302 found
response = c.get('/officelunchorder/add_order/25/') # existing order_id
response.status_code # 302 found
response = c.get('/officelunchorder/order_details/25/') # existing order_id
response.status_code # 302 found
response = c.post('/officelunchorder/close_order/')
response.status_code # 302 found
|
// ... existing code ...
response.status_code # 302 found
response = c.get('/officelunchorder/new_order/')
response.status_code # 302 found
response = c.get('/officelunchorder/add_order/25/') # existing order_id
response.status_code # 302 found
response = c.get('/officelunchorder/order_details/25/') # existing order_id
response.status_code # 302 found
response = c.post('/officelunchorder/close_order/')
response.status_code # 302 found
// ... rest of the code ...
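The module above fires requests at import time and records the expected status codes only in comments. As a point of comparison, the same expectations can be expressed as assertions inside a Django TestCase; the URLs and the order id 25 are taken from the snippet, the class and method names are invented, and the expected codes mirror the comments rather than anything verified here.

from django.test import TestCase

class OfficeLunchOrderViewTests(TestCase):
    def test_public_pages_respond(self):
        self.assertEqual(self.client.get('/officelunchorder/').status_code, 200)
        self.assertEqual(self.client.post('/officelunchorder/login/').status_code, 200)

    def test_order_pages_redirect_anonymous_users(self):
        # login-protected views are expected to redirect (302) for anonymous clients
        for url in ('/officelunchorder/orders/',
                    '/officelunchorder/new_order/',
                    '/officelunchorder/add_order/25/',
                    '/officelunchorder/order_details/25/'):
            self.assertEqual(self.client.get(url).status_code, 302)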
|
b8938849ce9239836d2b601dd43324284f1b5604
|
whatinstalled.py
|
whatinstalled.py
|
import os
history_file = os.path.expanduser("~")+"/.bash_history"
f = open(history_file,"r")
keywords = ["pip", "tar", "brew", "apt-get", "install", "luarocks", "easy_install", "gem", "npm"]
for line in f:
for item in keywords:
if item in line:
print(line[:-1])
break
|
import os
history_file = os.path.expanduser("~")+"/.bash_history"
f = open(history_file,"r")
keywords = ["pip", "tar", "brew", "apt-get", "aptitude", "apt", "install", "luarocks", "easy_install", "gem", "npm", "bower"]
for line in f:
for item in keywords:
if item in line:
print(line[:-1])
break
|
Add aptitude and apt + bower
|
Add aptitude and apt + bower
|
Python
|
mit
|
AlexMili/WhatInstalled
|
import os
history_file = os.path.expanduser("~")+"/.bash_history"
f = open(history_file,"r")
- keywords = ["pip", "tar", "brew", "apt-get", "install", "luarocks", "easy_install", "gem", "npm"]
+ keywords = ["pip", "tar", "brew", "apt-get", "aptitude", "apt", "install", "luarocks", "easy_install", "gem", "npm", "bower"]
for line in f:
for item in keywords:
if item in line:
print(line[:-1])
break
+
|
Add aptitude and apt + bower
|
## Code Before:
import os
history_file = os.path.expanduser("~")+"/.bash_history"
f = open(history_file,"r")
keywords = ["pip", "tar", "brew", "apt-get", "install", "luarocks", "easy_install", "gem", "npm"]
for line in f:
for item in keywords:
if item in line:
print(line[:-1])
break
## Instruction:
Add aptitude and apt + bower
## Code After:
import os
history_file = os.path.expanduser("~")+"/.bash_history"
f = open(history_file,"r")
keywords = ["pip", "tar", "brew", "apt-get", "aptitude", "apt", "install", "luarocks", "easy_install", "gem", "npm", "bower"]
for line in f:
for item in keywords:
if item in line:
print(line[:-1])
break
|
// ... existing code ...
history_file = os.path.expanduser("~")+"/.bash_history"
f = open(history_file,"r")
keywords = ["pip", "tar", "brew", "apt-get", "aptitude", "apt", "install", "luarocks", "easy_install", "gem", "npm", "bower"]
for line in f:
for item in keywords:
// ... rest of the code ...
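One caveat with plain substring matching: a short keyword such as "apt" will also hit unrelated history lines ("adapter", paths containing "apt", and so on). A hedged variant using word-boundary regexes is sketched below purely as an illustration of that refinement; it is not part of the project.

import os
import re

history_file = os.path.expanduser("~") + "/.bash_history"
keywords = ["pip", "tar", "brew", "apt-get", "aptitude", "apt",
            "install", "luarocks", "easy_install", "gem", "npm", "bower"]
# \b keeps "apt" from matching inside longer words such as "adapter"
pattern = re.compile(r"\b(" + "|".join(re.escape(k) for k in keywords) + r")\b")

with open(history_file, "r") as f:
    for line in f:
        if pattern.search(line):
            print(line.rstrip("\n"))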
|
a6e2c0fc837b17321e2979cb12ba2d0e69603eac
|
orderedmodel/__init__.py
|
orderedmodel/__init__.py
|
__all__ = ['OrderedModel', 'OrderedModelAdmin']
from models import OrderedModel
from admin import OrderedModelAdmin
|
from .models import OrderedModel
from .admin import OrderedModelAdmin
__all__ = ['OrderedModel', 'OrderedModelAdmin']
try:
from django.conf import settings
except ImportError:
pass
else:
if 'mptt' in settings.INSTALLED_APPS:
from .mptt_models import OrderableMPTTModel
from .mptt_admin import OrderedMPTTModelAdmin
__all__ += ['OrderableMPTTModel', 'OrderedMPTTModelAdmin']
|
Make it easy to import OrderableMPTTModel and OrderedMPTTModelAdmin from the orderedmodel module
|
Make it easy to import OrderableMPTTModel and OrderedMPTTModelAdmin from the orderedmodel module
|
Python
|
bsd-3-clause
|
MagicSolutions/django-orderedmodel,MagicSolutions/django-orderedmodel
|
+ from .models import OrderedModel
+ from .admin import OrderedModelAdmin
+
__all__ = ['OrderedModel', 'OrderedModelAdmin']
- from models import OrderedModel
+ try:
+ from django.conf import settings
+ except ImportError:
+ pass
+ else:
+ if 'mptt' in settings.INSTALLED_APPS:
+ from .mptt_models import OrderableMPTTModel
- from admin import OrderedModelAdmin
+ from .mptt_admin import OrderedMPTTModelAdmin
+ __all__ += ['OrderableMPTTModel', 'OrderedMPTTModelAdmin']
|
Make it easy to import OrderableMPTTModel and OrderedMPTTModelAdmin from the orderedmodel module
|
## Code Before:
__all__ = ['OrderedModel', 'OrderedModelAdmin']
from models import OrderedModel
from admin import OrderedModelAdmin
## Instruction:
Make it easy to import OrderableMPTTModel and OrderedMPTTModelAdmin from the orderedmodel module
## Code After:
from .models import OrderedModel
from .admin import OrderedModelAdmin
__all__ = ['OrderedModel', 'OrderedModelAdmin']
try:
from django.conf import settings
except ImportError:
pass
else:
if 'mptt' in settings.INSTALLED_APPS:
from .mptt_models import OrderableMPTTModel
from .mptt_admin import OrderedMPTTModelAdmin
__all__ += ['OrderableMPTTModel', 'OrderedMPTTModelAdmin']
|
// ... existing code ...
from .models import OrderedModel
from .admin import OrderedModelAdmin
__all__ = ['OrderedModel', 'OrderedModelAdmin']
try:
from django.conf import settings
except ImportError:
pass
else:
if 'mptt' in settings.INSTALLED_APPS:
from .mptt_models import OrderableMPTTModel
from .mptt_admin import OrderedMPTTModelAdmin
__all__ += ['OrderableMPTTModel', 'OrderedMPTTModelAdmin']
// ... rest of the code ...
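The effect of the conditional block is that the MPTT variants are only exported when 'mptt' appears in INSTALLED_APPS, so importing them fails with ImportError otherwise. A hedged sketch of a caller-side guard for that situation (the alias and the fallback choice are illustrative):

# hypothetical caller: prefer the MPTT-aware base class when it is exported
try:
    from orderedmodel import OrderableMPTTModel as OrderedBase
except ImportError:
    # mptt not installed/enabled, fall back to the flat ordered model
    from orderedmodel import OrderedModel as OrderedBase

# OrderedBase can then serve as the base class for project models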
|
4f7a64f3060c196a434e504847efc511e34537f6
|
asyncssh/crypto/__init__.py
|
asyncssh/crypto/__init__.py
|
"""A shim for accessing cryptographic primitives needed by asyncssh"""
import importlib
from .cipher import register_cipher, lookup_cipher
from .curve25519 import Curve25519DH
from . import chacha
pyca_available = importlib.find_loader('cryptography')
pycrypto_available = importlib.find_loader('Crypto')
if pyca_available:
from . import pyca
if pycrypto_available:
from . import pycrypto
if pyca_available:
from .pyca.dsa import DSAPrivateKey, DSAPublicKey
from .pyca.rsa import RSAPrivateKey, RSAPublicKey
elif pycrypto_available:
from .pycrypto.dsa import DSAPrivateKey, DSAPublicKey
from .pycrypto.rsa import RSAPrivateKey, RSAPublicKey
else:
raise ImportError('No suitable crypto library found.')
|
"""A shim for accessing cryptographic primitives needed by asyncssh"""
import importlib
from .cipher import register_cipher, lookup_cipher
try:
from .curve25519 import Curve25519DH
except ImportError:
pass
from . import chacha
pyca_available = importlib.find_loader('cryptography')
pycrypto_available = importlib.find_loader('Crypto')
if pyca_available:
from . import pyca
if pycrypto_available:
from . import pycrypto
if pyca_available:
from .pyca.dsa import DSAPrivateKey, DSAPublicKey
from .pyca.rsa import RSAPrivateKey, RSAPublicKey
elif pycrypto_available:
from .pycrypto.dsa import DSAPrivateKey, DSAPublicKey
from .pycrypto.rsa import RSAPrivateKey, RSAPublicKey
else:
raise ImportError('No suitable crypto library found.')
|
Allow Curve25519DH import to fail in crypto package
|
Allow Curve25519DH import to fail in crypto package
With the refactoring to avoid pylint warnings, a problem was introduced
in importing the crypto module when the curve25519 dependencies were
unavailable. This commit fixes that problem.
|
Python
|
epl-1.0
|
jonathanslenders/asyncssh
|
"""A shim for accessing cryptographic primitives needed by asyncssh"""
import importlib
from .cipher import register_cipher, lookup_cipher
+
+ try:
- from .curve25519 import Curve25519DH
+ from .curve25519 import Curve25519DH
+ except ImportError:
+ pass
from . import chacha
pyca_available = importlib.find_loader('cryptography')
pycrypto_available = importlib.find_loader('Crypto')
if pyca_available:
from . import pyca
if pycrypto_available:
from . import pycrypto
if pyca_available:
from .pyca.dsa import DSAPrivateKey, DSAPublicKey
from .pyca.rsa import RSAPrivateKey, RSAPublicKey
elif pycrypto_available:
from .pycrypto.dsa import DSAPrivateKey, DSAPublicKey
from .pycrypto.rsa import RSAPrivateKey, RSAPublicKey
else:
raise ImportError('No suitable crypto library found.')
|
Allow Curve25519DH import to fail in crypto package
|
## Code Before:
"""A shim for accessing cryptographic primitives needed by asyncssh"""
import importlib
from .cipher import register_cipher, lookup_cipher
from .curve25519 import Curve25519DH
from . import chacha
pyca_available = importlib.find_loader('cryptography')
pycrypto_available = importlib.find_loader('Crypto')
if pyca_available:
from . import pyca
if pycrypto_available:
from . import pycrypto
if pyca_available:
from .pyca.dsa import DSAPrivateKey, DSAPublicKey
from .pyca.rsa import RSAPrivateKey, RSAPublicKey
elif pycrypto_available:
from .pycrypto.dsa import DSAPrivateKey, DSAPublicKey
from .pycrypto.rsa import RSAPrivateKey, RSAPublicKey
else:
raise ImportError('No suitable crypto library found.')
## Instruction:
Allow Curve25519DH import to fail in crypto package
## Code After:
"""A shim for accessing cryptographic primitives needed by asyncssh"""
import importlib
from .cipher import register_cipher, lookup_cipher
try:
from .curve25519 import Curve25519DH
except ImportError:
pass
from . import chacha
pyca_available = importlib.find_loader('cryptography')
pycrypto_available = importlib.find_loader('Crypto')
if pyca_available:
from . import pyca
if pycrypto_available:
from . import pycrypto
if pyca_available:
from .pyca.dsa import DSAPrivateKey, DSAPublicKey
from .pyca.rsa import RSAPrivateKey, RSAPublicKey
elif pycrypto_available:
from .pycrypto.dsa import DSAPrivateKey, DSAPublicKey
from .pycrypto.rsa import RSAPrivateKey, RSAPublicKey
else:
raise ImportError('No suitable crypto library found.')
|
// ... existing code ...
import importlib
from .cipher import register_cipher, lookup_cipher
try:
from .curve25519 import Curve25519DH
except ImportError:
pass
from . import chacha
// ... rest of the code ...
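Since the shim now swallows the ImportError, Curve25519DH simply does not exist on the module when its dependencies are missing, and callers have to probe before offering the algorithm. The sketch below shows that general optional-dependency pattern; the algorithm list and helper function are illustrative and are not asyncssh's actual registration code.

# generic sketch of guarding an optional backend, outside asyncssh itself
try:
    from asyncssh.crypto import Curve25519DH
except ImportError:          # curve25519 backend (or asyncssh) not installed
    Curve25519DH = None

def supported_kex_algorithms():
    algorithms = ['diffie-hellman-group14-sha1']              # illustrative baseline
    if Curve25519DH is not None:
        algorithms.insert(0, 'curve25519-sha256@libssh.org')  # advertise only when importable
    return algorithms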
|
6696451b7c7a9b2de5b624b47159efae8fcf06b7
|
opwen_email_server/api/lokole.py
|
opwen_email_server/api/lokole.py
|
def upload(upload_info):
"""
:type upload_info: dict
"""
client_id = upload_info['client_id']
resource_id = upload_info['resource_id']
resource_type = upload_info['resource_type']
raise NotImplementedError
def download(client_id):
"""
:type client_id: str
:rtype dict
"""
return {
'resource_id': None,
'resource_type': None,
}
|
def upload(upload_info):
"""
:type upload_info: dict
"""
client_id = upload_info['client_id'] # noqa: F841
resource_id = upload_info['resource_id'] # noqa: F841
resource_type = upload_info['resource_type'] # noqa: F841
raise NotImplementedError
def download(client_id): # noqa: F841
"""
:type client_id: str
:rtype dict
"""
return {
'resource_id': None,
'resource_type': None,
}
|
Disable linter in in-progress code
|
Disable linter in in-progress code
|
Python
|
apache-2.0
|
ascoderu/opwen-cloudserver,ascoderu/opwen-cloudserver
|
def upload(upload_info):
"""
:type upload_info: dict
"""
- client_id = upload_info['client_id']
+ client_id = upload_info['client_id'] # noqa: F841
- resource_id = upload_info['resource_id']
+ resource_id = upload_info['resource_id'] # noqa: F841
- resource_type = upload_info['resource_type']
+ resource_type = upload_info['resource_type'] # noqa: F841
raise NotImplementedError
- def download(client_id):
+ def download(client_id): # noqa: F841
"""
:type client_id: str
:rtype dict
"""
return {
'resource_id': None,
'resource_type': None,
}
|
Disable linter in in-progress code
|
## Code Before:
def upload(upload_info):
"""
:type upload_info: dict
"""
client_id = upload_info['client_id']
resource_id = upload_info['resource_id']
resource_type = upload_info['resource_type']
raise NotImplementedError
def download(client_id):
"""
:type client_id: str
:rtype dict
"""
return {
'resource_id': None,
'resource_type': None,
}
## Instruction:
Disable linter in in-progress code
## Code After:
def upload(upload_info):
"""
:type upload_info: dict
"""
client_id = upload_info['client_id'] # noqa: F841
resource_id = upload_info['resource_id'] # noqa: F841
resource_type = upload_info['resource_type'] # noqa: F841
raise NotImplementedError
def download(client_id): # noqa: F841
"""
:type client_id: str
:rtype dict
"""
return {
'resource_id': None,
'resource_type': None,
}
|
# ... existing code ...
:type upload_info: dict
"""
client_id = upload_info['client_id'] # noqa: F841
resource_id = upload_info['resource_id'] # noqa: F841
resource_type = upload_info['resource_type'] # noqa: F841
raise NotImplementedError
def download(client_id): # noqa: F841
"""
:type client_id: str
:rtype dict
# ... rest of the code ...
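For readers unfamiliar with the marker, `# noqa: F841` suppresses only flake8's "local variable is assigned to but never used" warning on that line, whereas a bare `# noqa` would hide every check. An alternative some codebases use for work-in-progress stubs is to validate the expected keys without binding throwaway locals; the sketch below is purely illustrative and not how this project resolved it.

def upload_stub(upload_info):
    """Check the payload shape up front without leaving unused locals behind."""
    for key in ('client_id', 'resource_id', 'resource_type'):
        if key not in upload_info:
            raise KeyError(key)
    raise NotImplementedError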
|
bb4a67d2817ccca3b15e09db1d72823626bd2ed6
|
glitter/publisher/admin.py
|
glitter/publisher/admin.py
|
from __future__ import unicode_literals
from django import forms
from django.contrib.contenttypes.admin import GenericStackedInline
from .forms import object_version_choices
from .models import PublishAction
class ActionInline(GenericStackedInline):
model = PublishAction
fields = ('scheduled_time', 'publish_version')
extra = 0
def get_formset(self, request, obj=None, form=None, **kwargs):
class VersionForm(forms.ModelForm):
"""
Customised form which limits the users choices to versions which have been saved for
this object.
"""
class Meta:
widgets = {
'publish_version': forms.widgets.Select(
choices=object_version_choices(obj=obj),
),
}
BaseFormset = super(ActionInline, self).get_formset(
request, obj, form=VersionForm, **kwargs
)
class ActionFormset(BaseFormset):
"""
Customised formset to save the user who has created/updated the action.
"""
def save_new(self, form, commit):
obj = super(ActionFormset, self).save_new(form, commit=False)
obj.user = request.user
obj.save()
return obj
def save_existing(self, form, instance, commit):
obj = super(ActionFormset, self).save_existing(form, instance, commit=False)
obj.user = request.user
obj.save()
return obj
return ActionFormset
|
from __future__ import unicode_literals
from django import forms
from django.contrib.contenttypes.admin import GenericStackedInline
from .forms import object_version_choices
from .models import PublishAction
class ActionInline(GenericStackedInline):
model = PublishAction
fields = ('scheduled_time', 'publish_version')
extra = 0
def get_formset(self, request, obj=None, form=None, **kwargs):
BaseFormset = super(ActionInline, self).get_formset(request, obj, **kwargs)
class ActionFormset(BaseFormset):
"""
Customised formset to save the user who has created/updated the action.
"""
def save_new(self, form, commit):
obj = super(ActionFormset, self).save_new(form, commit=False)
obj.user = request.user
obj.save()
return obj
def save_existing(self, form, instance, commit):
obj = super(ActionFormset, self).save_existing(form, instance, commit=False)
obj.user = request.user
obj.save()
return obj
# Customised widget which limits the users choices to versions which have been saved for
# this object.
ActionFormset.form.base_fields['publish_version'].widget = forms.widgets.Select(
choices=object_version_choices(obj=obj),
)
return ActionFormset
|
Rework the versions form widget
|
Rework the versions form widget
|
Python
|
bsd-3-clause
|
developersociety/django-glitter,blancltd/django-glitter,blancltd/django-glitter,developersociety/django-glitter,blancltd/django-glitter,developersociety/django-glitter
|
from __future__ import unicode_literals
from django import forms
from django.contrib.contenttypes.admin import GenericStackedInline
from .forms import object_version_choices
from .models import PublishAction
class ActionInline(GenericStackedInline):
model = PublishAction
fields = ('scheduled_time', 'publish_version')
extra = 0
def get_formset(self, request, obj=None, form=None, **kwargs):
- class VersionForm(forms.ModelForm):
- """
- Customised form which limits the users choices to versions which have been saved for
- this object.
- """
- class Meta:
- widgets = {
- 'publish_version': forms.widgets.Select(
- choices=object_version_choices(obj=obj),
- ),
- }
-
- BaseFormset = super(ActionInline, self).get_formset(
+ BaseFormset = super(ActionInline, self).get_formset(request, obj, **kwargs)
- request, obj, form=VersionForm, **kwargs
- )
class ActionFormset(BaseFormset):
"""
Customised formset to save the user who has created/updated the action.
"""
def save_new(self, form, commit):
obj = super(ActionFormset, self).save_new(form, commit=False)
obj.user = request.user
obj.save()
return obj
def save_existing(self, form, instance, commit):
obj = super(ActionFormset, self).save_existing(form, instance, commit=False)
obj.user = request.user
obj.save()
return obj
+ # Customised widget which limits the users choices to versions which have been saved for
+ # this object.
+ ActionFormset.form.base_fields['publish_version'].widget = forms.widgets.Select(
+ choices=object_version_choices(obj=obj),
+ )
+
return ActionFormset
|
Rework the versions form widget
|
## Code Before:
from __future__ import unicode_literals
from django import forms
from django.contrib.contenttypes.admin import GenericStackedInline
from .forms import object_version_choices
from .models import PublishAction
class ActionInline(GenericStackedInline):
model = PublishAction
fields = ('scheduled_time', 'publish_version')
extra = 0
def get_formset(self, request, obj=None, form=None, **kwargs):
class VersionForm(forms.ModelForm):
"""
Customised form which limits the users choices to versions which have been saved for
this object.
"""
class Meta:
widgets = {
'publish_version': forms.widgets.Select(
choices=object_version_choices(obj=obj),
),
}
BaseFormset = super(ActionInline, self).get_formset(
request, obj, form=VersionForm, **kwargs
)
class ActionFormset(BaseFormset):
"""
Customised formset to save the user who has created/updated the action.
"""
def save_new(self, form, commit):
obj = super(ActionFormset, self).save_new(form, commit=False)
obj.user = request.user
obj.save()
return obj
def save_existing(self, form, instance, commit):
obj = super(ActionFormset, self).save_existing(form, instance, commit=False)
obj.user = request.user
obj.save()
return obj
return ActionFormset
## Instruction:
Rework the versions form widget
## Code After:
from __future__ import unicode_literals
from django import forms
from django.contrib.contenttypes.admin import GenericStackedInline
from .forms import object_version_choices
from .models import PublishAction
class ActionInline(GenericStackedInline):
model = PublishAction
fields = ('scheduled_time', 'publish_version')
extra = 0
def get_formset(self, request, obj=None, form=None, **kwargs):
BaseFormset = super(ActionInline, self).get_formset(request, obj, **kwargs)
class ActionFormset(BaseFormset):
"""
Customised formset to save the user who has created/updated the action.
"""
def save_new(self, form, commit):
obj = super(ActionFormset, self).save_new(form, commit=False)
obj.user = request.user
obj.save()
return obj
def save_existing(self, form, instance, commit):
obj = super(ActionFormset, self).save_existing(form, instance, commit=False)
obj.user = request.user
obj.save()
return obj
# Customised widget which limits the users choices to versions which have been saved for
# this object.
ActionFormset.form.base_fields['publish_version'].widget = forms.widgets.Select(
choices=object_version_choices(obj=obj),
)
return ActionFormset
|
...
extra = 0
def get_formset(self, request, obj=None, form=None, **kwargs):
BaseFormset = super(ActionInline, self).get_formset(request, obj, **kwargs)
class ActionFormset(BaseFormset):
"""
...
obj.save()
return obj
# Customised widget which limits the users choices to versions which have been saved for
# this object.
ActionFormset.form.base_fields['publish_version'].widget = forms.widgets.Select(
choices=object_version_choices(obj=obj),
)
return ActionFormset
...
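The reworked code drops the per-request ModelForm subclass and instead mutates base_fields on the form class that the inline formset factory already built, which swaps the widget for every form in that formset. A minimal, hedged illustration of the same mechanism on an invented form (only the mutation pattern carries over):

from django import forms

class ArticleForm(forms.Form):
    status = forms.CharField()

# base_fields holds the declared field instances shared by the class,
# so replacing a widget here affects every form instantiated afterwards
ArticleForm.base_fields['status'].widget = forms.Select(
    choices=[('d', 'Draft'), ('p', 'Published')],
)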
|
06349ea257219e8ad1808fa4fd77f34f7371894a
|
test/test.py
|
test/test.py
|
import os, shutil
from nose import with_setup
from mbutil import mbtiles_to_disk, disk_to_mbtiles
def clear_data():
try: shutil.rmtree('test/output')
except Exception: pass
try: os.path.mkdir('test/output')
except Exception: pass
@with_setup(clear_data, clear_data)
def test_mbtiles_to_disk():
mbtiles_to_disk('test/data/one_tile.mbtiles', 'test/output')
assert os.path.exists('test/output/0/0/0.png')
assert os.path.exists('test/output/metadata.json')
@with_setup(clear_data, clear_data)
def test_mbtiles_to_disk_and_back():
mbtiles_to_disk('test/data/one_tile.mbtiles', 'test/output')
assert os.path.exists('test/output/0/0/0.png')
disk_to_mbtiles('test/output/', 'test/output/one.mbtiles')
assert os.path.exists('test/output/one.mbtiles')
@with_setup(clear_data, clear_data)
def test_utf8grid_mbtiles_to_disk():
mbtiles_to_disk('test/data/utf8grid.mbtiles', 'test/output')
assert os.path.exists('test/output/0/0/0.grid.json')
assert os.path.exists('test/output/0/0/0.png')
assert os.path.exists('test/output/metadata.json')
|
import os, shutil
from nose import with_setup
from mbutil import mbtiles_to_disk, disk_to_mbtiles
def clear_data():
try: shutil.rmtree('test/output')
except Exception: pass
@with_setup(clear_data, clear_data)
def test_mbtiles_to_disk():
mbtiles_to_disk('test/data/one_tile.mbtiles', 'test/output')
assert os.path.exists('test/output/0/0/0.png')
assert os.path.exists('test/output/metadata.json')
@with_setup(clear_data, clear_data)
def test_mbtiles_to_disk_and_back():
mbtiles_to_disk('test/data/one_tile.mbtiles', 'test/output')
assert os.path.exists('test/output/0/0/0.png')
disk_to_mbtiles('test/output/', 'test/output/one.mbtiles')
assert os.path.exists('test/output/one.mbtiles')
@with_setup(clear_data, clear_data)
def test_utf8grid_mbtiles_to_disk():
mbtiles_to_disk('test/data/utf8grid.mbtiles', 'test/output')
assert os.path.exists('test/output/0/0/0.grid.json')
assert os.path.exists('test/output/0/0/0.png')
assert os.path.exists('test/output/metadata.json')
|
Remove dead code, os.path.mkdir does not even exist
|
Remove dead code, os.path.mkdir does not even exist
|
Python
|
bsd-3-clause
|
davvo/mbutil-eniro,mapbox/mbutil,mapbox/mbutil
|
import os, shutil
from nose import with_setup
from mbutil import mbtiles_to_disk, disk_to_mbtiles
def clear_data():
try: shutil.rmtree('test/output')
- except Exception: pass
-
- try: os.path.mkdir('test/output')
except Exception: pass
@with_setup(clear_data, clear_data)
def test_mbtiles_to_disk():
mbtiles_to_disk('test/data/one_tile.mbtiles', 'test/output')
assert os.path.exists('test/output/0/0/0.png')
assert os.path.exists('test/output/metadata.json')
@with_setup(clear_data, clear_data)
def test_mbtiles_to_disk_and_back():
mbtiles_to_disk('test/data/one_tile.mbtiles', 'test/output')
assert os.path.exists('test/output/0/0/0.png')
disk_to_mbtiles('test/output/', 'test/output/one.mbtiles')
assert os.path.exists('test/output/one.mbtiles')
@with_setup(clear_data, clear_data)
def test_utf8grid_mbtiles_to_disk():
mbtiles_to_disk('test/data/utf8grid.mbtiles', 'test/output')
assert os.path.exists('test/output/0/0/0.grid.json')
assert os.path.exists('test/output/0/0/0.png')
assert os.path.exists('test/output/metadata.json')
|
Remove dead code, os.path.mkdir does not even exist
|
## Code Before:
import os, shutil
from nose import with_setup
from mbutil import mbtiles_to_disk, disk_to_mbtiles
def clear_data():
try: shutil.rmtree('test/output')
except Exception: pass
try: os.path.mkdir('test/output')
except Exception: pass
@with_setup(clear_data, clear_data)
def test_mbtiles_to_disk():
mbtiles_to_disk('test/data/one_tile.mbtiles', 'test/output')
assert os.path.exists('test/output/0/0/0.png')
assert os.path.exists('test/output/metadata.json')
@with_setup(clear_data, clear_data)
def test_mbtiles_to_disk_and_back():
mbtiles_to_disk('test/data/one_tile.mbtiles', 'test/output')
assert os.path.exists('test/output/0/0/0.png')
disk_to_mbtiles('test/output/', 'test/output/one.mbtiles')
assert os.path.exists('test/output/one.mbtiles')
@with_setup(clear_data, clear_data)
def test_utf8grid_mbtiles_to_disk():
mbtiles_to_disk('test/data/utf8grid.mbtiles', 'test/output')
assert os.path.exists('test/output/0/0/0.grid.json')
assert os.path.exists('test/output/0/0/0.png')
assert os.path.exists('test/output/metadata.json')
## Instruction:
Remove dead code, os.path.mkdir does not even exist
## Code After:
import os, shutil
from nose import with_setup
from mbutil import mbtiles_to_disk, disk_to_mbtiles
def clear_data():
try: shutil.rmtree('test/output')
except Exception: pass
@with_setup(clear_data, clear_data)
def test_mbtiles_to_disk():
mbtiles_to_disk('test/data/one_tile.mbtiles', 'test/output')
assert os.path.exists('test/output/0/0/0.png')
assert os.path.exists('test/output/metadata.json')
@with_setup(clear_data, clear_data)
def test_mbtiles_to_disk_and_back():
mbtiles_to_disk('test/data/one_tile.mbtiles', 'test/output')
assert os.path.exists('test/output/0/0/0.png')
disk_to_mbtiles('test/output/', 'test/output/one.mbtiles')
assert os.path.exists('test/output/one.mbtiles')
@with_setup(clear_data, clear_data)
def test_utf8grid_mbtiles_to_disk():
mbtiles_to_disk('test/data/utf8grid.mbtiles', 'test/output')
assert os.path.exists('test/output/0/0/0.grid.json')
assert os.path.exists('test/output/0/0/0.png')
assert os.path.exists('test/output/metadata.json')
|
...
def clear_data():
try: shutil.rmtree('test/output')
except Exception: pass
@with_setup(clear_data, clear_data)
...
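The deleted branch called os.path.mkdir, which has never existed (directory creation lives in os.mkdir / os.makedirs), so the second try/except silently did nothing. If recreating the output directory were actually wanted, a hedged sketch of a working equivalent would be:

import os
import shutil

def clear_data(path='test/output'):
    shutil.rmtree(path, ignore_errors=True)   # drop any stale results
    os.makedirs(path, exist_ok=True)          # recreate an empty directory (Python 3.2+)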
|
dc071e4961c7db7e98e7dfdcd74cce368ce31039
|
dataportal/tests/test_examples.py
|
dataportal/tests/test_examples.py
|
from nose.tools import assert_true
from ..examples.sample_data import (temperature_ramp, multisource_event,
image_and_scalar)
from metadatastore.api import Document
def run_example(example):
events = example.run()
assert_true(isinstance(events, list))
assert_true(isinstance(events[0], Document))
def test_examples():
for example in [temperature_ramp, multisource_event, image_and_scalar]:
yield run_example, example
|
import subprocess
from nose.tools import assert_true, assert_equal
from ..examples.sample_data import (temperature_ramp, multisource_event,
image_and_scalar)
from metadatastore.api import Document
examples = [temperature_ramp, multisource_event, image_and_scalar]
def run_example_programmatically(example):
events = example.run()
assert_true(isinstance(events, list))
assert_true(isinstance(events[0], Document))
def run_example_from_commandline(example):
command = ['python', example.__file__]
p = subprocess.Popen(command)
return_code = p.wait()
assert_equal(return_code, 0) # successful execution
def test_examples_programmatically():
for example in examples:
yield run_example_programmatically, example
def test_examples_from_commandline():
for example in examples:
yield run_example_from_commandline, example
|
Test commandline execution of examples.
|
TST: Test commandline execution of examples.
|
Python
|
bsd-3-clause
|
tacaswell/dataportal,NSLS-II/datamuxer,tacaswell/dataportal,danielballan/datamuxer,NSLS-II/dataportal,ericdill/datamuxer,NSLS-II/dataportal,ericdill/databroker,ericdill/datamuxer,ericdill/databroker,danielballan/dataportal,danielballan/datamuxer,danielballan/dataportal
|
+ import subprocess
- from nose.tools import assert_true
+ from nose.tools import assert_true, assert_equal
from ..examples.sample_data import (temperature_ramp, multisource_event,
image_and_scalar)
from metadatastore.api import Document
- def run_example(example):
+
+ examples = [temperature_ramp, multisource_event, image_and_scalar]
+
+
+ def run_example_programmatically(example):
events = example.run()
assert_true(isinstance(events, list))
assert_true(isinstance(events[0], Document))
- def test_examples():
- for example in [temperature_ramp, multisource_event, image_and_scalar]:
- yield run_example, example
+ def run_example_from_commandline(example):
+ command = ['python', example.__file__]
+ p = subprocess.Popen(command)
+ return_code = p.wait()
+ assert_equal(return_code, 0) # successful execution
+
+
+ def test_examples_programmatically():
+ for example in examples:
+ yield run_example_programmatically, example
+
+
+ def test_examples_from_commandline():
+ for example in examples:
+ yield run_example_from_commandline, example
+
|
Test commandline execution of examples.
|
## Code Before:
from nose.tools import assert_true
from ..examples.sample_data import (temperature_ramp, multisource_event,
image_and_scalar)
from metadatastore.api import Document
def run_example(example):
events = example.run()
assert_true(isinstance(events, list))
assert_true(isinstance(events[0], Document))
def test_examples():
for example in [temperature_ramp, multisource_event, image_and_scalar]:
yield run_example, example
## Instruction:
Test commandline execution of examples.
## Code After:
import subprocess
from nose.tools import assert_true, assert_equal
from ..examples.sample_data import (temperature_ramp, multisource_event,
image_and_scalar)
from metadatastore.api import Document
examples = [temperature_ramp, multisource_event, image_and_scalar]
def run_example_programmatically(example):
events = example.run()
assert_true(isinstance(events, list))
assert_true(isinstance(events[0], Document))
def run_example_from_commandline(example):
command = ['python', example.__file__]
p = subprocess.Popen(command)
return_code = p.wait()
assert_equal(return_code, 0) # successful execution
def test_examples_programmatically():
for example in examples:
yield run_example_programmatically, example
def test_examples_from_commandline():
for example in examples:
yield run_example_from_commandline, example
|
// ... existing code ...
import subprocess
from nose.tools import assert_true, assert_equal
from ..examples.sample_data import (temperature_ramp, multisource_event,
image_and_scalar)
from metadatastore.api import Document
examples = [temperature_ramp, multisource_event, image_and_scalar]
def run_example_programmatically(example):
events = example.run()
assert_true(isinstance(events, list))
assert_true(isinstance(events[0], Document))
def run_example_from_commandline(example):
command = ['python', example.__file__]
p = subprocess.Popen(command)
return_code = p.wait()
assert_equal(return_code, 0) # successful execution
def test_examples_programmatically():
for example in examples:
yield run_example_programmatically, example
def test_examples_from_commandline():
for example in examples:
yield run_example_from_commandline, example
// ... rest of the code ...
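The command-line test shells out with a bare 'python', which may resolve to a different interpreter than the one running the suite. A hedged modern variant using sys.executable and subprocess.run is sketched below for comparison; it is not what the repository adopted.

import subprocess
import sys

def run_example_from_commandline(example_module):
    command = [sys.executable, example_module.__file__]   # same interpreter as the test run
    completed = subprocess.run(command)                   # waits for the child process
    assert completed.returncode == 0, completed           # 0 means the example ran cleanly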
|
534066b1228bb0070c1d62445155afa696a37921
|
contrail_provisioning/config/templates/contrail_plugin_ini.py
|
contrail_provisioning/config/templates/contrail_plugin_ini.py
|
import string
template = string.Template("""
[APISERVER]
api_server_ip = $__contrail_api_server_ip__
api_server_port = $__contrail_api_server_port__
multi_tenancy = $__contrail_multi_tenancy__
#use_ssl = False
#insecure = False
#certfile=$__contrail_api_server_cert_file__
#keyfile=$__contrail_api_server_key_file__
#cafile=$__contrail_api_server_ca_file__
contrail_extensions = ipam:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_ipam.NeutronPluginContrailIpam,policy:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_policy.NeutronPluginContrailPolicy,route-table:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_vpc.NeutronPluginContrailVpc,contrail:None
[COLLECTOR]
analytics_api_ip = $__contrail_analytics_server_ip__
analytics_api_port = $__contrail_analytics_server_port__
[KEYSTONE]
auth_url = $__contrail_ks_auth_protocol__://$__contrail_keystone_ip__:$__contrail_ks_auth_port__/v2.0
admin_user=$__contrail_admin_user__
admin_password=$__contrail_admin_password__
admin_tenant_name=$__contrail_admin_tenant_name__
""")
|
import string
template = string.Template("""
[APISERVER]
api_server_ip = $__contrail_api_server_ip__
api_server_port = $__contrail_api_server_port__
multi_tenancy = $__contrail_multi_tenancy__
#use_ssl = False
#insecure = False
#certfile=$__contrail_api_server_cert_file__
#keyfile=$__contrail_api_server_key_file__
#cafile=$__contrail_api_server_ca_file__
contrail_extensions = ipam:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_ipam.NeutronPluginContrailIpam,policy:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_policy.NeutronPluginContrailPolicy,route-table:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_vpc.NeutronPluginContrailVpc,contrail:None,service-interface:None,vf-binding:None
[COLLECTOR]
analytics_api_ip = $__contrail_analytics_server_ip__
analytics_api_port = $__contrail_analytics_server_port__
[KEYSTONE]
auth_url = $__contrail_ks_auth_protocol__://$__contrail_keystone_ip__:$__contrail_ks_auth_port__/v2.0
admin_user=$__contrail_admin_user__
admin_password=$__contrail_admin_password__
admin_tenant_name=$__contrail_admin_tenant_name__
""")
|
Enable service-interface and vf-binding extensions by default in contrail based provisioning.
|
Enable service-interface and vf-binding extensions by default in
contrail based provisioning.
Change-Id: I5916f41cdf12ad54e74c0f76de244ed60f57aea5
Partial-Bug: 1556336
|
Python
|
apache-2.0
|
Juniper/contrail-provisioning,Juniper/contrail-provisioning
|
import string
template = string.Template("""
[APISERVER]
api_server_ip = $__contrail_api_server_ip__
api_server_port = $__contrail_api_server_port__
multi_tenancy = $__contrail_multi_tenancy__
#use_ssl = False
#insecure = False
#certfile=$__contrail_api_server_cert_file__
#keyfile=$__contrail_api_server_key_file__
#cafile=$__contrail_api_server_ca_file__
- contrail_extensions = ipam:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_ipam.NeutronPluginContrailIpam,policy:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_policy.NeutronPluginContrailPolicy,route-table:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_vpc.NeutronPluginContrailVpc,contrail:None
+ contrail_extensions = ipam:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_ipam.NeutronPluginContrailIpam,policy:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_policy.NeutronPluginContrailPolicy,route-table:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_vpc.NeutronPluginContrailVpc,contrail:None,service-interface:None,vf-binding:None
[COLLECTOR]
analytics_api_ip = $__contrail_analytics_server_ip__
analytics_api_port = $__contrail_analytics_server_port__
[KEYSTONE]
auth_url = $__contrail_ks_auth_protocol__://$__contrail_keystone_ip__:$__contrail_ks_auth_port__/v2.0
admin_user=$__contrail_admin_user__
admin_password=$__contrail_admin_password__
admin_tenant_name=$__contrail_admin_tenant_name__
""")
|
Enable service-interface and vf-binding extensions by default in contrail based provisioning.
|
## Code Before:
import string
template = string.Template("""
[APISERVER]
api_server_ip = $__contrail_api_server_ip__
api_server_port = $__contrail_api_server_port__
multi_tenancy = $__contrail_multi_tenancy__
#use_ssl = False
#insecure = False
#certfile=$__contrail_api_server_cert_file__
#keyfile=$__contrail_api_server_key_file__
#cafile=$__contrail_api_server_ca_file__
contrail_extensions = ipam:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_ipam.NeutronPluginContrailIpam,policy:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_policy.NeutronPluginContrailPolicy,route-table:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_vpc.NeutronPluginContrailVpc,contrail:None
[COLLECTOR]
analytics_api_ip = $__contrail_analytics_server_ip__
analytics_api_port = $__contrail_analytics_server_port__
[KEYSTONE]
auth_url = $__contrail_ks_auth_protocol__://$__contrail_keystone_ip__:$__contrail_ks_auth_port__/v2.0
admin_user=$__contrail_admin_user__
admin_password=$__contrail_admin_password__
admin_tenant_name=$__contrail_admin_tenant_name__
""")
## Instruction:
Enable service-interface and vf-binding extensions by default in contrail based provisioning.
## Code After:
import string
template = string.Template("""
[APISERVER]
api_server_ip = $__contrail_api_server_ip__
api_server_port = $__contrail_api_server_port__
multi_tenancy = $__contrail_multi_tenancy__
#use_ssl = False
#insecure = False
#certfile=$__contrail_api_server_cert_file__
#keyfile=$__contrail_api_server_key_file__
#cafile=$__contrail_api_server_ca_file__
contrail_extensions = ipam:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_ipam.NeutronPluginContrailIpam,policy:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_policy.NeutronPluginContrailPolicy,route-table:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_vpc.NeutronPluginContrailVpc,contrail:None,service-interface:None,vf-binding:None
[COLLECTOR]
analytics_api_ip = $__contrail_analytics_server_ip__
analytics_api_port = $__contrail_analytics_server_port__
[KEYSTONE]
auth_url = $__contrail_ks_auth_protocol__://$__contrail_keystone_ip__:$__contrail_ks_auth_port__/v2.0
admin_user=$__contrail_admin_user__
admin_password=$__contrail_admin_password__
admin_tenant_name=$__contrail_admin_tenant_name__
""")
|
...
#certfile=$__contrail_api_server_cert_file__
#keyfile=$__contrail_api_server_key_file__
#cafile=$__contrail_api_server_ca_file__
contrail_extensions = ipam:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_ipam.NeutronPluginContrailIpam,policy:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_policy.NeutronPluginContrailPolicy,route-table:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_vpc.NeutronPluginContrailVpc,contrail:None,service-interface:None,vf-binding:None
[COLLECTOR]
analytics_api_ip = $__contrail_analytics_server_ip__
...
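Rendering the template above is a plain string.Template substitution: each $__name__ placeholder is filled from a mapping, substitute raises KeyError for anything missing, and safe_substitute leaves unknown placeholders untouched. A hedged sketch with made-up values (the import path mirrors the file location above):

from contrail_provisioning.config.templates.contrail_plugin_ini import template

rendered = template.safe_substitute({
    '__contrail_api_server_ip__': '192.0.2.10',   # documentation-range example address
    '__contrail_api_server_port__': '8082',
    '__contrail_multi_tenancy__': 'True',
})
# placeholders not supplied here (e.g. the keystone block) pass through unchanged
print(rendered)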
|
c8779edcb4078c799b7112625b5495f63a00e428
|
l10n_ro_partner_unique/models/res_partner.py
|
l10n_ro_partner_unique/models/res_partner.py
|
from odoo import _, api, models
from odoo.exceptions import ValidationError
class ResPartner(models.Model):
_inherit = "res.partner"
@api.model
def _get_vat_nrc_constrain_domain(self):
domain = [
("company_id", "=", self.company_id),
("parent_id", "=", False),
("vat", "=", self.vat),
"|",
("nrc", "=", self.nrc),
("nrc", "=", False),
]
return domain
@api.constrains("vat", "nrc")
def _check_vat_nrc_unique(self):
for record in self:
if record.vat:
domain = record._get_vat_nrc_constrain_domain()
found = self.env["res.partner"].search(domain)
if len(found) > 1:
raise ValidationError(
_("The VAT and NRC pair (%s, %s) must be unique ids=%s!")
% (record.vat, record.nrc, found.ids)
)
|
from odoo import _, api, models
from odoo.exceptions import ValidationError
class ResPartner(models.Model):
_inherit = "res.partner"
@api.model
def _get_vat_nrc_constrain_domain(self):
domain = [
("company_id", "=", self.company_id.id if self.company_id else False),
("parent_id", "=", False),
("vat", "=", self.vat),
"|",
("nrc", "=", self.nrc),
("nrc", "=", False),
]
return domain
@api.constrains("vat", "nrc")
def _check_vat_nrc_unique(self):
for record in self:
if record.vat:
domain = record._get_vat_nrc_constrain_domain()
found = self.env["res.partner"].search(domain)
if len(found) > 1:
raise ValidationError(
_("The VAT and NRC pair (%s, %s) must be unique ids=%s!")
% (record.vat, record.nrc, found.ids)
)
|
Add vat unique per company
|
Add vat unique per company
|
Python
|
agpl-3.0
|
OCA/l10n-romania,OCA/l10n-romania
|
from odoo import _, api, models
from odoo.exceptions import ValidationError
class ResPartner(models.Model):
_inherit = "res.partner"
@api.model
def _get_vat_nrc_constrain_domain(self):
domain = [
- ("company_id", "=", self.company_id),
+ ("company_id", "=", self.company_id.id if self.company_id else False),
("parent_id", "=", False),
("vat", "=", self.vat),
"|",
("nrc", "=", self.nrc),
("nrc", "=", False),
]
return domain
@api.constrains("vat", "nrc")
def _check_vat_nrc_unique(self):
for record in self:
if record.vat:
domain = record._get_vat_nrc_constrain_domain()
found = self.env["res.partner"].search(domain)
if len(found) > 1:
raise ValidationError(
_("The VAT and NRC pair (%s, %s) must be unique ids=%s!")
% (record.vat, record.nrc, found.ids)
)
|
Add vat unique per company
|
## Code Before:
from odoo import _, api, models
from odoo.exceptions import ValidationError
class ResPartner(models.Model):
_inherit = "res.partner"
@api.model
def _get_vat_nrc_constrain_domain(self):
domain = [
("company_id", "=", self.company_id),
("parent_id", "=", False),
("vat", "=", self.vat),
"|",
("nrc", "=", self.nrc),
("nrc", "=", False),
]
return domain
@api.constrains("vat", "nrc")
def _check_vat_nrc_unique(self):
for record in self:
if record.vat:
domain = record._get_vat_nrc_constrain_domain()
found = self.env["res.partner"].search(domain)
if len(found) > 1:
raise ValidationError(
_("The VAT and NRC pair (%s, %s) must be unique ids=%s!")
% (record.vat, record.nrc, found.ids)
)
## Instruction:
Add vat unique per company
## Code After:
from odoo import _, api, models
from odoo.exceptions import ValidationError
class ResPartner(models.Model):
_inherit = "res.partner"
@api.model
def _get_vat_nrc_constrain_domain(self):
domain = [
("company_id", "=", self.company_id.id if self.company_id else False),
("parent_id", "=", False),
("vat", "=", self.vat),
"|",
("nrc", "=", self.nrc),
("nrc", "=", False),
]
return domain
@api.constrains("vat", "nrc")
def _check_vat_nrc_unique(self):
for record in self:
if record.vat:
domain = record._get_vat_nrc_constrain_domain()
found = self.env["res.partner"].search(domain)
if len(found) > 1:
raise ValidationError(
_("The VAT and NRC pair (%s, %s) must be unique ids=%s!")
% (record.vat, record.nrc, found.ids)
)
|
...
@api.model
def _get_vat_nrc_constrain_domain(self):
domain = [
("company_id", "=", self.company_id.id if self.company_id else False),
("parent_id", "=", False),
("vat", "=", self.vat),
"|",
...
|
7437382d966d39c4de21d2686bd8f31a23e5c47b
|
IPython/html/texteditor/handlers.py
|
IPython/html/texteditor/handlers.py
|
"""Tornado handlers for the terminal emulator."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from tornado import web
from ..base.handlers import IPythonHandler, path_regex
from ..utils import url_escape
class EditorHandler(IPythonHandler):
"""Render the text editor interface."""
@web.authenticated
def get(self, path):
path = path.strip('/')
if not self.contents_manager.file_exists(path):
raise web.HTTPError(404, u'File does not exist: %s' % path)
self.write(self.render_template('texteditor.html',
file_path=url_escape(path),
)
)
default_handlers = [
(r"/texteditor%s" % path_regex, EditorHandler),
]
|
"""Tornado handlers for the terminal emulator."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from tornado import web
from ..base.handlers import IPythonHandler, path_regex
from ..utils import url_escape
class EditorHandler(IPythonHandler):
"""Render the text editor interface."""
@web.authenticated
def get(self, path):
path = path.strip('/')
if not self.contents_manager.file_exists(path):
raise web.HTTPError(404, u'File does not exist: %s' % path)
self.write(self.render_template('texteditor.html',
file_path=url_escape(path),
page_title=path.rsplit('/', 1)[-1] + " (editing)",
)
)
default_handlers = [
(r"/texteditor%s" % path_regex, EditorHandler),
]
|
Set page title for editor
|
Set page title for editor
|
Python
|
bsd-3-clause
|
ipython/ipython,ipython/ipython
|
"""Tornado handlers for the terminal emulator."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from tornado import web
from ..base.handlers import IPythonHandler, path_regex
from ..utils import url_escape
class EditorHandler(IPythonHandler):
"""Render the text editor interface."""
@web.authenticated
def get(self, path):
path = path.strip('/')
if not self.contents_manager.file_exists(path):
raise web.HTTPError(404, u'File does not exist: %s' % path)
self.write(self.render_template('texteditor.html',
file_path=url_escape(path),
+ page_title=path.rsplit('/', 1)[-1] + " (editing)",
)
)
default_handlers = [
(r"/texteditor%s" % path_regex, EditorHandler),
]
|
Set page title for editor
|
## Code Before:
"""Tornado handlers for the terminal emulator."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from tornado import web
from ..base.handlers import IPythonHandler, path_regex
from ..utils import url_escape
class EditorHandler(IPythonHandler):
"""Render the text editor interface."""
@web.authenticated
def get(self, path):
path = path.strip('/')
if not self.contents_manager.file_exists(path):
raise web.HTTPError(404, u'File does not exist: %s' % path)
self.write(self.render_template('texteditor.html',
file_path=url_escape(path),
)
)
default_handlers = [
(r"/texteditor%s" % path_regex, EditorHandler),
]
## Instruction:
Set page title for editor
## Code After:
"""Tornado handlers for the terminal emulator."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from tornado import web
from ..base.handlers import IPythonHandler, path_regex
from ..utils import url_escape
class EditorHandler(IPythonHandler):
"""Render the text editor interface."""
@web.authenticated
def get(self, path):
path = path.strip('/')
if not self.contents_manager.file_exists(path):
raise web.HTTPError(404, u'File does not exist: %s' % path)
self.write(self.render_template('texteditor.html',
file_path=url_escape(path),
page_title=path.rsplit('/', 1)[-1] + " (editing)",
)
)
default_handlers = [
(r"/texteditor%s" % path_regex, EditorHandler),
]
|
// ... existing code ...
self.write(self.render_template('texteditor.html',
file_path=url_escape(path),
page_title=path.rsplit('/', 1)[-1] + " (editing)",
)
)
// ... rest of the code ...
|
7bdcc30612636d2c27ea01a7d14b1839696fa7a0
|
newsman/watchdog/clean_process.py
|
newsman/watchdog/clean_process.py
|
# @author chengdujin
# @contact [email protected]
# @created Aug. 22, 2013
import sys
reload(sys)
sys.setdefaultencoding('UTF-8')
import subprocess
def clean():
"""
kill zombie processes if there is any
"""
command = "kill -HUP `ps -A -ostat,ppid | grep -e '^[Zz]' | awk '{print $2}'`"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
if __name__ == '__main__':
clean()
|
# @author chengdujin
# @contact [email protected]
# @created Aug. 22, 2013
import sys
reload(sys)
sys.setdefaultencoding('UTF-8')
import subprocess
def clean():
"""
kill zombie processes if there is any
"""
command = "kill -HUP `ps -A -ostat,ppid | grep -e '^[Zz]' | awk '{print $2}'`"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
command = "ps -xal | grep p[y]thon | grep '<defunct>' | awk '{print $4}' | xargs kill -9"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
if __name__ == '__main__':
clean()
|
Add code to remove defunct python processes
|
Add code to remove defunct python processes
|
Python
|
agpl-3.0
|
chengdujin/newsman,chengdujin/newsman,chengdujin/newsman
|
# @author chengdujin
# @contact [email protected]
# @created Aug. 22, 2013
import sys
reload(sys)
sys.setdefaultencoding('UTF-8')
import subprocess
def clean():
"""
kill zombie processes if there is any
"""
command = "kill -HUP `ps -A -ostat,ppid | grep -e '^[Zz]' | awk '{print $2}'`"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
+ command = "ps -xal | grep p[y]thon | grep '<defunct>' | awk '{print $4}' | xargs kill -9"
+ subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
+
if __name__ == '__main__':
clean()
|
Add code to remove defunct python processes
|
## Code Before:
# @author chengdujin
# @contact [email protected]
# @created Aug. 22, 2013
import sys
reload(sys)
sys.setdefaultencoding('UTF-8')
import subprocess
def clean():
"""
kill zombie processes if there is any
"""
command = "kill -HUP `ps -A -ostat,ppid | grep -e '^[Zz]' | awk '{print $2}'`"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
if __name__ == '__main__':
clean()
## Instruction:
Add code to remove defunct python processes
## Code After:
# @author chengdujin
# @contact [email protected]
# @created Aug. 22, 2013
import sys
reload(sys)
sys.setdefaultencoding('UTF-8')
import subprocess
def clean():
"""
kill zombie processes if there is any
"""
command = "kill -HUP `ps -A -ostat,ppid | grep -e '^[Zz]' | awk '{print $2}'`"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
command = "ps -xal | grep p[y]thon | grep '<defunct>' | awk '{print $4}' | xargs kill -9"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
if __name__ == '__main__':
clean()
|
...
command = "kill -HUP `ps -A -ostat,ppid | grep -e '^[Zz]' | awk '{print $2}'`"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
command = "ps -xal | grep p[y]thon | grep '<defunct>' | awk '{print $4}' | xargs kill -9"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
if __name__ == '__main__':
clean()
...
|