commit stringlengths 40..40 | old_file stringlengths 4..118 | new_file stringlengths 4..118 | old_contents stringlengths 10..2.94k | new_contents stringlengths 21..3.18k | subject stringlengths 16..444 | message stringlengths 17..2.63k | lang stringclasses 1 value | license stringclasses 13 values | repos stringlengths 5..43k | ndiff stringlengths 52..3.32k | instruction stringlengths 16..444 | content stringlengths 133..4.32k | fuzzy_diff stringlengths 33..3.23k |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
45f33dcf98b7b20fbedf3e05ca5c575ce3cbcbb3
|
scripts/generate-invoice.py
|
scripts/generate-invoice.py
|
import argparse
import yaml
import jinja2
import weasyprint
parser = argparse.ArgumentParser()
parser.add_argument('--data', help='path to data directory', required=True)
parser.add_argument('--number', help='Invoice number', type=int, required=True)
args = parser.parse_args()
data_directory = str(args.data)
invoice_number = str(args.number)
supplier_file = open(data_directory + 'data/supplier.yaml')
supplier_data = yaml.safe_load(supplier_file.read())
supplier_file.close()
invoice_file = open(data_directory + 'data/invoices/' + invoice_number + '.yaml')
invoice_data = yaml.safe_load(invoice_file.read())
invoice_file.close()
# TODO: Validation
# TODO: Sum of invoice items equals total
# TODO: Invoice number matches filename
client_file = open(data_directory + 'data/clients/' + invoice_data['client'] + '.yaml')
client_data = yaml.safe_load(client_file.read())
client_file.close()
template_environment = jinja2.Environment(loader = jinja2.FileSystemLoader('../templates/'))
template = template_environment.get_template('invoice.html')
html_data = template.render(supplier = supplier_data, invoice = invoice_data, client = client_data)
weasyprint.HTML(string = html_data).write_pdf(data_directory + 'output/invoices/' + invoice_number + '.pdf')
|
import argparse
import yaml
import jinja2
import weasyprint
parser = argparse.ArgumentParser()
parser.add_argument('--data', help='path to data directory', required=True)
parser.add_argument('--number', help='Invoice number', type=int, required=True)
args = parser.parse_args()
data_directory = str(args.data)
invoice_number = str(args.number)
supplier_file = open(data_directory + 'data/supplier.yaml')
supplier_data = yaml.safe_load(supplier_file.read())
supplier_file.close()
invoice_file = open(data_directory + 'data/invoices/' + invoice_number + '.yaml')
invoice_data = yaml.safe_load(invoice_file.read())
invoice_file.close()
client_file = open(data_directory + 'data/clients/' + invoice_data['client'] + '.yaml')
client_data = yaml.safe_load(client_file.read())
client_file.close()
template_environment = jinja2.Environment(loader = jinja2.FileSystemLoader('../templates/'))
template = template_environment.get_template('invoice.html')
html_data = template.render(supplier = supplier_data, invoice = invoice_data, client = client_data)
weasyprint.HTML(string = html_data).write_pdf(data_directory + 'output/invoices/' + invoice_number + '.pdf')
|
Move TODO items to GitHub issues
|
Move TODO items to GitHub issues
|
Python
|
mit
|
pwaring/125-accounts,pwaring/125-accounts
|
import argparse
import yaml
import jinja2
import weasyprint
parser = argparse.ArgumentParser()
parser.add_argument('--data', help='path to data directory', required=True)
parser.add_argument('--number', help='Invoice number', type=int, required=True)
args = parser.parse_args()
data_directory = str(args.data)
invoice_number = str(args.number)
supplier_file = open(data_directory + 'data/supplier.yaml')
supplier_data = yaml.safe_load(supplier_file.read())
supplier_file.close()
invoice_file = open(data_directory + 'data/invoices/' + invoice_number + '.yaml')
invoice_data = yaml.safe_load(invoice_file.read())
invoice_file.close()
- # TODO: Validation
- # TODO: Sum of invoice items equals total
- # TODO: Invoice number matches filename
-
client_file = open(data_directory + 'data/clients/' + invoice_data['client'] + '.yaml')
client_data = yaml.safe_load(client_file.read())
client_file.close()
template_environment = jinja2.Environment(loader = jinja2.FileSystemLoader('../templates/'))
template = template_environment.get_template('invoice.html')
html_data = template.render(supplier = supplier_data, invoice = invoice_data, client = client_data)
weasyprint.HTML(string = html_data).write_pdf(data_directory + 'output/invoices/' + invoice_number + '.pdf')
|
Move TODO items to GitHub issues
|
## Code Before:
import argparse
import yaml
import jinja2
import weasyprint
parser = argparse.ArgumentParser()
parser.add_argument('--data', help='path to data directory', required=True)
parser.add_argument('--number', help='Invoice number', type=int, required=True)
args = parser.parse_args()
data_directory = str(args.data)
invoice_number = str(args.number)
supplier_file = open(data_directory + 'data/supplier.yaml')
supplier_data = yaml.safe_load(supplier_file.read())
supplier_file.close()
invoice_file = open(data_directory + 'data/invoices/' + invoice_number + '.yaml')
invoice_data = yaml.safe_load(invoice_file.read())
invoice_file.close()
# TODO: Validation
# TODO: Sum of invoice items equals total
# TODO: Invoice number matches filename
client_file = open(data_directory + 'data/clients/' + invoice_data['client'] + '.yaml')
client_data = yaml.safe_load(client_file.read())
client_file.close()
template_environment = jinja2.Environment(loader = jinja2.FileSystemLoader('../templates/'))
template = template_environment.get_template('invoice.html')
html_data = template.render(supplier = supplier_data, invoice = invoice_data, client = client_data)
weasyprint.HTML(string = html_data).write_pdf(data_directory + 'output/invoices/' + invoice_number + '.pdf')
## Instruction:
Move TODO items to GitHub issues
## Code After:
import argparse
import yaml
import jinja2
import weasyprint
parser = argparse.ArgumentParser()
parser.add_argument('--data', help='path to data directory', required=True)
parser.add_argument('--number', help='Invoice number', type=int, required=True)
args = parser.parse_args()
data_directory = str(args.data)
invoice_number = str(args.number)
supplier_file = open(data_directory + 'data/supplier.yaml')
supplier_data = yaml.safe_load(supplier_file.read())
supplier_file.close()
invoice_file = open(data_directory + 'data/invoices/' + invoice_number + '.yaml')
invoice_data = yaml.safe_load(invoice_file.read())
invoice_file.close()
client_file = open(data_directory + 'data/clients/' + invoice_data['client'] + '.yaml')
client_data = yaml.safe_load(client_file.read())
client_file.close()
template_environment = jinja2.Environment(loader = jinja2.FileSystemLoader('../templates/'))
template = template_environment.get_template('invoice.html')
html_data = template.render(supplier = supplier_data, invoice = invoice_data, client = client_data)
weasyprint.HTML(string = html_data).write_pdf(data_directory + 'output/invoices/' + invoice_number + '.pdf')
|
# ... existing code ...
invoice_data = yaml.safe_load(invoice_file.read())
invoice_file.close()
client_file = open(data_directory + 'data/clients/' + invoice_data['client'] + '.yaml')
client_data = yaml.safe_load(client_file.read())
client_file.close()
# ... rest of the code ...
|
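The TODO items moved to GitHub issues called for validating the loaded data: the sum of the invoice items should equal the total, and the invoice number should match the filename. A minimal sketch of such checks, assuming hypothetical field names ('items', 'amount', 'total', 'number') that are not confirmed by the record above:

def validate_invoice(invoice_data, invoice_number):
    # Hypothetical field names; adjust to the real YAML schema.
    items_total = sum(item['amount'] for item in invoice_data['items'])
    if items_total != invoice_data['total']:
        raise ValueError('Item sum %s does not match total %s'
                         % (items_total, invoice_data['total']))
    if str(invoice_data['number']) != invoice_number:
        raise ValueError('Invoice number does not match filename')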
4f71339cad35b2444ea295fd4b518e539f1088bb
|
fluent_faq/urls.py
|
fluent_faq/urls.py
|
from django.conf.urls import patterns, url
from .views import FaqQuestionList, FaqCategoryDetail, FaqQuestionDetail
urlpatterns = patterns('',
url(r'^$', FaqQuestionList.as_view(), name='faqquestion_index'),
url(r'^(?P<slug>[^/]+)/$', FaqCategoryDetail.as_view(), name='faqcategory_detail'),
url(r'^(?P<cat_slug>[^/]+)/(?P<slug>[^/]+)/$', FaqQuestionDetail.as_view(), name='faqquestion_detail'),
)
|
from django.conf.urls import url
from .views import FaqQuestionList, FaqCategoryDetail, FaqQuestionDetail
urlpatterns = [
url(r'^$', FaqQuestionList.as_view(), name='faqquestion_index'),
url(r'^(?P<slug>[^/]+)/$', FaqCategoryDetail.as_view(), name='faqcategory_detail'),
url(r'^(?P<cat_slug>[^/]+)/(?P<slug>[^/]+)/$', FaqQuestionDetail.as_view(), name='faqquestion_detail'),
]
|
Fix Django 1.9 warnings about patterns('', ..)
|
Fix Django 1.9 warnings about patterns('', ..)
|
Python
|
apache-2.0
|
edoburu/django-fluent-faq,edoburu/django-fluent-faq
|
- from django.conf.urls import patterns, url
+ from django.conf.urls import url
from .views import FaqQuestionList, FaqCategoryDetail, FaqQuestionDetail
- urlpatterns = patterns('',
+ urlpatterns = [
url(r'^$', FaqQuestionList.as_view(), name='faqquestion_index'),
url(r'^(?P<slug>[^/]+)/$', FaqCategoryDetail.as_view(), name='faqcategory_detail'),
url(r'^(?P<cat_slug>[^/]+)/(?P<slug>[^/]+)/$', FaqQuestionDetail.as_view(), name='faqquestion_detail'),
- )
+ ]
|
Fix Django 1.9 warnings about patterns('', ..)
|
## Code Before:
from django.conf.urls import patterns, url
from .views import FaqQuestionList, FaqCategoryDetail, FaqQuestionDetail
urlpatterns = patterns('',
url(r'^$', FaqQuestionList.as_view(), name='faqquestion_index'),
url(r'^(?P<slug>[^/]+)/$', FaqCategoryDetail.as_view(), name='faqcategory_detail'),
url(r'^(?P<cat_slug>[^/]+)/(?P<slug>[^/]+)/$', FaqQuestionDetail.as_view(), name='faqquestion_detail'),
)
## Instruction:
Fix Django 1.9 warnings about patterns('', ..)
## Code After:
from django.conf.urls import url
from .views import FaqQuestionList, FaqCategoryDetail, FaqQuestionDetail
urlpatterns = [
url(r'^$', FaqQuestionList.as_view(), name='faqquestion_index'),
url(r'^(?P<slug>[^/]+)/$', FaqCategoryDetail.as_view(), name='faqcategory_detail'),
url(r'^(?P<cat_slug>[^/]+)/(?P<slug>[^/]+)/$', FaqQuestionDetail.as_view(), name='faqquestion_detail'),
]
|
# ... existing code ...
from django.conf.urls import url
from .views import FaqQuestionList, FaqCategoryDetail, FaqQuestionDetail
urlpatterns = [
url(r'^$', FaqQuestionList.as_view(), name='faqquestion_index'),
url(r'^(?P<slug>[^/]+)/$', FaqCategoryDetail.as_view(), name='faqcategory_detail'),
url(r'^(?P<cat_slug>[^/]+)/(?P<slug>[^/]+)/$', FaqQuestionDetail.as_view(), name='faqquestion_detail'),
]
# ... rest of the code ...
|
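On Django 2.0 and later, `django.conf.urls.url` was itself deprecated (in 3.1, removed in 4.0) in favour of `django.urls.re_path`; a sketch of the same urlconf in that style, assuming the views are unchanged:

from django.urls import re_path  # available since Django 2.0

from .views import FaqQuestionList, FaqCategoryDetail, FaqQuestionDetail

urlpatterns = [
    re_path(r'^$', FaqQuestionList.as_view(), name='faqquestion_index'),
    re_path(r'^(?P<slug>[^/]+)/$', FaqCategoryDetail.as_view(), name='faqcategory_detail'),
    re_path(r'^(?P<cat_slug>[^/]+)/(?P<slug>[^/]+)/$', FaqQuestionDetail.as_view(), name='faqquestion_detail'),
]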
51f32076e8708c55420989b660323cdfd9fc6650
|
cycy/interpreter.py
|
cycy/interpreter.py
|
from cycy import compiler
from cycy.parser.sourceparser import parse
class CyCy(object):
"""
The main CyCy interpreter.
"""
def run(self, bytecode):
pass
def interpret(source):
print "Hello, world!"
return
bytecode = compiler.Context.to_bytecode(parse(source.getContent()))
CyCy().run(bytecode)
|
from cycy import compiler
from cycy.parser.sourceparser import parse
class CyCy(object):
"""
The main CyCy interpreter.
"""
def run(self, bytecode):
pass
def interpret(source):
bytecode = compiler.Context.to_bytecode(parse(source))
CyCy().run(bytecode)
|
Break the tests to show us we're not writing RPython.
|
Break the tests to show us we're not writing RPython.
|
Python
|
mit
|
Magnetic/cycy,Magnetic/cycy,Magnetic/cycy
|
from cycy import compiler
from cycy.parser.sourceparser import parse
class CyCy(object):
"""
The main CyCy interpreter.
"""
def run(self, bytecode):
pass
def interpret(source):
- print "Hello, world!"
- return
-
- bytecode = compiler.Context.to_bytecode(parse(source.getContent()))
+ bytecode = compiler.Context.to_bytecode(parse(source))
CyCy().run(bytecode)
|
Break the tests to show us we're not writing RPython.
|
## Code Before:
from cycy import compiler
from cycy.parser.sourceparser import parse
class CyCy(object):
"""
The main CyCy interpreter.
"""
def run(self, bytecode):
pass
def interpret(source):
print "Hello, world!"
return
bytecode = compiler.Context.to_bytecode(parse(source.getContent()))
CyCy().run(bytecode)
## Instruction:
Break the tests to show us we're not writing RPython.
## Code After:
from cycy import compiler
from cycy.parser.sourceparser import parse
class CyCy(object):
"""
The main CyCy interpreter.
"""
def run(self, bytecode):
pass
def interpret(source):
bytecode = compiler.Context.to_bytecode(parse(source))
CyCy().run(bytecode)
|
# ... existing code ...
def interpret(source):
bytecode = compiler.Context.to_bytecode(parse(source))
CyCy().run(bytecode)
# ... rest of the code ...
|
7740ff36679b13be9d63b333cff35f913e0066dc
|
python/tests/py3/test_asyncio.py
|
python/tests/py3/test_asyncio.py
|
import asyncio
import pytest
def test_hello_world(workspace):
workspace.src('main.py', r"""
import asyncio
async def main():
print('Hello, ', end='')
await asyncio.sleep(1)
print('World!')
# Python 3.7+
asyncio.run(main())
""")
r = workspace.run('python main.py')
assert r.out == 'Hello, World!'
@pytest.mark.asyncio
async def test_hello_world__pytest_asyncio(workspace):
result = await async_task('World')
assert result == 'Hello, World!'
async def async_task(input):
await asyncio.sleep(1)
return 'Hello, %s!' % input
def test_import_asyncio_not_needed_for_using_async_await_keywords(workspace):
workspace.src('main.py', r"""
async def main():
pass
print(type(main()))
""")
r = workspace.run('python main.py')
assert r.out == "<class 'coroutine'>"
assert r.err == "main.py:4: RuntimeWarning: coroutine 'main' was never awaited\n print(type(main()))"
|
import asyncio
import pytest
def test_hello_world(workspace):
workspace.src('main.py', r"""
import asyncio
async def do_something_else():
print('...', end='')
await asyncio.sleep(1)
print('!', end='')
async def say_hello_async(who):
print('Hello, ', end='')
await asyncio.sleep(1)
print(who, end='')
async def main():
await asyncio.gather(say_hello_async('World'), do_something_else())
asyncio.run(main())
""")
r = workspace.run('python main.py')
assert r.out == 'Hello, ...World!'
@pytest.mark.asyncio
async def test_hello_world__pytest_asyncio(capsys):
async def do_something_else():
print('...', end='')
await asyncio.sleep(1)
print('!', end='')
async def say_hello_async(who):
print('Hello, ', end='')
await asyncio.sleep(1)
print(who, end='')
await asyncio.gather(say_hello_async('World'), do_something_else())
out, _ = capsys.readouterr()
assert out == 'Hello, ...World!'
def test_import_asyncio_not_needed_for_using_async_await_keywords(workspace):
workspace.src('main.py', r"""
async def main():
pass
print(type(main()))
""")
r = workspace.run('python main.py')
assert r.out == "<class 'coroutine'>"
assert r.err == "main.py:4: RuntimeWarning: coroutine 'main' was never awaited\n print(type(main()))"
|
Make hello world (asyncio) more involved
|
[python] Make hello world (asyncio) more involved
|
Python
|
mit
|
imsardine/learning,imsardine/learning,imsardine/learning,imsardine/learning,imsardine/learning,imsardine/learning,imsardine/learning
|
import asyncio
import pytest
def test_hello_world(workspace):
workspace.src('main.py', r"""
import asyncio
- async def main():
+ async def do_something_else():
+ print('...', end='')
+ await asyncio.sleep(1)
+ print('!', end='')
+
+ async def say_hello_async(who):
print('Hello, ', end='')
await asyncio.sleep(1)
- print('World!')
+ print(who, end='')
- # Python 3.7+
+ async def main():
+ await asyncio.gather(say_hello_async('World'), do_something_else())
+
asyncio.run(main())
""")
r = workspace.run('python main.py')
- assert r.out == 'Hello, World!'
+ assert r.out == 'Hello, ...World!'
@pytest.mark.asyncio
- async def test_hello_world__pytest_asyncio(workspace):
+ async def test_hello_world__pytest_asyncio(capsys):
- result = await async_task('World')
- assert result == 'Hello, World!'
+ async def do_something_else():
+ print('...', end='')
+ await asyncio.sleep(1)
+ print('!', end='')
- async def async_task(input):
+ async def say_hello_async(who):
+ print('Hello, ', end='')
- await asyncio.sleep(1)
+ await asyncio.sleep(1)
- return 'Hello, %s!' % input
+ print(who, end='')
+
+ await asyncio.gather(say_hello_async('World'), do_something_else())
+
+ out, _ = capsys.readouterr()
+ assert out == 'Hello, ...World!'
def test_import_asyncio_not_needed_for_using_async_await_keywords(workspace):
workspace.src('main.py', r"""
async def main():
pass
print(type(main()))
""")
r = workspace.run('python main.py')
assert r.out == "<class 'coroutine'>"
assert r.err == "main.py:4: RuntimeWarning: coroutine 'main' was never awaited\n print(type(main()))"
|
Make hello world (asyncio) more involved
|
## Code Before:
import asyncio
import pytest
def test_hello_world(workspace):
workspace.src('main.py', r"""
import asyncio
async def main():
print('Hello, ', end='')
await asyncio.sleep(1)
print('World!')
# Python 3.7+
asyncio.run(main())
""")
r = workspace.run('python main.py')
assert r.out == 'Hello, World!'
@pytest.mark.asyncio
async def test_hello_world__pytest_asyncio(workspace):
result = await async_task('World')
assert result == 'Hello, World!'
async def async_task(input):
await asyncio.sleep(1)
return 'Hello, %s!' % input
def test_import_asyncio_not_needed_for_using_async_await_keywords(workspace):
workspace.src('main.py', r"""
async def main():
pass
print(type(main()))
""")
r = workspace.run('python main.py')
assert r.out == "<class 'coroutine'>"
assert r.err == "main.py:4: RuntimeWarning: coroutine 'main' was never awaited\n print(type(main()))"
## Instruction:
Make hello world (asyncio) more involved
## Code After:
import asyncio
import pytest
def test_hello_world(workspace):
workspace.src('main.py', r"""
import asyncio
async def do_something_else():
print('...', end='')
await asyncio.sleep(1)
print('!', end='')
async def say_hello_async(who):
print('Hello, ', end='')
await asyncio.sleep(1)
print(who, end='')
async def main():
await asyncio.gather(say_hello_async('World'), do_something_else())
asyncio.run(main())
""")
r = workspace.run('python main.py')
assert r.out == 'Hello, ...World!'
@pytest.mark.asyncio
async def test_hello_world__pytest_asyncio(capsys):
async def do_something_else():
print('...', end='')
await asyncio.sleep(1)
print('!', end='')
async def say_hello_async(who):
print('Hello, ', end='')
await asyncio.sleep(1)
print(who, end='')
await asyncio.gather(say_hello_async('World'), do_something_else())
out, _ = capsys.readouterr()
assert out == 'Hello, ...World!'
def test_import_asyncio_not_needed_for_using_async_await_keywords(workspace):
workspace.src('main.py', r"""
async def main():
pass
print(type(main()))
""")
r = workspace.run('python main.py')
assert r.out == "<class 'coroutine'>"
assert r.err == "main.py:4: RuntimeWarning: coroutine 'main' was never awaited\n print(type(main()))"
|
// ... existing code ...
workspace.src('main.py', r"""
import asyncio
async def do_something_else():
print('...', end='')
await asyncio.sleep(1)
print('!', end='')
async def say_hello_async(who):
print('Hello, ', end='')
await asyncio.sleep(1)
print(who, end='')
async def main():
await asyncio.gather(say_hello_async('World'), do_something_else())
asyncio.run(main())
""")
r = workspace.run('python main.py')
assert r.out == 'Hello, ...World!'
@pytest.mark.asyncio
async def test_hello_world__pytest_asyncio(capsys):
async def do_something_else():
print('...', end='')
await asyncio.sleep(1)
print('!', end='')
async def say_hello_async(who):
print('Hello, ', end='')
await asyncio.sleep(1)
print(who, end='')
await asyncio.gather(say_hello_async('World'), do_something_else())
out, _ = capsys.readouterr()
assert out == 'Hello, ...World!'
def test_import_asyncio_not_needed_for_using_async_await_keywords(workspace):
workspace.src('main.py', r"""
// ... rest of the code ...
|
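The expected output 'Hello, ...World!' falls out of cooperative scheduling: asyncio.gather() starts both coroutines, each runs until its first await asyncio.sleep(1) (printing 'Hello, ' then '...'), and after the sleeps they resume in the same order (printing 'World' then '!'). A standalone sketch of the interleaving:

import asyncio

async def first():
    print('Hello, ', end='')   # runs before the first suspension point
    await asyncio.sleep(1)     # yields control to the event loop
    print('World', end='')

async def second():
    print('...', end='')
    await asyncio.sleep(1)
    print('!')

async def main():
    await asyncio.gather(first(), second())

asyncio.run(main())  # prints: Hello, ...World!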
72d89466e40fadeb246b6d69ab0e7035f6bcc8da
|
gql/transport/requests.py
|
gql/transport/requests.py
|
from __future__ import absolute_import
import requests
from graphql.execution import ExecutionResult
from graphql.language.printer import print_ast
from .http import HTTPTransport
class RequestsHTTPTransport(HTTPTransport):
def __init__(self, auth=None, *args, **kwargs):
super(RequestsHTTPTransport, self).__init__(*args, **kwargs)
self.auth = auth
def execute(self, document, variable_values=None):
query_str = print_ast(document)
request = requests.post(
self.url,
data={
'query': query_str,
'variables': variable_values
},
headers=self.client_headers,
auth=self.auth
)
result = request.json()
assert 'errors' in result or 'data' in result, 'Received non-compatible response "{}"'.format(result)
return ExecutionResult(
errors=result.get('errors'),
data=result.get('data')
)
|
from __future__ import absolute_import
import requests
from graphql.execution import ExecutionResult
from graphql.language.printer import print_ast
from .http import HTTPTransport
class RequestsHTTPTransport(HTTPTransport):
def __init__(self, auth=None, *args, **kwargs):
super(RequestsHTTPTransport, self).__init__(*args, **kwargs)
self.auth = auth
def execute(self, document, variable_values=None):
query_str = print_ast(document)
request = requests.post(
self.url,
data={
'query': query_str,
'variables': variable_values
},
headers=self.client_headers,
auth=self.auth
)
request.raise_for_status()
result = request.json()
assert 'errors' in result or 'data' in result, 'Received non-compatible response "{}"'.format(result)
return ExecutionResult(
errors=result.get('errors'),
data=result.get('data')
)
|
Raise exception if HTTP request failed
|
Raise exception if HTTP request failed
|
Python
|
mit
|
graphql-python/gql
|
from __future__ import absolute_import
import requests
from graphql.execution import ExecutionResult
from graphql.language.printer import print_ast
from .http import HTTPTransport
class RequestsHTTPTransport(HTTPTransport):
def __init__(self, auth=None, *args, **kwargs):
super(RequestsHTTPTransport, self).__init__(*args, **kwargs)
self.auth = auth
def execute(self, document, variable_values=None):
query_str = print_ast(document)
request = requests.post(
self.url,
data={
'query': query_str,
'variables': variable_values
},
headers=self.client_headers,
auth=self.auth
)
+ request.raise_for_status()
+
result = request.json()
assert 'errors' in result or 'data' in result, 'Received non-compatible response "{}"'.format(result)
return ExecutionResult(
errors=result.get('errors'),
data=result.get('data')
)
|
Raise exception if HTTP request failed
|
## Code Before:
from __future__ import absolute_import
import requests
from graphql.execution import ExecutionResult
from graphql.language.printer import print_ast
from .http import HTTPTransport
class RequestsHTTPTransport(HTTPTransport):
def __init__(self, auth=None, *args, **kwargs):
super(RequestsHTTPTransport, self).__init__(*args, **kwargs)
self.auth = auth
def execute(self, document, variable_values=None):
query_str = print_ast(document)
request = requests.post(
self.url,
data={
'query': query_str,
'variables': variable_values
},
headers=self.client_headers,
auth=self.auth
)
result = request.json()
assert 'errors' in result or 'data' in result, 'Received non-compatible response "{}"'.format(result)
return ExecutionResult(
errors=result.get('errors'),
data=result.get('data')
)
## Instruction:
Raise exception if HTTP request failed
## Code After:
from __future__ import absolute_import
import requests
from graphql.execution import ExecutionResult
from graphql.language.printer import print_ast
from .http import HTTPTransport
class RequestsHTTPTransport(HTTPTransport):
def __init__(self, auth=None, *args, **kwargs):
super(RequestsHTTPTransport, self).__init__(*args, **kwargs)
self.auth = auth
def execute(self, document, variable_values=None):
query_str = print_ast(document)
request = requests.post(
self.url,
data={
'query': query_str,
'variables': variable_values
},
headers=self.client_headers,
auth=self.auth
)
request.raise_for_status()
result = request.json()
assert 'errors' in result or 'data' in result, 'Received non-compatible response "{}"'.format(result)
return ExecutionResult(
errors=result.get('errors'),
data=result.get('data')
)
|
...
headers=self.client_headers,
auth=self.auth
)
request.raise_for_status()
result = request.json()
assert 'errors' in result or 'data' in result, 'Received non-compatible response "{}"'.format(result)
return ExecutionResult(
...
|
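With raise_for_status() in place, a 4xx/5xx response surfaces as requests.exceptions.HTTPError before the JSON parse and the assert run. A sketch of handling that from calling code, with a placeholder endpoint URL:

import requests

response = requests.post('http://example.com/graphql',
                         data={'query': '{ __typename }'})
try:
    response.raise_for_status()  # raises HTTPError for 4xx/5xx status codes
except requests.exceptions.HTTPError as err:
    print('Request failed with status %s' % err.response.status_code)
else:
    print(response.json())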
cf748e2bc4f28a11c79555f2e6c3d1f89d027709
|
tests/test_memory_leak.py
|
tests/test_memory_leak.py
|
import resource
import pytest
from .models import TestModel as DirtyMixinModel
pytestmark = pytest.mark.django_db
def test_rss_usage():
DirtyMixinModel()
rss_1 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
for _ in range(1000):
DirtyMixinModel()
rss_2 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
assert rss_2 == rss_1, 'There is a memory leak!'
|
import gc
import resource
import pytest
from .models import TestModel as DirtyMixinModel
pytestmark = pytest.mark.django_db
def test_rss_usage():
DirtyMixinModel()
gc.collect()
rss_1 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
for _ in range(1000):
DirtyMixinModel()
gc.collect()
rss_2 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
assert rss_2 == rss_1, 'There is a memory leak!'
|
Call gc.collect() before measuring memory usage.
|
Call gc.collect() before measuring memory usage.
|
Python
|
bsd-3-clause
|
romgar/django-dirtyfields,smn/django-dirtyfields
|
+ import gc
import resource
import pytest
from .models import TestModel as DirtyMixinModel
pytestmark = pytest.mark.django_db
def test_rss_usage():
DirtyMixinModel()
+ gc.collect()
rss_1 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
for _ in range(1000):
DirtyMixinModel()
+ gc.collect()
rss_2 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
assert rss_2 == rss_1, 'There is a memory leak!'
|
Call gc.collect() before measuring memory usage.
|
## Code Before:
import resource
import pytest
from .models import TestModel as DirtyMixinModel
pytestmark = pytest.mark.django_db
def test_rss_usage():
DirtyMixinModel()
rss_1 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
for _ in range(1000):
DirtyMixinModel()
rss_2 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
assert rss_2 == rss_1, 'There is a memory leak!'
## Instruction:
Call gc.collect() before measuring memory usage.
## Code After:
import gc
import resource
import pytest
from .models import TestModel as DirtyMixinModel
pytestmark = pytest.mark.django_db
def test_rss_usage():
DirtyMixinModel()
gc.collect()
rss_1 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
for _ in range(1000):
DirtyMixinModel()
gc.collect()
rss_2 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
assert rss_2 == rss_1, 'There is a memory leak!'
|
// ... existing code ...
import gc
import resource
import pytest
// ... modified code ...
def test_rss_usage():
DirtyMixinModel()
gc.collect()
rss_1 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
for _ in range(1000):
DirtyMixinModel()
gc.collect()
rss_2 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
assert rss_2 == rss_1, 'There is a memory leak!'
// ... rest of the code ...
|
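One caveat worth noting: ru_maxrss is a high-water mark, so it never decreases, and its units are platform-dependent (kilobytes on Linux, bytes on macOS). A small sketch showing the measurement pattern on its own, assuming a POSIX system:

import gc
import resource

gc.collect()  # drop collectable garbage before sampling
before = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss

data = [object() for _ in range(10**6)]  # deliberately allocate

gc.collect()
after = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
print('peak RSS grew by', after - before)  # KB on Linux, bytes on macOS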
c55a42737a99104734a79e946304849258bfa44b
|
aplib/__init__.py
|
aplib/__init__.py
|
from make_socket_for_ip import make_socket_for_ip
|
try:
from make_socket_for_ip import make_socket_for_ip
except ImportError:
def make_socket_for_ip(ip, stype=None):
raise NotImplementedError
|
Allow aplib to be imported when coro is not installed.
|
Allow aplib to be imported when coro is not installed.
|
Python
|
mit
|
ironport/aplib,ironport/aplib
|
+ try:
+ from make_socket_for_ip import make_socket_for_ip
+ except ImportError:
+ def make_socket_for_ip(ip, stype=None):
+ raise NotImplementedError
- from make_socket_for_ip import make_socket_for_ip
-
|
Allow aplib to be imported when coro is not installed.
|
## Code Before:
from make_socket_for_ip import make_socket_for_ip
## Instruction:
Allow aplib to be imported when coro is not installed.
## Code After:
try:
from make_socket_for_ip import make_socket_for_ip
except ImportError:
def make_socket_for_ip(ip, stype=None):
raise NotImplementedError
|
# ... existing code ...
try:
from make_socket_for_ip import make_socket_for_ip
except ImportError:
def make_socket_for_ip(ip, stype=None):
raise NotImplementedError
# ... rest of the code ...
|
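The same optional-dependency pattern written for Python 3, where the bare import would need to be package-relative; a sketch under that assumption:

try:
    from .make_socket_for_ip import make_socket_for_ip  # Python 3 relative import
except ImportError:
    def make_socket_for_ip(ip, stype=None):
        # Stub keeps the package importable when coro is not installed.
        raise NotImplementedError('coro is required for make_socket_for_ip')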
da2f01c6fbeba6a0df3d6bb2c6dcf6a6ae659a76
|
opentreemap/registration_backend/urls.py
|
opentreemap/registration_backend/urls.py
|
from django.conf.urls import patterns
from django.conf.urls import include
from django.conf.urls import url
from django.views.generic.base import TemplateView
from views import RegistrationView, ActivationView
urlpatterns = patterns('',
url(r'^activate/complete/$',
TemplateView.as_view(template_name='registration/activation_complete.html'), # NOQA
name='registration_activation_complete'),
# Activation keys get matched by \w+ instead of the more specific
# [a-fA-F0-9]{40} because a bad activation key should still get
# to the view; that way it can return a sensible "invalid key"
# message instead of a confusing 404.
url(r'^activate/(?P<activation_key>\w+)/$',
ActivationView.as_view(),
name='registration_activate'),
url(r'^register/$',
RegistrationView.as_view(),
name='registration_register'),
url(r'^register/complete/$',
TemplateView.as_view(template_name='registration/registration_complete.html'), # NOQA
name='registration_complete'),
url(r'^register/closed/$',
TemplateView.as_view(template_name='registration/registration_closed.html'), # NOQA
name='registration_disallowed'),
(r'', include('registration.auth_urls')),
) # NOQA
|
from django.conf.urls import patterns
from django.conf.urls import include
from django.conf.urls import url
from django.views.generic.base import TemplateView
from registration.forms import RegistrationFormUniqueEmail
from views import RegistrationView, ActivationView
urlpatterns = patterns('',
url(r'^activate/complete/$',
TemplateView.as_view(template_name='registration/activation_complete.html'), # NOQA
name='registration_activation_complete'),
# Activation keys get matched by \w+ instead of the more specific
# [a-fA-F0-9]{40} because a bad activation key should still get
# to the view; that way it can return a sensible "invalid key"
# message instead of a confusing 404.
url(r'^activate/(?P<activation_key>\w+)/$',
ActivationView.as_view(),
name='registration_activate'),
url(r'^register/$',
RegistrationView.as_view(form_class=RegistrationFormUniqueEmail),
name='registration_register'),
url(r'^register/complete/$',
TemplateView.as_view(template_name='registration/registration_complete.html'), # NOQA
name='registration_complete'),
url(r'^register/closed/$',
TemplateView.as_view(template_name='registration/registration_closed.html'), # NOQA
name='registration_disallowed'),
(r'', include('registration.auth_urls')),
) # NOQA
|
Use registration form to prevent duplicate emails
|
Use registration form to prevent duplicate emails
Fixes #725.
We enforce this both at the database level, and here, using django
registration's ready-made form class.
|
Python
|
agpl-3.0
|
clever-crow-consulting/otm-core,maurizi/otm-core,clever-crow-consulting/otm-core,RickMohr/otm-core,recklessromeo/otm-core,maurizi/otm-core,maurizi/otm-core,RickMohr/otm-core,RickMohr/otm-core,recklessromeo/otm-core,recklessromeo/otm-core,RickMohr/otm-core,clever-crow-consulting/otm-core,clever-crow-consulting/otm-core,recklessromeo/otm-core,maurizi/otm-core
|
from django.conf.urls import patterns
from django.conf.urls import include
from django.conf.urls import url
from django.views.generic.base import TemplateView
+
+ from registration.forms import RegistrationFormUniqueEmail
from views import RegistrationView, ActivationView
urlpatterns = patterns('',
url(r'^activate/complete/$',
TemplateView.as_view(template_name='registration/activation_complete.html'), # NOQA
name='registration_activation_complete'),
# Activation keys get matched by \w+ instead of the more specific
# [a-fA-F0-9]{40} because a bad activation key should still get
# to the view; that way it can return a sensible "invalid key"
# message instead of a confusing 404.
url(r'^activate/(?P<activation_key>\w+)/$',
ActivationView.as_view(),
name='registration_activate'),
url(r'^register/$',
- RegistrationView.as_view(),
+ RegistrationView.as_view(form_class=RegistrationFormUniqueEmail),
name='registration_register'),
url(r'^register/complete/$',
TemplateView.as_view(template_name='registration/registration_complete.html'), # NOQA
name='registration_complete'),
url(r'^register/closed/$',
TemplateView.as_view(template_name='registration/registration_closed.html'), # NOQA
name='registration_disallowed'),
(r'', include('registration.auth_urls')),
) # NOQA
|
Use registration form to prevent duplicate emails
|
## Code Before:
from django.conf.urls import patterns
from django.conf.urls import include
from django.conf.urls import url
from django.views.generic.base import TemplateView
from views import RegistrationView, ActivationView
urlpatterns = patterns('',
url(r'^activate/complete/$',
TemplateView.as_view(template_name='registration/activation_complete.html'), # NOQA
name='registration_activation_complete'),
# Activation keys get matched by \w+ instead of the more specific
# [a-fA-F0-9]{40} because a bad activation key should still get
# to the view; that way it can return a sensible "invalid key"
# message instead of a confusing 404.
url(r'^activate/(?P<activation_key>\w+)/$',
ActivationView.as_view(),
name='registration_activate'),
url(r'^register/$',
RegistrationView.as_view(),
name='registration_register'),
url(r'^register/complete/$',
TemplateView.as_view(template_name='registration/registration_complete.html'), # NOQA
name='registration_complete'),
url(r'^register/closed/$',
TemplateView.as_view(template_name='registration/registration_closed.html'), # NOQA
name='registration_disallowed'),
(r'', include('registration.auth_urls')),
) # NOQA
## Instruction:
Use registration form to prevent duplicate emails
## Code After:
from django.conf.urls import patterns
from django.conf.urls import include
from django.conf.urls import url
from django.views.generic.base import TemplateView
from registration.forms import RegistrationFormUniqueEmail
from views import RegistrationView, ActivationView
urlpatterns = patterns('',
url(r'^activate/complete/$',
TemplateView.as_view(template_name='registration/activation_complete.html'), # NOQA
name='registration_activation_complete'),
# Activation keys get matched by \w+ instead of the more specific
# [a-fA-F0-9]{40} because a bad activation key should still get
# to the view; that way it can return a sensible "invalid key"
# message instead of a confusing 404.
url(r'^activate/(?P<activation_key>\w+)/$',
ActivationView.as_view(),
name='registration_activate'),
url(r'^register/$',
RegistrationView.as_view(form_class=RegistrationFormUniqueEmail),
name='registration_register'),
url(r'^register/complete/$',
TemplateView.as_view(template_name='registration/registration_complete.html'), # NOQA
name='registration_complete'),
url(r'^register/closed/$',
TemplateView.as_view(template_name='registration/registration_closed.html'), # NOQA
name='registration_disallowed'),
(r'', include('registration.auth_urls')),
) # NOQA
|
...
from django.conf.urls import include
from django.conf.urls import url
from django.views.generic.base import TemplateView
from registration.forms import RegistrationFormUniqueEmail
from views import RegistrationView, ActivationView
...
ActivationView.as_view(),
name='registration_activate'),
url(r'^register/$',
RegistrationView.as_view(form_class=RegistrationFormUniqueEmail),
name='registration_register'),
url(r'^register/complete/$',
TemplateView.as_view(template_name='registration/registration_complete.html'), # NOQA
...
|
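django-registration's RegistrationFormUniqueEmail rejects a submission whose email address is already registered. Roughly, its check looks like the following simplified sketch (not the library's exact code):

from django import forms
from django.contrib.auth import get_user_model

class UniqueEmailForm(forms.Form):
    email = forms.EmailField()

    def clean_email(self):
        # Reject the address if an existing user already registered with it.
        email = self.cleaned_data['email']
        if get_user_model().objects.filter(email__iexact=email).exists():
            raise forms.ValidationError('This email address is already in use.')
        return email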
81c567e4be0d3c2f91d3cfa3d04b0b738859da6a
|
yargy/utils.py
|
yargy/utils.py
|
from itertools import count, takewhile
def frange(start, stop, step):
return takewhile(lambda x: x <= stop, (start + i * step for i in count()))
def get_original_text(text, tokens):
'''
Returns original text captured by parser grammars
'''
if not tokens:
return None
head, tail = tokens[0], tokens[-1]
start, end = head.position[0], tail.position[1]
return text[start:end]
|
from itertools import count, takewhile
def frange(start, stop, step):
return takewhile(lambda x: x <= stop, (start + i * step for i in count()))
def get_tokens_position(tokens):
if not tokens:
return None
head, tail = tokens[0], tokens[-1]
return head.position[0], tail.position[1]
def get_original_text(text, tokens):
'''
Returns original text captured by parser grammars
'''
position = get_tokens_position(tokens)
if not position:
return None
else:
start, end = position
return text[start:end]
# stolen from rosettacode
ROMAN_VALUES = (
('I', 1),
('IV', 4),
('V', 5),
('IX', 9),
('X', 10),
('XL', 40),
('L', 50),
('XC', 90),
('C', 100),
('CD', 400),
('D', 500),
('CM', 900),
('M', 1000),
)
def decode_roman_number(number):
total = 0
for symbol, value in reversed(ROMAN_VALUES):
while number.startswith(symbol):
total += value
number = number[len(symbol):]
return total
|
Add get_tokens_position and decode_roman_number functions
|
Add get_tokens_position and decode_roman_number functions
|
Python
|
mit
|
bureaucratic-labs/yargy
|
from itertools import count, takewhile
def frange(start, stop, step):
return takewhile(lambda x: x <= stop, (start + i * step for i in count()))
+ def get_tokens_position(tokens):
+ if not tokens:
+ return None
+ head, tail = tokens[0], tokens[-1]
+ return head.position[0], tail.position[1]
+
def get_original_text(text, tokens):
'''
Returns original text captured by parser grammars
'''
+ position = get_tokens_position(tokens)
- if not tokens:
+ if not position:
return None
- head, tail = tokens[0], tokens[-1]
- start, end = head.position[0], tail.position[1]
+ else:
+ start, end = position
return text[start:end]
+ # stolen from rosettacode
+ ROMAN_VALUES = (
+ ('I', 1),
+ ('IV', 4),
+ ('V', 5),
+ ('IX', 9),
+ ('X', 10),
+ ('XL', 40),
+ ('L', 50),
+ ('XC', 90),
+ ('C', 100),
+ ('CD', 400),
+ ('D', 500),
+ ('CM', 900),
+ ('M', 1000),
+ )
+
+ def decode_roman_number(number):
+ total = 0
+ for symbol, value in reversed(ROMAN_VALUES):
+ while number.startswith(symbol):
+ total += value
+ number = number[len(symbol):]
+ return total
+
|
Add get_tokens_position and decode_roman_number functions
|
## Code Before:
from itertools import count, takewhile
def frange(start, stop, step):
return takewhile(lambda x: x <= stop, (start + i * step for i in count()))
def get_original_text(text, tokens):
'''
Returns original text captured by parser grammars
'''
if not tokens:
return None
head, tail = tokens[0], tokens[-1]
start, end = head.position[0], tail.position[1]
return text[start:end]
## Instruction:
Add get_tokens_position and decode_roman_number functions
## Code After:
from itertools import count, takewhile
def frange(start, stop, step):
return takewhile(lambda x: x <= stop, (start + i * step for i in count()))
def get_tokens_position(tokens):
if not tokens:
return None
head, tail = tokens[0], tokens[-1]
return head.position[0], tail.position[1]
def get_original_text(text, tokens):
'''
Returns original text captured by parser grammars
'''
position = get_tokens_position(tokens)
if not position:
return None
else:
start, end = position
return text[start:end]
# stolen from rosettacode
ROMAN_VALUES = (
('I', 1),
('IV', 4),
('V', 5),
('IX', 9),
('X', 10),
('XL', 40),
('L', 50),
('XC', 90),
('C', 100),
('CD', 400),
('D', 500),
('CM', 900),
('M', 1000),
)
def decode_roman_number(number):
total = 0
for symbol, value in reversed(ROMAN_VALUES):
while number.startswith(symbol):
total += value
number = number[len(symbol):]
return total
|
# ... existing code ...
def frange(start, stop, step):
return takewhile(lambda x: x <= stop, (start + i * step for i in count()))
def get_tokens_position(tokens):
if not tokens:
return None
head, tail = tokens[0], tokens[-1]
return head.position[0], tail.position[1]
def get_original_text(text, tokens):
'''
Returns original text captured by parser grammars
'''
position = get_tokens_position(tokens)
if not position:
return None
else:
start, end = position
return text[start:end]
# stolen from rosettacode
ROMAN_VALUES = (
('I', 1),
('IV', 4),
('V', 5),
('IX', 9),
('X', 10),
('XL', 40),
('L', 50),
('XC', 90),
('C', 100),
('CD', 400),
('D', 500),
('CM', 900),
('M', 1000),
)
def decode_roman_number(number):
total = 0
for symbol, value in reversed(ROMAN_VALUES):
while number.startswith(symbol):
total += value
number = number[len(symbol):]
return total
# ... rest of the code ...
|
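A quick check of decode_roman_number against known values; the greedy match over the reversed value table is what makes subtractive pairs such as IV and CM work:

assert decode_roman_number('XIV') == 14        # X + IV
assert decode_roman_number('MCMXCIV') == 1994  # M + CM + XC + IV
assert decode_roman_number('') == 0            # empty string decodes to zero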
12ee53b4ec4d4fafd7f7af692dda0b0553b82226
|
metpy/io/tests/test_nexrad.py
|
metpy/io/tests/test_nexrad.py
|
import os.path
from numpy.testing import TestCase
from metpy.io.nexrad import Level2File, Level3File
curdir, f = os.path.split(__file__)
datadir = os.path.join(curdir, '../../../examples/testdata')
class TestLevel3(TestCase):
def test_basic(self):
Level3File(os.path.join(datadir, 'nids/Level3_FFC_N0Q_20140407_1805.nids'))
class TestLevel2(TestCase):
def test_basic(self):
Level2File(os.path.join(datadir, 'KTLX20130520_190411_V06.gz'))
|
import glob
import os.path
from numpy.testing import TestCase
from metpy.io.nexrad import Level2File, Level3File
curdir, f = os.path.split(__file__)
datadir = os.path.join(curdir, '../../../examples/testdata')
def test_generator():
for fname in glob.glob(os.path.join(datadir, 'nids', 'KOUN*')):
yield read_level3_file, fname
def read_level3_file(fname):
Level3File(fname)
class TestLevel3(TestCase):
def test_basic(self):
Level3File(os.path.join(datadir, 'nids/Level3_FFC_N0Q_20140407_1805.nids'))
class TestLevel2(TestCase):
def test_basic(self):
Level2File(os.path.join(datadir, 'KTLX20130520_190411_V06.gz'))
|
Add parameterized test for nids.
|
Add parameterized test for nids.
|
Python
|
bsd-3-clause
|
ahaberlie/MetPy,Unidata/MetPy,dopplershift/MetPy,deeplycloudy/MetPy,jrleeman/MetPy,dopplershift/MetPy,jrleeman/MetPy,ahill818/MetPy,Unidata/MetPy,ShawnMurd/MetPy,ahaberlie/MetPy
|
+ import glob
import os.path
from numpy.testing import TestCase
from metpy.io.nexrad import Level2File, Level3File
curdir, f = os.path.split(__file__)
datadir = os.path.join(curdir, '../../../examples/testdata')
+
+
+ def test_generator():
+ for fname in glob.glob(os.path.join(datadir, 'nids', 'KOUN*')):
+ yield read_level3_file, fname
+
+
+ def read_level3_file(fname):
+ Level3File(fname)
class TestLevel3(TestCase):
def test_basic(self):
Level3File(os.path.join(datadir, 'nids/Level3_FFC_N0Q_20140407_1805.nids'))
class TestLevel2(TestCase):
def test_basic(self):
Level2File(os.path.join(datadir, 'KTLX20130520_190411_V06.gz'))
|
Add parameterized test for nids.
|
## Code Before:
import os.path
from numpy.testing import TestCase
from metpy.io.nexrad import Level2File, Level3File
curdir, f = os.path.split(__file__)
datadir = os.path.join(curdir, '../../../examples/testdata')
class TestLevel3(TestCase):
def test_basic(self):
Level3File(os.path.join(datadir, 'nids/Level3_FFC_N0Q_20140407_1805.nids'))
class TestLevel2(TestCase):
def test_basic(self):
Level2File(os.path.join(datadir, 'KTLX20130520_190411_V06.gz'))
## Instruction:
Add parameterized test for nids.
## Code After:
import glob
import os.path
from numpy.testing import TestCase
from metpy.io.nexrad import Level2File, Level3File
curdir, f = os.path.split(__file__)
datadir = os.path.join(curdir, '../../../examples/testdata')
def test_generator():
for fname in glob.glob(os.path.join(datadir, 'nids', 'KOUN*')):
yield read_level3_file, fname
def read_level3_file(fname):
Level3File(fname)
class TestLevel3(TestCase):
def test_basic(self):
Level3File(os.path.join(datadir, 'nids/Level3_FFC_N0Q_20140407_1805.nids'))
class TestLevel2(TestCase):
def test_basic(self):
Level2File(os.path.join(datadir, 'KTLX20130520_190411_V06.gz'))
|
# ... existing code ...
import glob
import os.path
from numpy.testing import TestCase
from metpy.io.nexrad import Level2File, Level3File
# ... modified code ...
curdir, f = os.path.split(__file__)
datadir = os.path.join(curdir, '../../../examples/testdata')
def test_generator():
for fname in glob.glob(os.path.join(datadir, 'nids', 'KOUN*')):
yield read_level3_file, fname
def read_level3_file(fname):
Level3File(fname)
class TestLevel3(TestCase):
# ... rest of the code ...
|
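Yield-based test generators are a nose-era idiom that pytest no longer supports; the modern equivalent parametrizes over the matching files. A sketch, reusing the datadir and Level3File names from the module above:

import glob
import os.path

import pytest

level3_files = glob.glob(os.path.join(datadir, 'nids', 'KOUN*'))

@pytest.mark.parametrize('fname', level3_files)
def test_level3_file(fname):
    Level3File(fname)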
1ccd9e7f15cfaccfadf7e4e977dbde724885cab9
|
tests/test_sync_call.py
|
tests/test_sync_call.py
|
import time
from switchy import sync_caller
from switchy.apps.players import TonePlay
def test_toneplay(fsip):
'''Test the synchronous caller with a simple toneplay
'''
with sync_caller(fsip, apps={"TonePlay": TonePlay}) as caller:
# have the external prof call itself by default
assert 'TonePlay' in caller.app_names
sess, waitfor = caller(
"doggy@{}:{}".format(caller.client.server, 5080),
'TonePlay',
timeout=3,
)
assert sess.is_outbound()
time.sleep(1)
sess.hangup()
time.sleep(0.1)
assert caller.client.listener.count_calls() == 0
|
import time
from switchy import sync_caller
from switchy.apps.players import TonePlay, PlayRec
def test_toneplay(fsip):
'''Test the synchronous caller with a simple toneplay
'''
with sync_caller(fsip, apps={"TonePlay": TonePlay}) as caller:
# have the external prof call itself by default
assert 'TonePlay' in caller.app_names
sess, waitfor = caller(
"doggy@{}:{}".format(caller.client.server, 5080),
'TonePlay',
timeout=3,
)
assert sess.is_outbound()
time.sleep(1)
sess.hangup()
time.sleep(0.1)
assert caller.client.listener.count_calls() == 0
def test_playrec(fsip):
'''Test the synchronous caller with a simulated conversation using the
`PlayRec` app. Currently this test does no audio checking but merely verifies
the callback chain is invoked as expected.
'''
with sync_caller(fsip, apps={"PlayRec": PlayRec}) as caller:
# have the external prof call itself by default
caller.apps.PlayRec.sim_convo = True
sess, waitfor = caller(
"doggy@{}:{}".format(caller.client.server, 5080),
'PlayRec',
timeout=10,
)
waitfor(sess, 'recorded', timeout=15)
waitfor(sess.call.get_peer(sess), 'recorded', timeout=15)
assert sess.call.vars['record']
time.sleep(1)
assert sess.hungup
|
Add a `PlayRec` app unit test
|
Add a `PlayRec` app unit test
Merely checks that all sessions are recorded as expected and that the call
is hung up afterwards. None of the recorded audio content is verified.
|
Python
|
mpl-2.0
|
sangoma/switchy,wwezhuimeng/switch
|
import time
from switchy import sync_caller
- from switchy.apps.players import TonePlay
+ from switchy.apps.players import TonePlay, PlayRec
def test_toneplay(fsip):
'''Test the synchronous caller with a simple toneplay
'''
with sync_caller(fsip, apps={"TonePlay": TonePlay}) as caller:
# have the external prof call itself by default
assert 'TonePlay' in caller.app_names
sess, waitfor = caller(
"doggy@{}:{}".format(caller.client.server, 5080),
'TonePlay',
timeout=3,
)
assert sess.is_outbound()
time.sleep(1)
sess.hangup()
time.sleep(0.1)
assert caller.client.listener.count_calls() == 0
+
+ def test_playrec(fsip):
+ '''Test the synchronous caller with a simulated conversation using the
+ `PlayRec` app. Currently this test does no audio checking but merely verifies
+ the callback chain is invoked as expected.
+ '''
+ with sync_caller(fsip, apps={"PlayRec": PlayRec}) as caller:
+ # have the external prof call itself by default
+ caller.apps.PlayRec.sim_convo = True
+ sess, waitfor = caller(
+ "doggy@{}:{}".format(caller.client.server, 5080),
+ 'PlayRec',
+ timeout=10,
+ )
+ waitfor(sess, 'recorded', timeout=15)
+ waitfor(sess.call.get_peer(sess), 'recorded', timeout=15)
+ assert sess.call.vars['record']
+ time.sleep(1)
+ assert sess.hungup
+
|
Add a `PlayRec` app unit test
|
## Code Before:
import time
from switchy import sync_caller
from switchy.apps.players import TonePlay
def test_toneplay(fsip):
'''Test the synchronous caller with a simple toneplay
'''
with sync_caller(fsip, apps={"TonePlay": TonePlay}) as caller:
# have the external prof call itself by default
assert 'TonePlay' in caller.app_names
sess, waitfor = caller(
"doggy@{}:{}".format(caller.client.server, 5080),
'TonePlay',
timeout=3,
)
assert sess.is_outbound()
time.sleep(1)
sess.hangup()
time.sleep(0.1)
assert caller.client.listener.count_calls() == 0
## Instruction:
Add a `PlayRec` app unit test
## Code After:
import time
from switchy import sync_caller
from switchy.apps.players import TonePlay, PlayRec
def test_toneplay(fsip):
'''Test the synchronous caller with a simple toneplay
'''
with sync_caller(fsip, apps={"TonePlay": TonePlay}) as caller:
# have the external prof call itself by default
assert 'TonePlay' in caller.app_names
sess, waitfor = caller(
"doggy@{}:{}".format(caller.client.server, 5080),
'TonePlay',
timeout=3,
)
assert sess.is_outbound()
time.sleep(1)
sess.hangup()
time.sleep(0.1)
assert caller.client.listener.count_calls() == 0
def test_playrec(fsip):
'''Test the synchronous caller with a simulated conversation using the
`PlayRec` app. Currently this test does no audio checking but merely verifies
the callback chain is invoked as expected.
'''
with sync_caller(fsip, apps={"PlayRec": PlayRec}) as caller:
# have the external prof call itself by default
caller.apps.PlayRec.sim_convo = True
sess, waitfor = caller(
"doggy@{}:{}".format(caller.client.server, 5080),
'PlayRec',
timeout=10,
)
waitfor(sess, 'recorded', timeout=15)
waitfor(sess.call.get_peer(sess), 'recorded', timeout=15)
assert sess.call.vars['record']
time.sleep(1)
assert sess.hungup
|
// ... existing code ...
import time
from switchy import sync_caller
from switchy.apps.players import TonePlay, PlayRec
def test_toneplay(fsip):
// ... modified code ...
sess.hangup()
time.sleep(0.1)
assert caller.client.listener.count_calls() == 0
def test_playrec(fsip):
'''Test the synchronous caller with a simulated conversation using the
`PlayRec` app. Currently this test does no audio checking but merely verifies
the callback chain is invoked as expected.
'''
with sync_caller(fsip, apps={"PlayRec": PlayRec}) as caller:
# have the external prof call itself by default
caller.apps.PlayRec.sim_convo = True
sess, waitfor = caller(
"doggy@{}:{}".format(caller.client.server, 5080),
'PlayRec',
timeout=10,
)
waitfor(sess, 'recorded', timeout=15)
waitfor(sess.call.get_peer(sess), 'recorded', timeout=15)
assert sess.call.vars['record']
time.sleep(1)
assert sess.hungup
// ... rest of the code ...
|
36c2e7449b7817a66b60eaff4c8518ae6d4f4a01
|
categories/tests.py
|
categories/tests.py
|
from .models import Category
from .serializers import CategorySerializer
from employees.models import Employee
from django.core.urlresolvers import reverse
from rest_framework import status
from rest_framework.test import APITestCase
class CategoryTestCase(APITestCase):
def setUp(self):
Category.objects.create(name='Coworker')
Employee.objects.create_superuser('user1', '[email protected]', 'user1password')
Category.objects.create(name='Category1', weight=2)
Category.objects.create(name='Category2')
self.client.login(username='user1', password='user1password')
def test_category_creation(self):
category1 = Category.objects.get(name='Category1')
category2 = Category.objects.get(name='Category2')
self.assertEqual(category1.weight, 2)
self.assertEqual(category2.weight, 1)
def test_category_list(self):
categories = Category.objects.all()
response_data = CategorySerializer(categories, many=True).data
url = reverse('categories:category_list')
response = self.client.get(url, format='json')
self.assertEqual(response.data, response_data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
from .models import Category
from .serializers import CategorySerializer
from employees.models import Employee
from django.core.urlresolvers import reverse
from rest_framework import status
from rest_framework.test import APITestCase
class CategoryTestCase(APITestCase):
def setUp(self):
Category.objects.create(name='Coworker')
Employee.objects.create_superuser('user1', '[email protected]', 'user1password')
Category.objects.create(name='Category1', weight=2)
Category.objects.create(name='Category2')
self.client.login(username='user1', password='user1password')
def test_category_creation(self):
category1 = Category.objects.get(name='Category1')
category2 = Category.objects.get(name='Category2')
self.assertEqual(category1.weight, 2)
self.assertEqual(category2.weight, 1)
|
Remove category_list test until urls are fixed.
|
Remove category_list test until urls are fixed.
|
Python
|
apache-2.0
|
belatrix/BackendAllStars
|
from .models import Category
from .serializers import CategorySerializer
from employees.models import Employee
from django.core.urlresolvers import reverse
from rest_framework import status
from rest_framework.test import APITestCase
class CategoryTestCase(APITestCase):
def setUp(self):
Category.objects.create(name='Coworker')
Employee.objects.create_superuser('user1', '[email protected]', 'user1password')
Category.objects.create(name='Category1', weight=2)
Category.objects.create(name='Category2')
self.client.login(username='user1', password='user1password')
def test_category_creation(self):
category1 = Category.objects.get(name='Category1')
category2 = Category.objects.get(name='Category2')
self.assertEqual(category1.weight, 2)
self.assertEqual(category2.weight, 1)
- def test_category_list(self):
- categories = Category.objects.all()
- response_data = CategorySerializer(categories, many=True).data
- url = reverse('categories:category_list')
- response = self.client.get(url, format='json')
- self.assertEqual(response.data, response_data)
- self.assertEqual(response.status_code, status.HTTP_200_OK)
-
|
Remove category_list test until urls are fixed.
|
## Code Before:
from .models import Category
from .serializers import CategorySerializer
from employees.models import Employee
from django.core.urlresolvers import reverse
from rest_framework import status
from rest_framework.test import APITestCase
class CategoryTestCase(APITestCase):
def setUp(self):
Category.objects.create(name='Coworker')
Employee.objects.create_superuser('user1', '[email protected]', 'user1password')
Category.objects.create(name='Category1', weight=2)
Category.objects.create(name='Category2')
self.client.login(username='user1', password='user1password')
def test_category_creation(self):
category1 = Category.objects.get(name='Category1')
category2 = Category.objects.get(name='Category2')
self.assertEqual(category1.weight, 2)
self.assertEqual(category2.weight, 1)
def test_category_list(self):
categories = Category.objects.all()
response_data = CategorySerializer(categories, many=True).data
url = reverse('categories:category_list')
response = self.client.get(url, format='json')
self.assertEqual(response.data, response_data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
## Instruction:
Remove category_list test until urls are fixed.
## Code After:
from .models import Category
from .serializers import CategorySerializer
from employees.models import Employee
from django.core.urlresolvers import reverse
from rest_framework import status
from rest_framework.test import APITestCase
class CategoryTestCase(APITestCase):
def setUp(self):
Category.objects.create(name='Coworker')
Employee.objects.create_superuser('user1', '[email protected]', 'user1password')
Category.objects.create(name='Category1', weight=2)
Category.objects.create(name='Category2')
self.client.login(username='user1', password='user1password')
def test_category_creation(self):
category1 = Category.objects.get(name='Category1')
category2 = Category.objects.get(name='Category2')
self.assertEqual(category1.weight, 2)
self.assertEqual(category2.weight, 1)
|
...
category2 = Category.objects.get(name='Category2')
self.assertEqual(category1.weight, 2)
self.assertEqual(category2.weight, 1)
...
|
bb34b21ebd2378f944498708ac4f13d16aa61aa1
|
src/mist/io/tests/api/features/steps/backends.py
|
src/mist/io/tests/api/features/steps/backends.py
|
from behave import *
@given(u'"{text}" backend added')
def given_backend(context, text):
backends = context.client.list_backends()
for backend in backends:
if text in backend['title']:
return
@when(u'I list backends')
def list_backends(context):
context.backends = context.client.list_backends()
|
from behave import *
@given(u'"{text}" backend added through api')
def given_backend(context, text):
backends = context.client.list_backends()
for backend in backends:
if text in backend['title']:
return
@when(u'I list backends')
def list_backends(context):
context.backends = context.client.list_backends()
|
Rename Behave steps for api tests
|
Rename Behave steps for api tests
|
Python
|
agpl-3.0
|
johnnyWalnut/mist.io,DimensionDataCBUSydney/mist.io,zBMNForks/mist.io,afivos/mist.io,Lao-liu/mist.io,Lao-liu/mist.io,munkiat/mist.io,kelonye/mist.io,kelonye/mist.io,afivos/mist.io,Lao-liu/mist.io,Lao-liu/mist.io,DimensionDataCBUSydney/mist.io,johnnyWalnut/mist.io,zBMNForks/mist.io,DimensionDataCBUSydney/mist.io,DimensionDataCBUSydney/mist.io,munkiat/mist.io,zBMNForks/mist.io,munkiat/mist.io,johnnyWalnut/mist.io,afivos/mist.io,munkiat/mist.io,kelonye/mist.io
|
from behave import *
- @given(u'"{text}" backend added')
+ @given(u'"{text}" backend added through api')
def given_backend(context, text):
backends = context.client.list_backends()
for backend in backends:
if text in backend['title']:
return
@when(u'I list backends')
def list_backends(context):
context.backends = context.client.list_backends()
|
Rename Behave steps for api tests
|
## Code Before:
from behave import *
@given(u'"{text}" backend added')
def given_backend(context, text):
backends = context.client.list_backends()
for backend in backends:
if text in backend['title']:
return
@when(u'I list backends')
def list_backends(context):
context.backends = context.client.list_backends()
## Instruction:
Rename Behave steps for api tests
## Code After:
from behave import *
@given(u'"{text}" backend added through api')
def given_backend(context, text):
backends = context.client.list_backends()
for backend in backends:
if text in backend['title']:
return
@when(u'I list backends')
def list_backends(context):
context.backends = context.client.list_backends()
|
# ... existing code ...
from behave import *
@given(u'"{text}" backend added through api')
def given_backend(context, text):
backends = context.client.list_backends()
# ... rest of the code ...
|
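The rename above matters because behave matches steps by their exact text, and it rejects duplicate step definitions at load time; a suffix like "through api" keeps API steps from colliding with similarly worded UI steps. A minimal sketch, with context.client assumed from the record:
from behave import given, when

@given(u'"{text}" backend added through api')
def backend_added_via_api(context, text):
    # scan existing backends by title substring, as in the record
    for backend in context.client.list_backends():
        if text in backend['title']:
            return

@when(u'I list backends')
def list_backends(context):
    context.backends = context.client.list_backends()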
87d2e511b0fedd2a09610c35337336d443a756a4
|
tests/unit/cli/filewatch/test_stat.py
|
tests/unit/cli/filewatch/test_stat.py
|
import os
from chalice.cli.filewatch import stat
class FakeOSUtils(object):
def __init__(self):
self.initial_scan = True
def walk(self, rootdir):
yield 'rootdir', [], ['bad-file', 'baz']
if self.initial_scan:
self.initial_scan = False
def joinpath(self, *parts):
return os.path.join(*parts)
def mtime(self, path):
if self.initial_scan:
return 1
if path.endswith('bad-file'):
raise OSError("Bad file")
return 2
def test_can_ignore_stat_errors():
calls = []
def callback(*args, **kwargs):
calls.append((args, kwargs))
watcher = stat.StatFileWatcher(FakeOSUtils())
watcher.watch_for_file_changes('rootdir', callback)
assert len(calls) == 1
|
import os
import time
from chalice.cli.filewatch import stat
class FakeOSUtils(object):
def __init__(self):
self.initial_scan = True
def walk(self, rootdir):
yield 'rootdir', [], ['bad-file', 'baz']
if self.initial_scan:
self.initial_scan = False
def joinpath(self, *parts):
return os.path.join(*parts)
def mtime(self, path):
if self.initial_scan:
return 1
if path.endswith('bad-file'):
raise OSError("Bad file")
return 2
def test_can_ignore_stat_errors():
calls = []
def callback(*args, **kwargs):
calls.append((args, kwargs))
watcher = stat.StatFileWatcher(FakeOSUtils())
watcher.watch_for_file_changes('rootdir', callback)
for _ in range(10):
if len(calls) == 1:
break
time.sleep(0.2)
else:
raise AssertionError("Expected callback to be invoked but was not.")
|
Add polling loop to allow time for callback to be invoked
|
Add polling loop to allow time for callback to be invoked
|
Python
|
apache-2.0
|
awslabs/chalice
|
import os
+ import time
from chalice.cli.filewatch import stat
class FakeOSUtils(object):
def __init__(self):
self.initial_scan = True
def walk(self, rootdir):
yield 'rootdir', [], ['bad-file', 'baz']
if self.initial_scan:
self.initial_scan = False
def joinpath(self, *parts):
return os.path.join(*parts)
def mtime(self, path):
if self.initial_scan:
return 1
if path.endswith('bad-file'):
raise OSError("Bad file")
return 2
def test_can_ignore_stat_errors():
calls = []
def callback(*args, **kwargs):
calls.append((args, kwargs))
watcher = stat.StatFileWatcher(FakeOSUtils())
watcher.watch_for_file_changes('rootdir', callback)
+ for _ in range(10):
- assert len(calls) == 1
+ if len(calls) == 1:
+ break
+ time.sleep(0.2)
+ else:
+ raise AssertionError("Expected callback to be invoked but was not.")
|
Add polling loop to allow time for callback to be invoked
|
## Code Before:
import os
from chalice.cli.filewatch import stat
class FakeOSUtils(object):
def __init__(self):
self.initial_scan = True
def walk(self, rootdir):
yield 'rootdir', [], ['bad-file', 'baz']
if self.initial_scan:
self.initial_scan = False
def joinpath(self, *parts):
return os.path.join(*parts)
def mtime(self, path):
if self.initial_scan:
return 1
if path.endswith('bad-file'):
raise OSError("Bad file")
return 2
def test_can_ignore_stat_errors():
calls = []
def callback(*args, **kwargs):
calls.append((args, kwargs))
watcher = stat.StatFileWatcher(FakeOSUtils())
watcher.watch_for_file_changes('rootdir', callback)
assert len(calls) == 1
## Instruction:
Add polling loop to allow time for callback to be invoked
## Code After:
import os
import time
from chalice.cli.filewatch import stat
class FakeOSUtils(object):
def __init__(self):
self.initial_scan = True
def walk(self, rootdir):
yield 'rootdir', [], ['bad-file', 'baz']
if self.initial_scan:
self.initial_scan = False
def joinpath(self, *parts):
return os.path.join(*parts)
def mtime(self, path):
if self.initial_scan:
return 1
if path.endswith('bad-file'):
raise OSError("Bad file")
return 2
def test_can_ignore_stat_errors():
calls = []
def callback(*args, **kwargs):
calls.append((args, kwargs))
watcher = stat.StatFileWatcher(FakeOSUtils())
watcher.watch_for_file_changes('rootdir', callback)
for _ in range(10):
if len(calls) == 1:
break
time.sleep(0.2)
else:
raise AssertionError("Expected callback to be invoked but was not.")
|
// ... existing code ...
import os
import time
from chalice.cli.filewatch import stat
// ... modified code ...
watcher = stat.StatFileWatcher(FakeOSUtils())
watcher.watch_for_file_changes('rootdir', callback)
for _ in range(10):
if len(calls) == 1:
break
time.sleep(0.2)
else:
raise AssertionError("Expected callback to be invoked but was not.")
// ... rest of the code ...
|
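The fix above is the standard poll-and-sleep pattern for asserting on a background watcher's side effect; a reusable form of it in plain Python:
import time

def wait_for(predicate, attempts=10, delay=0.2):
    """Poll predicate() until it is true or the attempts run out."""
    for _ in range(attempts):
        if predicate():
            return
        time.sleep(delay)
    raise AssertionError('condition not met within %.1fs' % (attempts * delay))

# usage mirroring the test above:
# wait_for(lambda: len(calls) == 1)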
aeb68225cc9c999b51b1733bffaf684280044c97
|
salt/utils/yamldumper.py
|
salt/utils/yamldumper.py
|
'''
salt.utils.yamldumper
~~~~~~~~~~~~~~~~~~~~~
'''
from __future__ import absolute_import
try:
from yaml import CDumper as Dumper
from yaml import CSafeDumper as SafeDumper
except ImportError:
from yaml import Dumper
from yaml import SafeDumper
from salt.utils.odict import OrderedDict
class OrderedDumper(Dumper):
'''
A YAML dumper that represents python OrderedDict as simple YAML map.
'''
class SafeOrderedDumper(SafeDumper):
'''
A YAML safe dumper that represents python OrderedDict as simple YAML map.
'''
def represent_ordereddict(dumper, data):
return dumper.represent_dict(data.items())
OrderedDumper.add_representer(OrderedDict, represent_ordereddict)
SafeOrderedDumper.add_representer(OrderedDict, represent_ordereddict)
|
'''
salt.utils.yamldumper
~~~~~~~~~~~~~~~~~~~~~
'''
# pylint: disable=W0232
# class has no __init__ method
from __future__ import absolute_import
try:
from yaml import CDumper as Dumper
from yaml import CSafeDumper as SafeDumper
except ImportError:
from yaml import Dumper
from yaml import SafeDumper
from salt.utils.odict import OrderedDict
class OrderedDumper(Dumper):
'''
A YAML dumper that represents python OrderedDict as simple YAML map.
'''
class SafeOrderedDumper(SafeDumper):
'''
A YAML safe dumper that represents python OrderedDict as simple YAML map.
'''
def represent_ordereddict(dumper, data):
return dumper.represent_dict(data.items())
OrderedDumper.add_representer(OrderedDict, represent_ordereddict)
SafeOrderedDumper.add_representer(OrderedDict, represent_ordereddict)
|
Disable W0232, no `__init__` method.
|
Disable W0232, no `__init__` method.
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
'''
salt.utils.yamldumper
~~~~~~~~~~~~~~~~~~~~~
'''
+ # pylint: disable=W0232
+ # class has no __init__ method
from __future__ import absolute_import
try:
from yaml import CDumper as Dumper
from yaml import CSafeDumper as SafeDumper
except ImportError:
from yaml import Dumper
from yaml import SafeDumper
from salt.utils.odict import OrderedDict
class OrderedDumper(Dumper):
'''
A YAML dumper that represents python OrderedDict as simple YAML map.
'''
class SafeOrderedDumper(SafeDumper):
'''
A YAML safe dumper that represents python OrderedDict as simple YAML map.
'''
def represent_ordereddict(dumper, data):
return dumper.represent_dict(data.items())
OrderedDumper.add_representer(OrderedDict, represent_ordereddict)
SafeOrderedDumper.add_representer(OrderedDict, represent_ordereddict)
|
Disable W0232, no `__init__` method.
|
## Code Before:
'''
salt.utils.yamldumper
~~~~~~~~~~~~~~~~~~~~~
'''
from __future__ import absolute_import
try:
from yaml import CDumper as Dumper
from yaml import CSafeDumper as SafeDumper
except ImportError:
from yaml import Dumper
from yaml import SafeDumper
from salt.utils.odict import OrderedDict
class OrderedDumper(Dumper):
'''
A YAML dumper that represents python OrderedDict as simple YAML map.
'''
class SafeOrderedDumper(SafeDumper):
'''
A YAML safe dumper that represents python OrderedDict as simple YAML map.
'''
def represent_ordereddict(dumper, data):
return dumper.represent_dict(data.items())
OrderedDumper.add_representer(OrderedDict, represent_ordereddict)
SafeOrderedDumper.add_representer(OrderedDict, represent_ordereddict)
## Instruction:
Disable W0232, no `__init__` method.
## Code After:
'''
salt.utils.yamldumper
~~~~~~~~~~~~~~~~~~~~~
'''
# pylint: disable=W0232
# class has no __init__ method
from __future__ import absolute_import
try:
from yaml import CDumper as Dumper
from yaml import CSafeDumper as SafeDumper
except ImportError:
from yaml import Dumper
from yaml import SafeDumper
from salt.utils.odict import OrderedDict
class OrderedDumper(Dumper):
'''
A YAML dumper that represents python OrderedDict as simple YAML map.
'''
class SafeOrderedDumper(SafeDumper):
'''
A YAML safe dumper that represents python OrderedDict as simple YAML map.
'''
def represent_ordereddict(dumper, data):
return dumper.represent_dict(data.items())
OrderedDumper.add_representer(OrderedDict, represent_ordereddict)
SafeOrderedDumper.add_representer(OrderedDict, represent_ordereddict)
|
...
~~~~~~~~~~~~~~~~~~~~~
'''
# pylint: disable=W0232
# class has no __init__ method
from __future__ import absolute_import
try:
...
|
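A usage sketch for the dumpers defined above: PyYAML's yaml.dump accepts a Dumper class, so an OrderedDict serialises with its insertion order intact. Self-contained, assuming PyYAML is installed:
import yaml
from collections import OrderedDict

class SafeOrderedDumper(yaml.SafeDumper):
    pass

SafeOrderedDumper.add_representer(
    OrderedDict, lambda dumper, data: dumper.represent_dict(data.items()))

print(yaml.dump(OrderedDict([('b', 1), ('a', 2)]),
                Dumper=SafeOrderedDumper, default_flow_style=False))
# b: 1
# a: 2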
49f5802a02a550cc8cee3be417426a83c31de5c9
|
Source/Git/Experiments/git_log.py
|
Source/Git/Experiments/git_log.py
|
import sys
import git
r = git.Repo( sys.argv[1] )
def printTree( tree, indent=0 ):
prefix = ' '*indent
print( prefix, '-' * 16 )
print( prefix, 'Tree path %s' % (tree.path,) )
for blob in tree:
print( prefix, '%s %s (%s)' % (blob.type, blob.path, blob.hexsha) )
for child in tree.trees:
printTree( child, indent+4 )
for index, commit in enumerate(r.iter_commits( None )):
print( '=' * 60 )
for name in sorted( dir( commit ) ):
if name[0] not in 'abcdefghijklmnopqrstuvwxyz':
continue
print( 'Commit: %s: %r' % (name, getattr( commit, name )) )
print( '-' * 60 )
stats = commit.stats
for name in sorted( dir( stats ) ):
if name[0] not in 'abcdefghijklmnopqrstuvwxyz':
continue
if name == 'files':
for file in stats.files:
print( 'Commit.Stats.files: %s: %r' % (file, stats.files[file]) )
else:
print( 'Commit.Stats: %s: %r' % (name, getattr( stats, name )) )
print( '-' * 60 )
tree = commit.tree
printTree( tree )
|
import sys
import git
r = git.Repo( sys.argv[1] )
def printTree( tree, indent=0 ):
prefix = ' '*indent
print( prefix, '-' * 16 )
print( prefix, 'Tree path %s' % (tree.path,) )
for blob in tree:
print( prefix, '%s %s (%s)' % (blob.type, blob.path, blob.hexsha) )
for child in tree.trees:
printTree( child, indent+4 )
for index, commit in enumerate(r.iter_commits( None )):
print( '=' * 60 )
for name in sorted( dir( commit ) ):
if name[0] not in 'abcdefghijklmnopqrstuvwxyz':
continue
print( 'Commit: %s: %r' % (name, getattr( commit, name )) )
print( '-' * 60 )
stats = commit.stats
for name in sorted( dir( stats ) ):
if name[0] not in 'abcdefghijklmnopqrstuvwxyz':
continue
if name == 'files':
for file in stats.files:
print( 'Commit.Stats.files: %s: %r' % (file, stats.files[file]) )
else:
print( 'Commit.Stats: %s: %r' % (name, getattr( stats, name )) )
print( '-' * 60 )
tree = commit.tree
printTree( tree )
if index > 1:
break
|
Exit the loop early when experimenting.
|
Exit the loop early when experimenting.
|
Python
|
apache-2.0
|
barry-scott/scm-workbench,barry-scott/scm-workbench,barry-scott/scm-workbench
|
import sys
import git
r = git.Repo( sys.argv[1] )
def printTree( tree, indent=0 ):
prefix = ' '*indent
print( prefix, '-' * 16 )
print( prefix, 'Tree path %s' % (tree.path,) )
for blob in tree:
print( prefix, '%s %s (%s)' % (blob.type, blob.path, blob.hexsha) )
for child in tree.trees:
printTree( child, indent+4 )
for index, commit in enumerate(r.iter_commits( None )):
print( '=' * 60 )
for name in sorted( dir( commit ) ):
if name[0] not in 'abcdefghijklmnopqrstuvwxyz':
continue
print( 'Commit: %s: %r' % (name, getattr( commit, name )) )
print( '-' * 60 )
stats = commit.stats
for name in sorted( dir( stats ) ):
if name[0] not in 'abcdefghijklmnopqrstuvwxyz':
continue
if name == 'files':
for file in stats.files:
print( 'Commit.Stats.files: %s: %r' % (file, stats.files[file]) )
else:
print( 'Commit.Stats: %s: %r' % (name, getattr( stats, name )) )
print( '-' * 60 )
tree = commit.tree
printTree( tree )
+ if index > 1:
+ break
+
|
Exit the loop early when experimenting.
|
## Code Before:
import sys
import git
r = git.Repo( sys.argv[1] )
def printTree( tree, indent=0 ):
prefix = ' '*indent
print( prefix, '-' * 16 )
print( prefix, 'Tree path %s' % (tree.path,) )
for blob in tree:
print( prefix, '%s %s (%s)' % (blob.type, blob.path, blob.hexsha) )
for child in tree.trees:
printTree( child, indent+4 )
for index, commit in enumerate(r.iter_commits( None )):
print( '=' * 60 )
for name in sorted( dir( commit ) ):
if name[0] not in 'abcdefghijklmnopqrstuvwxyz':
continue
print( 'Commit: %s: %r' % (name, getattr( commit, name )) )
print( '-' * 60 )
stats = commit.stats
for name in sorted( dir( stats ) ):
if name[0] not in 'abcdefghijklmnopqrstuvwxyz':
continue
if name == 'files':
for file in stats.files:
print( 'Commit.Stats.files: %s: %r' % (file, stats.files[file]) )
else:
print( 'Commit.Stats: %s: %r' % (name, getattr( stats, name )) )
print( '-' * 60 )
tree = commit.tree
printTree( tree )
## Instruction:
Exit the loop early when experimenting.
## Code After:
import sys
import git
r = git.Repo( sys.argv[1] )
def printTree( tree, indent=0 ):
prefix = ' '*indent
print( prefix, '-' * 16 )
print( prefix, 'Tree path %s' % (tree.path,) )
for blob in tree:
print( prefix, '%s %s (%s)' % (blob.type, blob.path, blob.hexsha) )
for child in tree.trees:
printTree( child, indent+4 )
for index, commit in enumerate(r.iter_commits( None )):
print( '=' * 60 )
for name in sorted( dir( commit ) ):
if name[0] not in 'abcdefghijklmnopqrstuvwxyz':
continue
print( 'Commit: %s: %r' % (name, getattr( commit, name )) )
print( '-' * 60 )
stats = commit.stats
for name in sorted( dir( stats ) ):
if name[0] not in 'abcdefghijklmnopqrstuvwxyz':
continue
if name == 'files':
for file in stats.files:
print( 'Commit.Stats.files: %s: %r' % (file, stats.files[file]) )
else:
print( 'Commit.Stats: %s: %r' % (name, getattr( stats, name )) )
print( '-' * 60 )
tree = commit.tree
printTree( tree )
if index > 1:
break
|
# ... existing code ...
tree = commit.tree
printTree( tree )
if index > 1:
break
# ... rest of the code ...
|
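The index check above caps the experiment after three commits; itertools.islice is an equivalent way to bound any iterator without threading an index through the loop. A stand-in sketch (range replaces r.iter_commits):
import itertools

for commit in itertools.islice(range(100), 3):
    print(commit)  # 0, 1, 2 -- the iterator stops after three items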
d51b9786b1cc72dd01549a8547f06efc27aab4c3
|
tests/test_settings.py
|
tests/test_settings.py
|
from test_app.settings import *
INSTALLED_APPS += ("cmsplugin_rst",)
## DJANGO CMSPLUGIN RST CONF ##
CMSPLUGIN_RST_WRITER_NAME = "html4css1"
CMSPLUGIN_RST_CONTENT_PREFIX = """
.. |nbsp| unicode:: 0xA0
:trim:
*Global Prefix: Start of Content*
"""
CMSPLUGIN_RST_CONTENT_SUFFIX = \
"""*Global Suffix: End of Content*"""
CMSPLUGIN_RST_SETTINGS_OVERRIDES = {"initial_header_level": 2, # minimum "h2" when rendered to html
"doctitle_xform": False, # important, to have even lone titles stay in the html fragment
"sectsubtitle_xform": False, # we disable the promotion of the title of a lone subsection to a subtitle
'file_insertion_enabled': False, # SECURITY MEASURE (file hacking)
'raw_enabled': False, # SECURITY MEASURE (script tag)
'smart_quotes': "alt"}
#"'language_code': "fr" ## SEEMS BROKEN!
def add_stuffs_to_soup(soup):
soup.div.append("""String Appended Via Beautifulsoup Postprocessor""")
CMSPLUGIN_RST_POSTPROCESSORS = [
"test_settings.add_stuffs_to_soup"
]
|
from textwrap import dedent
from test_app.settings import *
INSTALLED_APPS += ("cmsplugin_rst",)
## DJANGO CMSPLUGIN RST CONF ##
if not os.environ.get("CMSPLUGIN_RST_SKIP_CONF"): # use this flag to test the zero-conf case
CMSPLUGIN_RST_WRITER_NAME = "html4css1"
CMSPLUGIN_RST_CONTENT_PREFIX = dedent("""
.. |nbsp| unicode:: 0xA0
:trim:
*Global Prefix: Start of Content*
""")
CMSPLUGIN_RST_CONTENT_SUFFIX = \
"""*Global Suffix: End of Content*"""
CMSPLUGIN_RST_SETTINGS_OVERRIDES = {"initial_header_level": 2, # minimum "h2" when rendered to html
"smart_quotes": "alt"}
#"'language_code': "fr" # weirdly seems BROKEN!
def add_stuffs_to_soup(soup):
soup.div.append("""String Appended Via Beautifulsoup Postprocessor""")
CMSPLUGIN_RST_POSTPROCESSORS = [
"test_settings.add_stuffs_to_soup"
]
|
Make test settings skippable via CMSPLUGIN_RST_SKIP_CONF environment variable.
|
Make test settings skippable via CMSPLUGIN_RST_SKIP_CONF environment variable.
|
Python
|
bsd-3-clause
|
pakal/cmsplugin-rst,ojii/cmsplugin-rst
|
+ from textwrap import dedent
from test_app.settings import *
INSTALLED_APPS += ("cmsplugin_rst",)
-
+
## DJANGO CMSPLUGIN RST CONF ##
- CMSPLUGIN_RST_WRITER_NAME = "html4css1"
+ if not os.environ.get("CMSPLUGIN_RST_SKIP_CONF"): # use this flag to test the zero-conf case
- CMSPLUGIN_RST_CONTENT_PREFIX = """
+ CMSPLUGIN_RST_WRITER_NAME = "html4css1"
+ CMSPLUGIN_RST_CONTENT_PREFIX = dedent("""
+
- .. |nbsp| unicode:: 0xA0
+ .. |nbsp| unicode:: 0xA0
- :trim:
+ :trim:
- *Global Prefix: Start of Content*
+ *Global Prefix: Start of Content*
- """
+ """)
- CMSPLUGIN_RST_CONTENT_SUFFIX = \
+ CMSPLUGIN_RST_CONTENT_SUFFIX = \
- """*Global Suffix: End of Content*"""
+ """*Global Suffix: End of Content*"""
- CMSPLUGIN_RST_SETTINGS_OVERRIDES = {"initial_header_level": 2, # minimum "h2" when rendered to html
+ CMSPLUGIN_RST_SETTINGS_OVERRIDES = {"initial_header_level": 2, # minimum "h2" when rendered to html
- "doctitle_xform": False, # important, to have even lone titles stay in the html fragment
- "sectsubtitle_xform": False, # we disable the promotion of the title of a lone subsection to a subtitle
- 'file_insertion_enabled': False, # SECURITY MEASURE (file hacking)
- 'raw_enabled': False, # SECURITY MEASURE (script tag)
- 'smart_quotes': "alt"}
+ "smart_quotes": "alt"}
- #"'language_code': "fr" ## SEEMS BROKEN!
+ #"'language_code': "fr" # weirdly seems BROKEN!
-
+
- def add_stuffs_to_soup(soup):
+ def add_stuffs_to_soup(soup):
- soup.div.append("""String Appended Via Beautifulsoup Postprocessor""")
+ soup.div.append("""String Appended Via Beautifulsoup Postprocessor""")
- CMSPLUGIN_RST_POSTPROCESSORS = [
+ CMSPLUGIN_RST_POSTPROCESSORS = [
- "test_settings.add_stuffs_to_soup"
+ "test_settings.add_stuffs_to_soup"
- ]
+ ]
-
|
Make test settings skippable via CMSPLUGIN_RST_SKIP_CONF environment variable.
|
## Code Before:
from test_app.settings import *
INSTALLED_APPS += ("cmsplugin_rst",)
## DJANGO CMSPLUGIN RST CONF ##
CMSPLUGIN_RST_WRITER_NAME = "html4css1"
CMSPLUGIN_RST_CONTENT_PREFIX = """
.. |nbsp| unicode:: 0xA0
:trim:
*Global Prefix: Start of Content*
"""
CMSPLUGIN_RST_CONTENT_SUFFIX = \
"""*Global Suffix: End of Content*"""
CMSPLUGIN_RST_SETTINGS_OVERRIDES = {"initial_header_level": 2, # minimum "h2" when rendered to html
"doctitle_xform": False, # important, to have even lone titles stay in the html fragment
"sectsubtitle_xform": False, # we disable the promotion of the title of a lone subsection to a subtitle
'file_insertion_enabled': False, # SECURITY MEASURE (file hacking)
'raw_enabled': False, # SECURITY MEASURE (script tag)
'smart_quotes': "alt"}
#"'language_code': "fr" ## SEEMS BROKEN!
def add_stuffs_to_soup(soup):
soup.div.append("""String Appended Via Beautifulsoup Postprocessor""")
CMSPLUGIN_RST_POSTPROCESSORS = [
"test_settings.add_stuffs_to_soup"
]
## Instruction:
Make test settings skippable via CMSPLUGIN_RST_SKIP_CONF environment variable.
## Code After:
from textwrap import dedent
from test_app.settings import *
INSTALLED_APPS += ("cmsplugin_rst",)
## DJANGO CMSPLUGIN RST CONF ##
if not os.environ.get("CMSPLUGIN_RST_SKIP_CONF"): # use this flag to test the zero-conf case
CMSPLUGIN_RST_WRITER_NAME = "html4css1"
CMSPLUGIN_RST_CONTENT_PREFIX = dedent("""
.. |nbsp| unicode:: 0xA0
:trim:
*Global Prefix: Start of Content*
""")
CMSPLUGIN_RST_CONTENT_SUFFIX = \
"""*Global Suffix: End of Content*"""
CMSPLUGIN_RST_SETTINGS_OVERRIDES = {"initial_header_level": 2, # minimum "h2" when rendered to html
"smart_quotes": "alt"}
#"'language_code': "fr" # weirdly seems BROKEN!
def add_stuffs_to_soup(soup):
soup.div.append("""String Appended Via Beautifulsoup Postprocessor""")
CMSPLUGIN_RST_POSTPROCESSORS = [
"test_settings.add_stuffs_to_soup"
]
|
// ... existing code ...
from textwrap import dedent
from test_app.settings import *
INSTALLED_APPS += ("cmsplugin_rst",)
## DJANGO CMSPLUGIN RST CONF ##
if not os.environ.get("CMSPLUGIN_RST_SKIP_CONF"): # use this flag to test the zero-conf case
CMSPLUGIN_RST_WRITER_NAME = "html4css1"
CMSPLUGIN_RST_CONTENT_PREFIX = dedent("""
.. |nbsp| unicode:: 0xA0
:trim:
*Global Prefix: Start of Content*
""")
CMSPLUGIN_RST_CONTENT_SUFFIX = \
"""*Global Suffix: End of Content*"""
CMSPLUGIN_RST_SETTINGS_OVERRIDES = {"initial_header_level": 2, # minimum "h2" when rendered to html
"smart_quotes": "alt"}
#"'language_code': "fr" # weirdly seems BROKEN!
def add_stuffs_to_soup(soup):
soup.div.append("""String Appended Via Beautifulsoup Postprocessor""")
CMSPLUGIN_RST_POSTPROCESSORS = [
"test_settings.add_stuffs_to_soup"
]
// ... rest of the code ...
|
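The environment-variable gate above is a common way to exercise a library's zero-conf defaults from the same settings module; a stripped-down version of the pattern (the flag name is the record's, everything else assumed):
import os

if not os.environ.get("CMSPLUGIN_RST_SKIP_CONF"):
    RST_SETTINGS = {"initial_header_level": 2}
else:
    RST_SETTINGS = {}  # fall back to the library's own defaults

# run as: CMSPLUGIN_RST_SKIP_CONF=1 python -m pytest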
c2eeb0a7d8d3a2692537f2004052b9fad9b1527a
|
tests/test_utils.py
|
tests/test_utils.py
|
import os
from unittest import TestCase
import requests
from furikura import utils
def test_request():
requests.get('https://example.com')
class TestUtils(TestCase):
def test_get_file(self):
self.assertEqual(utils.get_file("testfile"), "/usr/share/testfile")
def test_check_connection(self):
self.addTypeEqualityFunc(type, utils.check_connection(test_request))
def test_autostart(self):
utils.autostart('add')
self.assertTrue(os.path.islink(os.path.expanduser('~/.config/autostart/furikura.desktop')))
utils.autostart('remove')
self.assertFalse(os.path.islink(os.path.expanduser('~/.config/autostart/furikura.desktop')))
|
import os
from unittest import TestCase
import requests
from furikura import utils
def test_request():
requests.get('https://example.com')
class TestUtils(TestCase):
def test_get_file(self):
self.assertEqual(utils.get_file("testfile"), "/usr/share/testfile")
def test_check_connection(self):
self.addTypeEqualityFunc(type, utils.check_connection(test_request))
def test_autostart(self):
os.makedirs(os.path.expanduser('~/.config/autostart/'), exist_ok=True)
utils.autostart('add')
self.assertTrue(os.path.islink(os.path.expanduser('~/.config/autostart/furikura.desktop')))
utils.autostart('remove')
self.assertFalse(os.path.islink(os.path.expanduser('~/.config/autostart/furikura.desktop')))
|
Create autostart folder for tests
|
Create autostart folder for tests
|
Python
|
mit
|
benjamindean/furi-kura,benjamindean/furi-kura
|
import os
from unittest import TestCase
import requests
from furikura import utils
def test_request():
requests.get('https://example.com')
class TestUtils(TestCase):
def test_get_file(self):
self.assertEqual(utils.get_file("testfile"), "/usr/share/testfile")
def test_check_connection(self):
self.addTypeEqualityFunc(type, utils.check_connection(test_request))
def test_autostart(self):
+ os.makedirs(os.path.expanduser('~/.config/autostart/'), exist_ok=True)
+
utils.autostart('add')
self.assertTrue(os.path.islink(os.path.expanduser('~/.config/autostart/furikura.desktop')))
utils.autostart('remove')
self.assertFalse(os.path.islink(os.path.expanduser('~/.config/autostart/furikura.desktop')))
|
Create autostart folder for tests
|
## Code Before:
import os
from unittest import TestCase
import requests
from furikura import utils
def test_request():
requests.get('https://example.com')
class TestUtils(TestCase):
def test_get_file(self):
self.assertEqual(utils.get_file("testfile"), "/usr/share/testfile")
def test_check_connection(self):
self.addTypeEqualityFunc(type, utils.check_connection(test_request))
def test_autostart(self):
utils.autostart('add')
self.assertTrue(os.path.islink(os.path.expanduser('~/.config/autostart/furikura.desktop')))
utils.autostart('remove')
self.assertFalse(os.path.islink(os.path.expanduser('~/.config/autostart/furikura.desktop')))
## Instruction:
Create autostart folder for tests
## Code After:
import os
from unittest import TestCase
import requests
from furikura import utils
def test_request():
requests.get('https://example.com')
class TestUtils(TestCase):
def test_get_file(self):
self.assertEqual(utils.get_file("testfile"), "/usr/share/testfile")
def test_check_connection(self):
self.addTypeEqualityFunc(type, utils.check_connection(test_request))
def test_autostart(self):
os.makedirs(os.path.expanduser('~/.config/autostart/'), exist_ok=True)
utils.autostart('add')
self.assertTrue(os.path.islink(os.path.expanduser('~/.config/autostart/furikura.desktop')))
utils.autostart('remove')
self.assertFalse(os.path.islink(os.path.expanduser('~/.config/autostart/furikura.desktop')))
|
# ... existing code ...
self.addTypeEqualityFunc(type, utils.check_connection(test_request))
def test_autostart(self):
os.makedirs(os.path.expanduser('~/.config/autostart/'), exist_ok=True)
utils.autostart('add')
self.assertTrue(os.path.islink(os.path.expanduser('~/.config/autostart/furikura.desktop')))
# ... rest of the code ...
|
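The one-line fix above relies on os.makedirs(..., exist_ok=True) being idempotent, so repeated test runs cannot fail with FileExistsError. A self-contained check using a temporary directory:
import os
import tempfile

base = tempfile.mkdtemp()
autostart = os.path.join(base, '.config', 'autostart')
os.makedirs(autostart, exist_ok=True)
os.makedirs(autostart, exist_ok=True)  # second call is a no-op
print(os.path.isdir(autostart))  # True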
4f70897d5a85f1822a93df9bc91979ea79594901
|
nose2/tests/unit/test_generators_plugin.py
|
nose2/tests/unit/test_generators_plugin.py
|
from nose2.plugins.loader import functions
from nose2.tests._common import TestCase
class TestGeneratorUnpack(TestCase):
tags = ['unit']
def setUp(self):
self.expect = [(0, ('call', (0, 1))),
(1, ('call', (1, 2))),
(2, ('call', (2, 3))),]
def test_unpack_handles_nose_style_generators(self):
def gen():
for i in range(0, 3):
yield 'call', i, i + 1
out = list(functions.Functions().unpack(gen()))
self.assertEqual(out, self.expect)
def test_unpack_handles_unittest2_style_generators(self):
def gen():
for i in range(0, 3):
yield 'call', (i, i + 1)
out = list(functions.Functions().unpack(gen()))
self.assertEqual(out, self.expect)
|
from nose2 import events, loader, session
from nose2.plugins.loader import generators
from nose2.tests._common import TestCase
class TestGeneratorUnpack(TestCase):
tags = ['unit']
def setUp(self):
self.session = session.Session()
self.loader = loader.PluggableTestLoader(self.session)
self.expect = [(0, ('call', (0, 1))),
(1, ('call', (1, 2))),
(2, ('call', (2, 3))),]
self.plugin = generators.Generators(session=self.session)
def test_unpack_handles_nose_style_generators(self):
def gen():
for i in range(0, 3):
yield 'call', i, i + 1
out = list(self.plugin.unpack(gen()))
self.assertEqual(out, self.expect)
def test_unpack_handles_unittest2_style_generators(self):
def gen():
for i in range(0, 3):
yield 'call', (i, i + 1)
out = list(self.plugin.unpack(gen()))
self.assertEqual(out, self.expect)
def test_ignores_ordinary_functions(self):
class Mod(object):
pass
def test():
pass
m = Mod()
m.test = test
event = events.LoadFromModuleEvent(self.loader, m)
self.session.hooks.loadTestsFromModule(event)
self.assertEqual(len(event.extraTests), 0)
def test_can_load_tests_from_generator_functions(self):
class Mod(object):
pass
def check(x):
assert x == 1
def test():
yield check, 1
yield check, 2
m = Mod()
m.test = test
event = events.LoadFromModuleEvent(self.loader, m)
self.session.hooks.loadTestsFromModule(event)
self.assertEqual(len(event.extraTests), 2)
|
Add initial tests for generators plugin
|
Add initial tests for generators plugin
|
Python
|
bsd-2-clause
|
ojengwa/nose2,ojengwa/nose2,leth/nose2,little-dude/nose2,ptthiem/nose2,ezigman/nose2,ezigman/nose2,ptthiem/nose2,leth/nose2,little-dude/nose2
|
+ from nose2 import events, loader, session
- from nose2.plugins.loader import functions
+ from nose2.plugins.loader import generators
from nose2.tests._common import TestCase
class TestGeneratorUnpack(TestCase):
tags = ['unit']
def setUp(self):
+ self.session = session.Session()
+ self.loader = loader.PluggableTestLoader(self.session)
self.expect = [(0, ('call', (0, 1))),
(1, ('call', (1, 2))),
(2, ('call', (2, 3))),]
+ self.plugin = generators.Generators(session=self.session)
def test_unpack_handles_nose_style_generators(self):
def gen():
for i in range(0, 3):
yield 'call', i, i + 1
- out = list(functions.Functions().unpack(gen()))
+ out = list(self.plugin.unpack(gen()))
self.assertEqual(out, self.expect)
def test_unpack_handles_unittest2_style_generators(self):
def gen():
for i in range(0, 3):
yield 'call', (i, i + 1)
- out = list(functions.Functions().unpack(gen()))
+ out = list(self.plugin.unpack(gen()))
self.assertEqual(out, self.expect)
+ def test_ignores_ordinary_functions(self):
+ class Mod(object):
+ pass
+ def test():
+ pass
+ m = Mod()
+ m.test = test
+ event = events.LoadFromModuleEvent(self.loader, m)
+ self.session.hooks.loadTestsFromModule(event)
+ self.assertEqual(len(event.extraTests), 0)
+
+ def test_can_load_tests_from_generator_functions(self):
+ class Mod(object):
+ pass
+ def check(x):
+ assert x == 1
+ def test():
+ yield check, 1
+ yield check, 2
+ m = Mod()
+ m.test = test
+ event = events.LoadFromModuleEvent(self.loader, m)
+ self.session.hooks.loadTestsFromModule(event)
+ self.assertEqual(len(event.extraTests), 2)
+
|
Add initial tests for generators plugin
|
## Code Before:
from nose2.plugins.loader import functions
from nose2.tests._common import TestCase
class TestGeneratorUnpack(TestCase):
tags = ['unit']
def setUp(self):
self.expect = [(0, ('call', (0, 1))),
(1, ('call', (1, 2))),
(2, ('call', (2, 3))),]
def test_unpack_handles_nose_style_generators(self):
def gen():
for i in range(0, 3):
yield 'call', i, i + 1
out = list(functions.Functions().unpack(gen()))
self.assertEqual(out, self.expect)
def test_unpack_handles_unittest2_style_generators(self):
def gen():
for i in range(0, 3):
yield 'call', (i, i + 1)
out = list(functions.Functions().unpack(gen()))
self.assertEqual(out, self.expect)
## Instruction:
Add initial tests for generators plugin
## Code After:
from nose2 import events, loader, session
from nose2.plugins.loader import generators
from nose2.tests._common import TestCase
class TestGeneratorUnpack(TestCase):
tags = ['unit']
def setUp(self):
self.session = session.Session()
self.loader = loader.PluggableTestLoader(self.session)
self.expect = [(0, ('call', (0, 1))),
(1, ('call', (1, 2))),
(2, ('call', (2, 3))),]
self.plugin = generators.Generators(session=self.session)
def test_unpack_handles_nose_style_generators(self):
def gen():
for i in range(0, 3):
yield 'call', i, i + 1
out = list(self.plugin.unpack(gen()))
self.assertEqual(out, self.expect)
def test_unpack_handles_unittest2_style_generators(self):
def gen():
for i in range(0, 3):
yield 'call', (i, i + 1)
out = list(self.plugin.unpack(gen()))
self.assertEqual(out, self.expect)
def test_ignores_ordinary_functions(self):
class Mod(object):
pass
def test():
pass
m = Mod()
m.test = test
event = events.LoadFromModuleEvent(self.loader, m)
self.session.hooks.loadTestsFromModule(event)
self.assertEqual(len(event.extraTests), 0)
def test_can_load_tests_from_generator_functions(self):
class Mod(object):
pass
def check(x):
assert x == 1
def test():
yield check, 1
yield check, 2
m = Mod()
m.test = test
event = events.LoadFromModuleEvent(self.loader, m)
self.session.hooks.loadTestsFromModule(event)
self.assertEqual(len(event.extraTests), 2)
|
// ... existing code ...
from nose2 import events, loader, session
from nose2.plugins.loader import generators
from nose2.tests._common import TestCase
// ... modified code ...
tags = ['unit']
def setUp(self):
self.session = session.Session()
self.loader = loader.PluggableTestLoader(self.session)
self.expect = [(0, ('call', (0, 1))),
(1, ('call', (1, 2))),
(2, ('call', (2, 3))),]
self.plugin = generators.Generators(session=self.session)
def test_unpack_handles_nose_style_generators(self):
def gen():
for i in range(0, 3):
yield 'call', i, i + 1
out = list(self.plugin.unpack(gen()))
self.assertEqual(out, self.expect)
def test_unpack_handles_unittest2_style_generators(self):
...
def gen():
for i in range(0, 3):
yield 'call', (i, i + 1)
out = list(self.plugin.unpack(gen()))
self.assertEqual(out, self.expect)
def test_ignores_ordinary_functions(self):
class Mod(object):
pass
def test():
pass
m = Mod()
m.test = test
event = events.LoadFromModuleEvent(self.loader, m)
self.session.hooks.loadTestsFromModule(event)
self.assertEqual(len(event.extraTests), 0)
def test_can_load_tests_from_generator_functions(self):
class Mod(object):
pass
def check(x):
assert x == 1
def test():
yield check, 1
yield check, 2
m = Mod()
m.test = test
event = events.LoadFromModuleEvent(self.loader, m)
self.session.hooks.loadTestsFromModule(event)
self.assertEqual(len(event.extraTests), 2)
// ... rest of the code ...
|
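A standalone sketch of the unpack behaviour those tests pin down: normalise both nose-style ('call', 0, 1) and unittest2-style ('call', (0, 1)) yields into (index, (name, args)) pairs. This illustrates the contract only, not the plugin's actual implementation:
def unpack(generator):
    for index, item in enumerate(generator):
        name, rest = item[0], item[1:]
        if len(rest) == 1 and isinstance(rest[0], tuple):
            args = rest[0]        # unittest2 style: ('call', (0, 1))
        else:
            args = tuple(rest)    # nose style: ('call', 0, 1)
        yield index, (name, args)

def gen():
    for i in range(3):
        yield 'call', i, i + 1

print(list(unpack(gen())))
# [(0, ('call', (0, 1))), (1, ('call', (1, 2))), (2, ('call', (2, 3)))]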
2722a59aad0775f1bcd1e81232ff445b9012a2ae
|
ssim/compat.py
|
ssim/compat.py
|
"""Compatibility routines."""
from __future__ import absolute_import
import sys
try:
import Image # pylint: disable=import-error,unused-import
except ImportError:
from PIL import Image # pylint: disable=unused-import
try:
import ImageOps # pylint: disable=import-error,unused-import
except ImportError:
from PIL import ImageOps # pylint: disable=unused-import
if sys.version_info[0] > 2:
basestring = (str, bytes) # pylint: disable=redefined-builtin,invalid-name
else:
basestring = basestring # pylint: disable=invalid-name
|
"""Compatibility routines."""
from __future__ import absolute_import
import sys
try:
import Image # pylint: disable=import-error,unused-import
except ImportError:
from PIL import Image # pylint: disable=unused-import
try:
import ImageOps # pylint: disable=import-error,unused-import
except ImportError:
from PIL import ImageOps # pylint: disable=unused-import
if sys.version_info[0] > 2:
basestring = (str, bytes) # pylint: disable=redefined-builtin,invalid-name
else:
# pylint: disable=redefined-variable-type
basestring = basestring # pylint: disable=invalid-name
|
Add pylint to disable redefined variable.
|
Add pylint to disable redefined variable.
|
Python
|
mit
|
jterrace/pyssim
|
"""Compatibility routines."""
from __future__ import absolute_import
import sys
try:
import Image # pylint: disable=import-error,unused-import
except ImportError:
from PIL import Image # pylint: disable=unused-import
try:
import ImageOps # pylint: disable=import-error,unused-import
except ImportError:
from PIL import ImageOps # pylint: disable=unused-import
if sys.version_info[0] > 2:
basestring = (str, bytes) # pylint: disable=redefined-builtin,invalid-name
else:
+ # pylint: disable=redefined-variable-type
basestring = basestring # pylint: disable=invalid-name
|
Add pylint to disable redefined variable.
|
## Code Before:
"""Compatibility routines."""
from __future__ import absolute_import
import sys
try:
import Image # pylint: disable=import-error,unused-import
except ImportError:
from PIL import Image # pylint: disable=unused-import
try:
import ImageOps # pylint: disable=import-error,unused-import
except ImportError:
from PIL import ImageOps # pylint: disable=unused-import
if sys.version_info[0] > 2:
basestring = (str, bytes) # pylint: disable=redefined-builtin,invalid-name
else:
basestring = basestring # pylint: disable=invalid-name
## Instruction:
Add pylint to disable redefined variable.
## Code After:
"""Compatibility routines."""
from __future__ import absolute_import
import sys
try:
import Image # pylint: disable=import-error,unused-import
except ImportError:
from PIL import Image # pylint: disable=unused-import
try:
import ImageOps # pylint: disable=import-error,unused-import
except ImportError:
from PIL import ImageOps # pylint: disable=unused-import
if sys.version_info[0] > 2:
basestring = (str, bytes) # pylint: disable=redefined-builtin,invalid-name
else:
# pylint: disable=redefined-variable-type
basestring = basestring # pylint: disable=invalid-name
|
...
if sys.version_info[0] > 2:
basestring = (str, bytes) # pylint: disable=redefined-builtin,invalid-name
else:
# pylint: disable=redefined-variable-type
basestring = basestring # pylint: disable=invalid-name
...
|
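Typical use of the shim above is a single isinstance check that works on both major versions; a minimal sketch:
import sys

if sys.version_info[0] > 2:
    string_types = (str, bytes)
else:
    string_types = (basestring,)  # noqa: F821 -- Python 2 only

def is_stringlike(value):
    return isinstance(value, string_types)

print(is_stringlike('abc'), is_stringlike(3))  # True False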
c12cbae226f42405a998b93c6fd7049aadc6a19c
|
build.py
|
build.py
|
import os
import string
if __name__ == '__main__':
patch_file = 'example.patch'
base_name, _ = os.path.splitext(patch_file)
values = {
'name': 'kpatch-module-{}'.format(base_name),
'patch_file': patch_file,
'kmod_filename': 'kpatch-{}.ko'.format(base_name),
'description': 'Package generated from {} by '
'kpatch-package-builder'.format(patch_file),
}
with open('kpatch-patch.spec') as f:
spec_template = string.Template(f.read())
print(spec_template.substitute(values))
|
import os
import string
def generate_rpm_spec(template, patch_file):
spec_template = string.Template(template)
base_name, _ = os.path.splitext(patch_file)
values = {
'name': 'kpatch-module-{}'.format(base_name),
'patch_file': patch_file,
'kmod_filename': 'kpatch-{}.ko'.format(base_name),
'description': 'Package generated from {} by '
'kpatch-package-builder'.format(patch_file),
}
return spec_template.substitute(values)
if __name__ == '__main__':
with open('kpatch-patch.spec') as f:
template = f.read()
print(generate_rpm_spec(template, 'example.patch'))
|
Split spec generation into function
|
Split spec generation into function
|
Python
|
mit
|
centos-livepatching/kpatch-package-builder
|
import os
import string
- if __name__ == '__main__':
- patch_file = 'example.patch'
+ def generate_rpm_spec(template, patch_file):
+ spec_template = string.Template(template)
+
base_name, _ = os.path.splitext(patch_file)
values = {
'name': 'kpatch-module-{}'.format(base_name),
'patch_file': patch_file,
'kmod_filename': 'kpatch-{}.ko'.format(base_name),
'description': 'Package generated from {} by '
'kpatch-package-builder'.format(patch_file),
}
+ return spec_template.substitute(values)
+
+ if __name__ == '__main__':
+
with open('kpatch-patch.spec') as f:
- spec_template = string.Template(f.read())
+ template = f.read()
- print(spec_template.substitute(values))
+ print(generate_rpm_spec(template, 'example.patch'))
|
Split spec generation into function
|
## Code Before:
import os
import string
if __name__ == '__main__':
patch_file = 'example.patch'
base_name, _ = os.path.splitext(patch_file)
values = {
'name': 'kpatch-module-{}'.format(base_name),
'patch_file': patch_file,
'kmod_filename': 'kpatch-{}.ko'.format(base_name),
'description': 'Package generated from {} by '
'kpatch-package-builder'.format(patch_file),
}
with open('kpatch-patch.spec') as f:
spec_template = string.Template(f.read())
print(spec_template.substitute(values))
## Instruction:
Split spec generation into function
## Code After:
import os
import string
def generate_rpm_spec(template, patch_file):
spec_template = string.Template(template)
base_name, _ = os.path.splitext(patch_file)
values = {
'name': 'kpatch-module-{}'.format(base_name),
'patch_file': patch_file,
'kmod_filename': 'kpatch-{}.ko'.format(base_name),
'description': 'Package generated from {} by '
'kpatch-package-builder'.format(patch_file),
}
return spec_template.substitute(values)
if __name__ == '__main__':
with open('kpatch-patch.spec') as f:
template = f.read()
print(generate_rpm_spec(template, 'example.patch'))
|
# ... existing code ...
import os
import string
def generate_rpm_spec(template, patch_file):
spec_template = string.Template(template)
base_name, _ = os.path.splitext(patch_file)
values = {
# ... modified code ...
'kpatch-package-builder'.format(patch_file),
}
return spec_template.substitute(values)
if __name__ == '__main__':
with open('kpatch-patch.spec') as f:
template = f.read()
print(generate_rpm_spec(template, 'example.patch'))
# ... rest of the code ...
|
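string.Template, as used above, substitutes $name / ${name} placeholders; the same function with an inline template instead of the spec file on disk:
import os
import string

TEMPLATE = "Name: $name\nSource0: $patch_file\n"

def generate_rpm_spec(template, patch_file):
    base_name, _ = os.path.splitext(patch_file)
    return string.Template(template).substitute(
        name='kpatch-module-{}'.format(base_name),
        patch_file=patch_file)

print(generate_rpm_spec(TEMPLATE, 'example.patch'))
# Name: kpatch-module-example
# Source0: example.patch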
ea1389d6dfb0060cda8d194079aacc900bbf56ae
|
simple_graph.py
|
simple_graph.py
|
from __future__ import print_function
from __future__ import unicode_literals
class Graph(object):
''' Create an empty graph. '''
def __init__(self):
self.graph = {}
return
def nodes():
return nodes
def edges():
return edges
def add_node(self, node):
self.graph.setdefault(node, [])
return
def add_edge(self, node1, node2):
return
def del_node(self, node):
try:
del self.graph[node]
except KeyError:
raise KeyError('node not in graph')
def has_node(self, node):
return node in self.graph
def neighbors(self, node):
return self.graph[node]
def adjecent(self, node1, node2):
if node2 in self.graph[node1] or node1 in self.graph[node2]:
return True
else:
return False
|
from __future__ import print_function
from __future__ import unicode_literals
class Graph(object):
''' Create an empty graph. '''
def __init__(self):
self.graph = {}
return
def nodes(self):
return self.graph.keys()
def edges(self):
edge_list = []
for key, value in self.graph():
for item in value:
edge_list.append((key, item))
return edge_list
def add_node(self, node):
self.graph.setdefault(node, [])
def add_edge(self, node1, node2):
if node1 in self.graph:
self.graph.append(node2)
else:
self.graph[node1] = node2
def del_node(self, node):
try:
del self.graph[node]
except KeyError:
raise KeyError('node not in graph')
def has_node(self, node):
return node in self.graph
def neighbors(self, node):
return self.graph[node]
def adjecent(self, node1, node2):
if node2 in self.graph[node1] or node1 in self.graph[node2]:
return True
else:
return False
|
Add functions for adding a node, an edge, and defining a node
|
Add functions for adding a node, an edge, and defining a node
|
Python
|
mit
|
constanthatz/data-structures
|
from __future__ import print_function
from __future__ import unicode_literals
class Graph(object):
''' Create an empty graph. '''
def __init__(self):
self.graph = {}
return
- def nodes():
+ def nodes(self):
- return nodes
+ return self.graph.keys()
- def edges():
+ def edges(self):
+ edge_list = []
+ for key, value in self.graph():
+ for item in value:
+ edge_list.append((key, item))
- return edges
+ return edge_list
def add_node(self, node):
self.graph.setdefault(node, [])
- return
def add_edge(self, node1, node2):
- return
+ if node1 in self.graph:
+ self.graph.append(node2)
+ else:
+ self.graph[node1] = node2
def del_node(self, node):
try:
del self.graph[node]
except KeyError:
raise KeyError('node not in graph')
def has_node(self, node):
return node in self.graph
def neighbors(self, node):
return self.graph[node]
def adjecent(self, node1, node2):
if node2 in self.graph[node1] or node1 in self.graph[node2]:
return True
else:
return False
|
Add functions for adding a node, an edge, and defining a node
|
## Code Before:
from __future__ import print_function
from __future__ import unicode_literals
class Graph(object):
''' Create an empty graph. '''
def __init__(self):
self.graph = {}
return
def nodes():
return nodes
def edges():
return edges
def add_node(self, node):
self.graph.setdefault(node, [])
return
def add_edge(self, node1, node2):
return
def del_node(self, node):
try:
del self.graph[node]
except KeyError:
raise KeyError('node not in graph')
def has_node(self, node):
return node in self.graph
def neighbors(self, node):
return self.graph[node]
def adjecent(self, node1, node2):
if node2 in self.graph[node1] or node1 in self.graph[node2]:
return True
else:
return False
## Instruction:
Add functions for adding a node, an edge, and defining a node
## Code After:
from __future__ import print_function
from __future__ import unicode_literals
class Graph(object):
''' Create an empty graph. '''
def __init__(self):
self.graph = {}
return
def nodes(self):
return self.graph.keys()
def edges(self):
edge_list = []
for key, value in self.graph():
for item in value:
edge_list.append((key, item))
return edge_list
def add_node(self, node):
self.graph.setdefault(node, [])
def add_edge(self, node1, node2):
if node1 in self.graph:
self.graph.append(node2)
else:
self.graph[node1] = node2
def del_node(self, node):
try:
del self.graph[node]
except KeyError:
raise KeyError('node not in graph')
def has_node(self, node):
return node in self.graph
def neighbors(self, node):
return self.graph[node]
def adjecent(self, node1, node2):
if node2 in self.graph[node1] or node1 in self.graph[node2]:
return True
else:
return False
|
// ... existing code ...
self.graph = {}
return
def nodes(self):
return self.graph.keys()
def edges(self):
edge_list = []
for key, value in self.graph():
for item in value:
edge_list.append((key, item))
return edge_list
def add_node(self, node):
self.graph.setdefault(node, [])
def add_edge(self, node1, node2):
if node1 in self.graph:
self.graph.append(node2)
else:
self.graph[node1] = node2
def del_node(self, node):
try:
// ... rest of the code ...
|
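For contrast, an adjacency-list sketch of the same idea with the dictionary iteration spelled out; this is an illustration only, not a correction of the record (whose edges() calls self.graph() rather than iterating self.graph.items()):
class AdjacencyGraph(object):
    def __init__(self):
        self.graph = {}

    def add_node(self, node):
        self.graph.setdefault(node, [])

    def add_edge(self, node1, node2):
        self.graph.setdefault(node1, []).append(node2)
        self.graph.setdefault(node2, [])

    def edges(self):
        return [(src, dst)
                for src, dsts in self.graph.items()
                for dst in dsts]

g = AdjacencyGraph()
g.add_edge('a', 'b')
print(g.edges())  # [('a', 'b')]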
bb4a0ca8626f0287a5366e97313018fcc59bcf8f
|
demo/__init__.py
|
demo/__init__.py
|
__project__ = 'TemplateDemo'
__version__ = '0.0.0'
VERSION = "{0} v{1}".format(__project__, __version__)
|
from pkg_resources import DistributionNotFound, get_distribution
try:
__version__ = get_distribution('TemplateDemo').version
except DistributionNotFound:
__version__ = '(local)'
|
Deploy Travis CI build 1156 to GitHub
|
Deploy Travis CI build 1156 to GitHub
|
Python
|
mit
|
jacebrowning/template-python-demo
|
+ from pkg_resources import DistributionNotFound, get_distribution
- __project__ = 'TemplateDemo'
- __version__ = '0.0.0'
- VERSION = "{0} v{1}".format(__project__, __version__)
+ try:
+ __version__ = get_distribution('TemplateDemo').version
+ except DistributionNotFound:
+ __version__ = '(local)'
+
|
Deploy Travis CI build 1156 to GitHub
|
## Code Before:
__project__ = 'TemplateDemo'
__version__ = '0.0.0'
VERSION = "{0} v{1}".format(__project__, __version__)
## Instruction:
Deploy Travis CI build 1156 to GitHub
## Code After:
from pkg_resources import DistributionNotFound, get_distribution
try:
__version__ = get_distribution('TemplateDemo').version
except DistributionNotFound:
__version__ = '(local)'
|
...
from pkg_resources import DistributionNotFound, get_distribution
try:
__version__ = get_distribution('TemplateDemo').version
except DistributionNotFound:
__version__ = '(local)'
...
|
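The same installed-vs-local fallback is available in the standard library from Python 3.8 via importlib.metadata, an alternative to pkg_resources; the distribution name is the record's:
try:
    from importlib.metadata import PackageNotFoundError, version
    try:
        __version__ = version('TemplateDemo')
    except PackageNotFoundError:
        __version__ = '(local)'
except ImportError:
    __version__ = '(local)'  # Python < 3.8 without the backport

print(__version__)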
8605b07f2f5951f8a0b85d3d77baa1758723fb64
|
auth0/v2/authentication/users.py
|
auth0/v2/authentication/users.py
|
from .base import AuthenticationBase
class Users(AuthenticationBase):
def __init__(self, domain):
self.domain = domain
def userinfo(self, access_token):
return self.get(
url='https://%s/userinfo' % self.domain,
headers={'Authorization': 'Bearer %s' % access_token}
)
def tokeninfo(self, jwt):
return self.post(
url='https://%s/tokeninfo' % self.domain,
data={'id_token': jwt},
headers={'Content-Type: application/json'}
)
|
from .base import AuthenticationBase
class Users(AuthenticationBase):
"""Userinfo related endpoints.
Args:
domain (str): Your auth0 domain (e.g: username.auth0.com)
"""
def __init__(self, domain):
self.domain = domain
def userinfo(self, access_token):
"""Returns the user information based on the Auth0 access token.
Args:
access_token (str): Auth0 access token (obtained during login).
Returns:
The user profile.
"""
return self.get(
url='https://%s/userinfo' % self.domain,
headers={'Authorization': 'Bearer %s' % access_token}
)
def tokeninfo(self, jwt):
"""Returns user profile based on the user's jwt
Validates a JSON Web Token (signature and expiration) and returns the
user information associated with the user id (sub property) of
the token.
Args:
jwt (str): User's jwt
Returns:
The user profile.
"""
return self.post(
url='https://%s/tokeninfo' % self.domain,
data={'id_token': jwt},
headers={'Content-Type: application/json'}
)
|
Add docstrings to Users class
|
Add docstrings to Users class
|
Python
|
mit
|
auth0/auth0-python,auth0/auth0-python
|
from .base import AuthenticationBase
class Users(AuthenticationBase):
+ """Userinfo related endpoints.
+
+ Args:
+ domain (str): Your auth0 domain (e.g: username.auth0.com)
+ """
+
def __init__(self, domain):
self.domain = domain
def userinfo(self, access_token):
+
+ """Returns the user information based on the Auth0 access token.
+
+ Args:
+ access_token (str): Auth0 access token (obtained during login).
+
+ Returns:
+ The user profile.
+ """
+
return self.get(
url='https://%s/userinfo' % self.domain,
headers={'Authorization': 'Bearer %s' % access_token}
)
def tokeninfo(self, jwt):
+
+ """Returns user profile based on the user's jwt
+
+ Validates a JSON Web Token (signature and expiration) and returns the
+ user information associated with the user id (sub property) of
+ the token.
+
+ Args:
+ jwt (str): User's jwt
+
+ Returns:
+ The user profile.
+ """
+
return self.post(
url='https://%s/tokeninfo' % self.domain,
data={'id_token': jwt},
headers={'Content-Type: application/json'}
)
|
Add docstrings to Users class
|
## Code Before:
from .base import AuthenticationBase
class Users(AuthenticationBase):
def __init__(self, domain):
self.domain = domain
def userinfo(self, access_token):
return self.get(
url='https://%s/userinfo' % self.domain,
headers={'Authorization': 'Bearer %s' % access_token}
)
def tokeninfo(self, jwt):
return self.post(
url='https://%s/tokeninfo' % self.domain,
data={'id_token': jwt},
headers={'Content-Type: application/json'}
)
## Instruction:
Add docstrings to Users class
## Code After:
from .base import AuthenticationBase
class Users(AuthenticationBase):
"""Userinfo related endpoints.
Args:
domain (str): Your auth0 domain (e.g: username.auth0.com)
"""
def __init__(self, domain):
self.domain = domain
def userinfo(self, access_token):
"""Returns the user information based on the Auth0 access token.
Args:
access_token (str): Auth0 access token (obtained during login).
Returns:
The user profile.
"""
return self.get(
url='https://%s/userinfo' % self.domain,
headers={'Authorization': 'Bearer %s' % access_token}
)
def tokeninfo(self, jwt):
"""Returns user profile based on the user's jwt
Validates a JSON Web Token (signature and expiration) and returns the
user information associated with the user id (sub property) of
the token.
Args:
jwt (str): User's jwt
Returns:
The user profile.
"""
return self.post(
url='https://%s/tokeninfo' % self.domain,
data={'id_token': jwt},
headers={'Content-Type: application/json'}
)
|
// ... existing code ...
class Users(AuthenticationBase):
"""Userinfo related endpoints.
Args:
domain (str): Your auth0 domain (e.g: username.auth0.com)
"""
def __init__(self, domain):
self.domain = domain
def userinfo(self, access_token):
"""Returns the user information based on the Auth0 access token.
Args:
access_token (str): Auth0 access token (obtained during login).
Returns:
The user profile.
"""
return self.get(
url='https://%s/userinfo' % self.domain,
headers={'Authorization': 'Bearer %s' % access_token}
// ... modified code ...
)
def tokeninfo(self, jwt):
"""Returns user profile based on the user's jwt
Validates a JSON Web Token (signature and expiration) and returns the
user information associated with the user id (sub property) of
the token.
Args:
jwt (str): User's jwt
Returns:
The user profile.
"""
return self.post(
url='https://%s/tokeninfo' % self.domain,
data={'id_token': jwt},
// ... rest of the code ...
|
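The docstrings added above follow the Google style (Args:/Returns: sections), which tools such as Sphinx's napoleon extension can render; a minimal template:
def tokeninfo(jwt):
    """Return the user profile for a validated JSON Web Token.

    Args:
        jwt (str): The user's JWT.

    Returns:
        dict: The user profile associated with the token.
    """
    raise NotImplementedError  # documentation-only sketch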
a8726f9acf3d2a1b0287046d0ffb5236892c6535
|
tests/django_test_app/models.py
|
tests/django_test_app/models.py
|
from django.db import models
from semantic_version import django_fields as semver_fields
class VersionModel(models.Model):
version = semver_fields.VersionField(verbose_name='my version')
spec = semver_fields.SpecField(verbose_name='my spec')
class PartialVersionModel(models.Model):
partial = semver_fields.VersionField(partial=True, verbose_name='partial version')
optional = semver_fields.VersionField(verbose_name='optional version', blank=True, null=True)
optional_spec = semver_fields.SpecField(verbose_name='optional spec', blank=True, null=True)
class CoerceVersionModel(models.Model):
version = semver_fields.VersionField(verbose_name='my version', coerce=True)
partial = semver_fields.VersionField(verbose_name='partial version', coerce=True, partial=True)
|
try:
from django.db import models
django_loaded = True
except ImportError:
django_loaded = False
if django_loaded:
from semantic_version import django_fields as semver_fields
class VersionModel(models.Model):
version = semver_fields.VersionField(verbose_name='my version')
spec = semver_fields.SpecField(verbose_name='my spec')
class PartialVersionModel(models.Model):
partial = semver_fields.VersionField(partial=True, verbose_name='partial version')
optional = semver_fields.VersionField(verbose_name='optional version', blank=True, null=True)
optional_spec = semver_fields.SpecField(verbose_name='optional spec', blank=True, null=True)
class CoerceVersionModel(models.Model):
version = semver_fields.VersionField(verbose_name='my version', coerce=True)
partial = semver_fields.VersionField(verbose_name='partial version', coerce=True, partial=True)
|
Fix test running when Django isn't available.
|
tests: Fix test running when Django isn't available.
|
Python
|
bsd-2-clause
|
rbarrois/python-semanticversion,marcelometal/python-semanticversion,mhrivnak/python-semanticversion,pombredanne/python-semanticversion
|
+ try:
- from django.db import models
+ from django.db import models
- from semantic_version import django_fields as semver_fields
+ django_loaded = True
+ except ImportError:
+ django_loaded = False
+ if django_loaded:
+ from semantic_version import django_fields as semver_fields
- class VersionModel(models.Model):
- version = semver_fields.VersionField(verbose_name='my version')
- spec = semver_fields.SpecField(verbose_name='my spec')
- class PartialVersionModel(models.Model):
+ class VersionModel(models.Model):
- partial = semver_fields.VersionField(partial=True, verbose_name='partial version')
+ version = semver_fields.VersionField(verbose_name='my version')
+ spec = semver_fields.SpecField(verbose_name='my spec')
- optional = semver_fields.VersionField(verbose_name='optional version', blank=True, null=True)
- optional_spec = semver_fields.SpecField(verbose_name='optional spec', blank=True, null=True)
- class CoerceVersionModel(models.Model):
+ class PartialVersionModel(models.Model):
- version = semver_fields.VersionField(verbose_name='my version', coerce=True)
+ partial = semver_fields.VersionField(partial=True, verbose_name='partial version')
- partial = semver_fields.VersionField(verbose_name='partial version', coerce=True, partial=True)
+ optional = semver_fields.VersionField(verbose_name='optional version', blank=True, null=True)
+ optional_spec = semver_fields.SpecField(verbose_name='optional spec', blank=True, null=True)
+
+ class CoerceVersionModel(models.Model):
+ version = semver_fields.VersionField(verbose_name='my version', coerce=True)
+ partial = semver_fields.VersionField(verbose_name='partial version', coerce=True, partial=True)
+
|
Fix test running when Django isn't available.
|
## Code Before:
from django.db import models
from semantic_version import django_fields as semver_fields
class VersionModel(models.Model):
version = semver_fields.VersionField(verbose_name='my version')
spec = semver_fields.SpecField(verbose_name='my spec')
class PartialVersionModel(models.Model):
partial = semver_fields.VersionField(partial=True, verbose_name='partial version')
optional = semver_fields.VersionField(verbose_name='optional version', blank=True, null=True)
optional_spec = semver_fields.SpecField(verbose_name='optional spec', blank=True, null=True)
class CoerceVersionModel(models.Model):
version = semver_fields.VersionField(verbose_name='my version', coerce=True)
partial = semver_fields.VersionField(verbose_name='partial version', coerce=True, partial=True)
## Instruction:
Fix test running when Django isn't available.
## Code After:
try:
from django.db import models
django_loaded = True
except ImportError:
django_loaded = False
if django_loaded:
from semantic_version import django_fields as semver_fields
class VersionModel(models.Model):
version = semver_fields.VersionField(verbose_name='my version')
spec = semver_fields.SpecField(verbose_name='my spec')
class PartialVersionModel(models.Model):
partial = semver_fields.VersionField(partial=True, verbose_name='partial version')
optional = semver_fields.VersionField(verbose_name='optional version', blank=True, null=True)
optional_spec = semver_fields.SpecField(verbose_name='optional spec', blank=True, null=True)
class CoerceVersionModel(models.Model):
version = semver_fields.VersionField(verbose_name='my version', coerce=True)
partial = semver_fields.VersionField(verbose_name='partial version', coerce=True, partial=True)
|
# ... existing code ...
try:
from django.db import models
django_loaded = True
except ImportError:
django_loaded = False
if django_loaded:
from semantic_version import django_fields as semver_fields
class VersionModel(models.Model):
version = semver_fields.VersionField(verbose_name='my version')
spec = semver_fields.SpecField(verbose_name='my spec')
class PartialVersionModel(models.Model):
partial = semver_fields.VersionField(partial=True, verbose_name='partial version')
optional = semver_fields.VersionField(verbose_name='optional version', blank=True, null=True)
optional_spec = semver_fields.SpecField(verbose_name='optional spec', blank=True, null=True)
class CoerceVersionModel(models.Model):
version = semver_fields.VersionField(verbose_name='my version', coerce=True)
partial = semver_fields.VersionField(verbose_name='partial version', coerce=True, partial=True)
# ... rest of the code ...
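
A minimal, self-contained sketch of the import-guard idiom this commit applies, using only the standard library; optional_dep and do_something are hypothetical names standing in for Django and the guarded definitions.

try:
    import optional_dep  # hypothetical optional dependency
    DEP_LOADED = True
except ImportError:
    DEP_LOADED = False

if DEP_LOADED:
    # Definitions that need the dependency only execute when the import
    # above succeeded, so the module stays importable without it.
    def feature():
        return optional_dep.do_something()  # hypothetical call
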
|
d726efa1116f95ced28994c7c6bbcfe4cf703b05
|
wavvy/views.py
|
wavvy/views.py
|
from wavvy import app
from flask import Flask, url_for, render_template, request, session, escape
@app.route('/hello')
@app.route('/hello/<name>')
def hello(name=None):
return render_template('hello.html', name=name)
@app.route('/')
def index():
if session.get('logged_in', False):
return 'Logged in as {}'.format(escape(session['username']))
return 'You are not logged in.'
@app.route('/login', methods=['POST', 'GET'])
def login():
error = None
if request.method == 'POST':
session['logged_in'] = True
session['username'] = request.form['username']
password = escape(request.form['password'])
return 'Validating a login! U:{} P:{}'.format(escape(session['username']), password)
return render_template('login.html', error=error)
@app.route('/logout')
def logout():
session['logged_in'] = False
if 'username' in session:
del session['username']
return 'You are logged out.'
|
from wavvy import app
from flask import Flask, url_for, render_template, request, session, escape
def clear_session(s):
if 'username' in s:
del s['username']
s['logged_in'] = False
@app.route('/hello')
@app.route('/hello/<name>')
def hello(name=None):
return render_template('hello.html', name=name)
@app.route('/')
def index():
if session.get('logged_in', False):
return 'Logged in as {}'.format(escape(session['username']))
return 'You are not logged in.'
@app.route('/login', methods=['POST', 'GET'])
def login():
error = None
if request.method == 'POST':
session['logged_in'] = True
session['username'] = request.form['username']
password = escape(request.form['password'])
return 'Validating a login! U:{} P:{}'.format(escape(session['username']), password)
return render_template('login.html', error=error)
@app.route('/logout')
def logout():
clear_session(session)
return 'You are logged out.'
|
Generalize the logout a bit
|
Generalize the logout a bit
This is on the road to removing auth from this file.
|
Python
|
mit
|
john-patterson/wavvy,john-patterson/wavvy
|
from wavvy import app
from flask import Flask, url_for, render_template, request, session, escape
+
+
+ def clear_session(s):
+ if 'username' in s:
+ del s['username']
+ s['logged_in'] = False
@app.route('/hello')
@app.route('/hello/<name>')
def hello(name=None):
return render_template('hello.html', name=name)
@app.route('/')
def index():
if session.get('logged_in', False):
return 'Logged in as {}'.format(escape(session['username']))
return 'You are not logged in.'
@app.route('/login', methods=['POST', 'GET'])
def login():
error = None
if request.method == 'POST':
session['logged_in'] = True
session['username'] = request.form['username']
password = escape(request.form['password'])
return 'Validating a login! U:{} P:{}'.format(escape(session['username']), password)
return render_template('login.html', error=error)
@app.route('/logout')
def logout():
+ clear_session(session)
- session['logged_in'] = False
- if 'username' in session:
- del session['username']
return 'You are logged out.'
|
Generalize the logout a bit
|
## Code Before:
from wavvy import app
from flask import Flask, url_for, render_template, request, session, escape
@app.route('/hello')
@app.route('/hello/<name>')
def hello(name=None):
return render_template('hello.html', name=name)
@app.route('/')
def index():
if session.get('logged_in', False):
return 'Logged in as {}'.format(escape(session['username']))
return 'You are not logged in.'
@app.route('/login', methods=['POST', 'GET'])
def login():
error = None
if request.method == 'POST':
session['logged_in'] = True
session['username'] = request.form['username']
password = escape(request.form['password'])
return 'Validating a login! U:{} P:{}'.format(escape(session['username']), password)
return render_template('login.html', error=error)
@app.route('/logout')
def logout():
session['logged_in'] = False
if 'username' in session:
del session['username']
return 'You are logged out.'
## Instruction:
Generalize the logout a bit
## Code After:
from wavvy import app
from flask import Flask, url_for, render_template, request, session, escape
def clear_session(s):
if 'username' in s:
del s['username']
s['logged_in'] = False
@app.route('/hello')
@app.route('/hello/<name>')
def hello(name=None):
return render_template('hello.html', name=name)
@app.route('/')
def index():
if session.get('logged_in', False):
return 'Logged in as {}'.format(escape(session['username']))
return 'You are not logged in.'
@app.route('/login', methods=['POST', 'GET'])
def login():
error = None
if request.method == 'POST':
session['logged_in'] = True
session['username'] = request.form['username']
password = escape(request.form['password'])
return 'Validating a login! U:{} P:{}'.format(escape(session['username']), password)
return render_template('login.html', error=error)
@app.route('/logout')
def logout():
clear_session(session)
return 'You are logged out.'
|
# ... existing code ...
from wavvy import app
from flask import Flask, url_for, render_template, request, session, escape
def clear_session(s):
if 'username' in s:
del s['username']
s['logged_in'] = False
@app.route('/hello')
# ... modified code ...
@app.route('/logout')
def logout():
clear_session(session)
return 'You are logged out.'
# ... rest of the code ...
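
Because the new clear_session helper only assumes a mapping interface, it can be exercised without a real Flask session object; a small sketch under that assumption:

def clear_session(s):
    if 'username' in s:
        del s['username']
    s['logged_in'] = False

# A plain dict stands in for flask.session here.
fake_session = {'username': 'alice', 'logged_in': True}
clear_session(fake_session)
assert fake_session == {'logged_in': False}
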
|
0003ef7fe3d59c4bda034dee334d45b6d7a2622d
|
pyvm_test.py
|
pyvm_test.py
|
import pyvm
import unittest
class PyVMTest(unittest.TestCase):
def setUp(self):
self.vm = pyvm.PythonVM()
def test_load_const_num(self):
self.assertEqual(
10,
self.vm.eval('10')
)
def test_load_const_str(self):
self.assertEqual(
"hoge",
self.vm.eval('"hoge"')
)
if __name__ == '__main__':
unittest.main()
|
import pyvm
import unittest
class PyVMTest(unittest.TestCase):
def setUp(self):
self.vm = pyvm.PythonVM()
def test_load_const_num(self):
self.assertEqual(
10,
self.vm.eval('10')
)
def test_load_const_num_float(self):
self.assertEqual(
10.55,
self.vm.eval('10.55')
)
def test_load_const_str(self):
self.assertEqual(
"hoge",
self.vm.eval('"hoge"')
)
if __name__ == '__main__':
unittest.main()
|
Add test of storing float
|
Add test of storing float
|
Python
|
mit
|
utgwkk/tiny-python-vm
|
import pyvm
import unittest
class PyVMTest(unittest.TestCase):
def setUp(self):
self.vm = pyvm.PythonVM()
def test_load_const_num(self):
self.assertEqual(
10,
self.vm.eval('10')
)
+ def test_load_const_num_float(self):
+ self.assertEqual(
+ 10.55,
+ self.vm.eval('10.55')
+ )
+
def test_load_const_str(self):
self.assertEqual(
"hoge",
self.vm.eval('"hoge"')
)
if __name__ == '__main__':
unittest.main()
|
Add test of storing float
|
## Code Before:
import pyvm
import unittest
class PyVMTest(unittest.TestCase):
def setUp(self):
self.vm = pyvm.PythonVM()
def test_load_const_num(self):
self.assertEqual(
10,
self.vm.eval('10')
)
def test_load_const_str(self):
self.assertEqual(
"hoge",
self.vm.eval('"hoge"')
)
if __name__ == '__main__':
unittest.main()
## Instruction:
Add test of storing float
## Code After:
import pyvm
import unittest
class PyVMTest(unittest.TestCase):
def setUp(self):
self.vm = pyvm.PythonVM()
def test_load_const_num(self):
self.assertEqual(
10,
self.vm.eval('10')
)
def test_load_const_num_float(self):
self.assertEqual(
10.55,
self.vm.eval('10.55')
)
def test_load_const_str(self):
self.assertEqual(
"hoge",
self.vm.eval('"hoge"')
)
if __name__ == '__main__':
unittest.main()
|
// ... existing code ...
self.vm.eval('10')
)
def test_load_const_num_float(self):
self.assertEqual(
10.55,
self.vm.eval('10.55')
)
def test_load_const_str(self):
self.assertEqual(
"hoge",
// ... rest of the code ...
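
Exact equality works for the literal 10.55 because the same float round-trips through evaluation, but for computed floats a tolerance-based assertion is safer; a hedged sketch of that distinction:

import unittest

class FloatComparison(unittest.TestCase):
    def test_computed_float(self):
        # 0.1 + 0.2 is not exactly 0.3 under IEEE 754 binary floats.
        self.assertNotEqual(0.1 + 0.2, 0.3)
        self.assertAlmostEqual(0.1 + 0.2, 0.3, places=7)

if __name__ == '__main__':
    unittest.main()
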
|
154632b0ab27d36b63c302a550589a182a319ef8
|
distance_matrix.py
|
distance_matrix.py
|
from GamTools import corr
import numpy as np
import argparse
parser = argparse.ArgumentParser(description='Calculate coverage over different window sizes for a list of bam files.')
parser.add_argument('npz_frequencies_file', help='An npz file containing co-segregation frequencies to convert to correlations')
args = parser.parse_args()
correlation_file = args.npz_frequencies_file.split('.')
correlation_file = correlation_file[0] + '.correlations.npz'
freqs = np.load(args.npz_frequencies_file)['freqs']
def flatten_freqs(freqs):
freqs_shape = freqs.shape
flat_shape = ( freqs_shape[0] * freqs_shape[1], freqs_shape[2], freqs_shape[3])
return freqs.reshape(flat_shape)
distances = np.array(map(corr, flatten_freqs(freqs))).reshape(freqs.shape[:2])
np.save_compressed(correlation_file, corr=distances)
|
from GamTools import corr
import numpy as np
import argparse
parser = argparse.ArgumentParser(description='Calculate coverage over different window sizes for a list of bam files.')
parser.add_argument('npz_frequencies_file', help='An npz file containing co-segregation frequencies to convert to correlations')
args = parser.parse_args()
correlation_file = args.npz_frequencies_file.split('.')
correlation_file[correlation_file.index('chrom')] = "corr"
correlation_file = '.'.join(correlation_file)
freqs = np.load(args.npz_frequencies_file)['freqs']
def flatten_freqs(freqs):
freqs_shape = freqs.shape
flat_shape = ( freqs_shape[0] * freqs_shape[1], freqs_shape[2], freqs_shape[3])
return freqs.reshape(flat_shape)
distances = np.array(map(corr, flatten_freqs(freqs))).reshape(freqs.shape[:2])
np.savez_compressed(correlation_file, corr=distances)
|
Change how/where to save the file
|
Change how/where to save the file
|
Python
|
apache-2.0
|
pombo-lab/gamtools,pombo-lab/gamtools
|
from GamTools import corr
import numpy as np
import argparse
parser = argparse.ArgumentParser(description='Calculate coverage over different window sizes for a list of bam files.')
parser.add_argument('npz_frequencies_file', help='An npz file containing co-segregation frequencies to convert to correlations')
args = parser.parse_args()
correlation_file = args.npz_frequencies_file.split('.')
- correlation_file = correlation_file[0] + '.correlations.npz'
+ correlation_file[correlation_file.index('chrom')] = "corr"
+ correlation_file = '.'.join(correlation_file)
freqs = np.load(args.npz_frequencies_file)['freqs']
def flatten_freqs(freqs):
freqs_shape = freqs.shape
flat_shape = ( freqs_shape[0] * freqs_shape[1], freqs_shape[2], freqs_shape[3])
return freqs.reshape(flat_shape)
distances = np.array(map(corr, flatten_freqs(freqs))).reshape(freqs.shape[:2])
- np.save_compressed(correlation_file, corr=distances)
+ np.savez_compressed(correlation_file, corr=distances)
|
Change how/where to save the file
|
## Code Before:
from GamTools import corr
import numpy as np
import argparse
parser = argparse.ArgumentParser(description='Calculate coverage over different window sizes for a list of bam files.')
parser.add_argument('npz_frequencies_file', help='An npz file containing co-segregation frequencies to convert to correlations')
args = parser.parse_args()
correlation_file = args.npz_frequencies_file.split('.')
correlation_file = correlation_file[0] + '.correlations.npz'
freqs = np.load(args.npz_frequencies_file)['freqs']
def flatten_freqs(freqs):
freqs_shape = freqs.shape
flat_shape = ( freqs_shape[0] * freqs_shape[1], freqs_shape[2], freqs_shape[3])
return freqs.reshape(flat_shape)
distances = np.array(map(corr, flatten_freqs(freqs))).reshape(freqs.shape[:2])
np.save_compressed(correlation_file, corr=distances)
## Instruction:
Change how/where to save the file
## Code After:
from GamTools import corr
import numpy as np
import argparse
parser = argparse.ArgumentParser(description='Calculate coverage over different window sizes for a list of bam files.')
parser.add_argument('npz_frequencies_file', help='An npz file containing co-segregation frequencies to convert to correlations')
args = parser.parse_args()
correlation_file = args.npz_frequencies_file.split('.')
correlation_file[correlation_file.index('chrom')] = "corr"
correlation_file = '.'.join(correlation_file)
freqs = np.load(args.npz_frequencies_file)['freqs']
def flatten_freqs(freqs):
freqs_shape = freqs.shape
flat_shape = ( freqs_shape[0] * freqs_shape[1], freqs_shape[2], freqs_shape[3])
return freqs.reshape(flat_shape)
distances = np.array(map(corr, flatten_freqs(freqs))).reshape(freqs.shape[:2])
np.savez_compressed(correlation_file, corr=distances)
|
...
args = parser.parse_args()
correlation_file = args.npz_frequencies_file.split('.')
correlation_file[correlation_file.index('chrom')] = "corr"
correlation_file = '.'.join(correlation_file)
freqs = np.load(args.npz_frequencies_file)['freqs']
...
distances = np.array(map(corr, flatten_freqs(freqs))).reshape(freqs.shape[:2])
np.savez_compressed(correlation_file, corr=distances)
...
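
The renamed output keeps every other dotted component of the input filename; a sketch of the substitution (note that list.index raises ValueError when no 'chrom' component exists, and np.savez_compressed appends '.npz' to a name that lacks it):

parts = 'sample.chrom.freqs.npz'.split('.')   # hypothetical input name
parts[parts.index('chrom')] = 'corr'          # ValueError if 'chrom' is absent
print('.'.join(parts))                        # -> sample.corr.freqs.npz
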
|
9ff75ff858681665141650d4e1ef310265956f35
|
tools/workplace_status.py
|
tools/workplace_status.py
|
from __future__ import print_function
from subprocess import Popen, PIPE
dirty = Popen(["git", "diff-index", "--quiet", "HEAD"], stdout=PIPE).wait() != 0
commit_process = Popen(["git", "describe", "--tags", "--abbrev=0"], stdout=PIPE)
(version, err) = commit_process.communicate()
print("STABLE_GIT_VERSION %s%s" % (
version.decode("utf-8").replace("\n", ""),
"-dirty" if dirty else "")
)
|
from __future__ import print_function
from subprocess import Popen, PIPE
dirty = Popen(["git", "diff-index", "--quiet", "HEAD"], stdout=PIPE).wait() != 0
commit_process = Popen(["git", "describe", "--always", "--tags", "--abbrev=0"], stdout=PIPE)
(version, err) = commit_process.communicate()
print("STABLE_GIT_VERSION %s%s" % (
version.decode("utf-8").replace("\n", ""),
"-dirty" if dirty else "")
)
|
Make git describe --always return a value
|
Make git describe --always return a value
This means that the latest commit will be stamped if there are no tags.
|
Python
|
apache-2.0
|
bazelbuild/bazel-watcher,bazelbuild/bazel-watcher,bazelbuild/bazel-watcher,bazelbuild/bazel-watcher
|
from __future__ import print_function
from subprocess import Popen, PIPE
dirty = Popen(["git", "diff-index", "--quiet", "HEAD"], stdout=PIPE).wait() != 0
- commit_process = Popen(["git", "describe", "--tags", "--abbrev=0"], stdout=PIPE)
+ commit_process = Popen(["git", "describe", "--always", "--tags", "--abbrev=0"], stdout=PIPE)
(version, err) = commit_process.communicate()
print("STABLE_GIT_VERSION %s%s" % (
version.decode("utf-8").replace("\n", ""),
"-dirty" if dirty else "")
)
|
Make git describe --always return a value
|
## Code Before:
from __future__ import print_function
from subprocess import Popen, PIPE
dirty = Popen(["git", "diff-index", "--quiet", "HEAD"], stdout=PIPE).wait() != 0
commit_process = Popen(["git", "describe", "--tags", "--abbrev=0"], stdout=PIPE)
(version, err) = commit_process.communicate()
print("STABLE_GIT_VERSION %s%s" % (
version.decode("utf-8").replace("\n", ""),
"-dirty" if dirty else "")
)
## Instruction:
Make git describe --always return a value
## Code After:
from __future__ import print_function
from subprocess import Popen, PIPE
dirty = Popen(["git", "diff-index", "--quiet", "HEAD"], stdout=PIPE).wait() != 0
commit_process = Popen(["git", "describe", "--always", "--tags", "--abbrev=0"], stdout=PIPE)
(version, err) = commit_process.communicate()
print("STABLE_GIT_VERSION %s%s" % (
version.decode("utf-8").replace("\n", ""),
"-dirty" if dirty else "")
)
|
# ... existing code ...
dirty = Popen(["git", "diff-index", "--quiet", "HEAD"], stdout=PIPE).wait() != 0
commit_process = Popen(["git", "describe", "--always", "--tags", "--abbrev=0"], stdout=PIPE)
(version, err) = commit_process.communicate()
print("STABLE_GIT_VERSION %s%s" % (
# ... rest of the code ...
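
With --always, git describe falls back to an abbreviated commit hash when no tag is reachable, so the stamp is never empty; a sketch of the behaviour in an untagged repository:

from subprocess import Popen, PIPE

# Without --always this would fail in a tag-less repository
# (git reports "fatal: No names found, cannot describe anything.").
proc = Popen(["git", "describe", "--always", "--tags", "--abbrev=0"], stdout=PIPE)
version, _ = proc.communicate()
print(version.decode("utf-8").strip())  # e.g. a short hash such as 'd726efa'
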
|
2f635e890414f777fbe3ddde1aea74ab13558313
|
llvmlite/tests/test_dylib.py
|
llvmlite/tests/test_dylib.py
|
import unittest
from . import TestCase
from llvmlite import binding as llvm
from llvmlite.binding import dylib
import platform
class TestDylib(TestCase):
def setUp(self):
llvm.initialize()
llvm.initialize_native_target()
llvm.initialize_native_asmprinter()
def test_bad_library(self):
with self.assertRaises(Exception) as context:
dylib.load_library_permanently("zzzasdkf;jasd;l")
system = platform.system()
if system == "Linux":
self.assertTrue('zzzasdkf;jasd;l: cannot open shared object file: No such file or directory'
in str(context.exception))
elif system == "Darwin":
self.assertTrue('dlopen(zzzasdkf;jasd;l, 9): image not found'
in str(context.exception))
|
from . import TestCase
from llvmlite import binding as llvm
from llvmlite.binding import dylib
import platform
from ctypes.util import find_library
import unittest
@unittest.skipUnless(platform.system() in {"Linux", "Darwin"}, "Unsupported test for current OS")
class TestDylib(TestCase):
def setUp(self):
llvm.initialize()
llvm.initialize_native_target()
llvm.initialize_native_asmprinter()
self.system = platform.system()
def test_bad_library(self):
with self.assertRaises(Exception) as context:
dylib.load_library_permanently("zzzasdkf;jasd;l")
if self.system == "Linux":
self.assertTrue('zzzasdkf;jasd;l: cannot open shared object file: No such file or directory'
in str(context.exception))
elif self.system == "Darwin":
self.assertTrue('dlopen(zzzasdkf;jasd;l, 9): image not found'
in str(context.exception))
def test_libm(self):
try:
if self.system == "Linux":
libm = find_library("m")
elif self.system == "Darwin":
libm = find_library("libm")
dylib.load_library_permanently(libm)
except Exception:
self.fail("Valid call to link library should not fail.")
|
Add tests to check loading library.
|
Add tests to check loading library.
|
Python
|
bsd-2-clause
|
m-labs/llvmlite,pitrou/llvmlite,ssarangi/llvmlite,m-labs/llvmlite,markdewing/llvmlite,pitrou/llvmlite,numba/llvmlite,markdewing/llvmlite,sklam/llvmlite,sklam/llvmlite,pitrou/llvmlite,numba/llvmlite,ssarangi/llvmlite,markdewing/llvmlite,squisher/llvmlite,ssarangi/llvmlite,m-labs/llvmlite,numba/llvmlite,numba/llvmlite,squisher/llvmlite,squisher/llvmlite,sklam/llvmlite,ssarangi/llvmlite,sklam/llvmlite,squisher/llvmlite,markdewing/llvmlite,m-labs/llvmlite,pitrou/llvmlite
|
- import unittest
from . import TestCase
from llvmlite import binding as llvm
from llvmlite.binding import dylib
import platform
+ from ctypes.util import find_library
+ import unittest
-
+ @unittest.skipUnless(platform.system() in {"Linux", "Darwin"}, "Unsupported test for current OS")
class TestDylib(TestCase):
-
def setUp(self):
llvm.initialize()
llvm.initialize_native_target()
llvm.initialize_native_asmprinter()
+ self.system = platform.system()
def test_bad_library(self):
with self.assertRaises(Exception) as context:
dylib.load_library_permanently("zzzasdkf;jasd;l")
- system = platform.system()
- if system == "Linux":
+ if self.system == "Linux":
self.assertTrue('zzzasdkf;jasd;l: cannot open shared object file: No such file or directory'
in str(context.exception))
- elif system == "Darwin":
+ elif self.system == "Darwin":
self.assertTrue('dlopen(zzzasdkf;jasd;l, 9): image not found'
in str(context.exception))
+ def test_libm(self):
+ try:
+ if self.system == "Linux":
+ libm = find_library("m")
+ elif self.system == "Darwin":
+ libm = find_library("libm")
+ dylib.load_library_permanently(libm)
+ except Exception:
+ self.fail("Valid call to link library should not fail.")
+
|
Add tests to check loading library.
|
## Code Before:
import unittest
from . import TestCase
from llvmlite import binding as llvm
from llvmlite.binding import dylib
import platform
class TestDylib(TestCase):
def setUp(self):
llvm.initialize()
llvm.initialize_native_target()
llvm.initialize_native_asmprinter()
def test_bad_library(self):
with self.assertRaises(Exception) as context:
dylib.load_library_permanently("zzzasdkf;jasd;l")
system = platform.system()
if system == "Linux":
self.assertTrue('zzzasdkf;jasd;l: cannot open shared object file: No such file or directory'
in str(context.exception))
elif system == "Darwin":
self.assertTrue('dlopen(zzzasdkf;jasd;l, 9): image not found'
in str(context.exception))
## Instruction:
Add tests to check loading library.
## Code After:
from . import TestCase
from llvmlite import binding as llvm
from llvmlite.binding import dylib
import platform
from ctypes.util import find_library
import unittest
@unittest.skipUnless(platform.system() in {"Linux", "Darwin"}, "Unsupported test for current OS")
class TestDylib(TestCase):
def setUp(self):
llvm.initialize()
llvm.initialize_native_target()
llvm.initialize_native_asmprinter()
self.system = platform.system()
def test_bad_library(self):
with self.assertRaises(Exception) as context:
dylib.load_library_permanently("zzzasdkf;jasd;l")
if self.system == "Linux":
self.assertTrue('zzzasdkf;jasd;l: cannot open shared object file: No such file or directory'
in str(context.exception))
elif self.system == "Darwin":
self.assertTrue('dlopen(zzzasdkf;jasd;l, 9): image not found'
in str(context.exception))
def test_libm(self):
try:
if self.system == "Linux":
libm = find_library("m")
elif self.system == "Darwin":
libm = find_library("libm")
dylib.load_library_permanently(libm)
except Exception:
self.fail("Valid call to link library should not fail.")
|
// ... existing code ...
from . import TestCase
from llvmlite import binding as llvm
from llvmlite.binding import dylib
import platform
from ctypes.util import find_library
import unittest
@unittest.skipUnless(platform.system() in {"Linux", "Darwin"}, "Unsupported test for current OS")
class TestDylib(TestCase):
def setUp(self):
llvm.initialize()
llvm.initialize_native_target()
llvm.initialize_native_asmprinter()
self.system = platform.system()
def test_bad_library(self):
with self.assertRaises(Exception) as context:
dylib.load_library_permanently("zzzasdkf;jasd;l")
if self.system == "Linux":
self.assertTrue('zzzasdkf;jasd;l: cannot open shared object file: No such file or directory'
in str(context.exception))
elif self.system == "Darwin":
self.assertTrue('dlopen(zzzasdkf;jasd;l, 9): image not found'
in str(context.exception))
def test_libm(self):
try:
if self.system == "Linux":
libm = find_library("m")
elif self.system == "Darwin":
libm = find_library("libm")
dylib.load_library_permanently(libm)
except Exception:
self.fail("Valid call to link library should not fail.")
// ... rest of the code ...
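
ctypes.util.find_library resolves a base library name to a platform-specific filename and returns None when nothing matches, which is what lets the new test stay OS-agnostic; a sketch:

from ctypes.util import find_library

print(find_library("m"))          # e.g. 'libm.so.6' on Linux
print(find_library("nosuchlib"))  # -> None when no matching library is found
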
|
72064e373e6b13f5847199aeb8116ab1708523b2
|
astroquery/cadc/tests/setup_package.py
|
astroquery/cadc/tests/setup_package.py
|
from __future__ import absolute_import
import os
# setup paths to the test data
# can specify a single file or a list of files
def get_package_data():
paths = [os.path.join('data', '*.vot'),
os.path.join('data', '*.xml'),
os.path.join('data', '*.pem'),
] # etc, add other extensions
# you can also enlist files individually by names
# finally construct and return a dict for the sub module
return {'astroquery.cadc.tests': paths}
|
from __future__ import absolute_import
import os
# setup paths to the test data
# can specify a single file or a list of files
def get_package_data():
paths = [os.path.join('data', '*.vot'),
os.path.join('data', '*.xml'),
os.path.join('data', '*.pem'),
os.path.join('data', '*.fits'),
] # etc, add other extensions
# you can also enlist files individually by names
# finally construct and return a dict for the sub module
return {'astroquery.cadc.tests': paths}
|
Add fits file to package build
|
Add fits file to package build
|
Python
|
bsd-3-clause
|
imbasimba/astroquery,ceb8/astroquery,ceb8/astroquery,imbasimba/astroquery
|
from __future__ import absolute_import
import os
# setup paths to the test data
# can specify a single file or a list of files
def get_package_data():
paths = [os.path.join('data', '*.vot'),
os.path.join('data', '*.xml'),
os.path.join('data', '*.pem'),
+ os.path.join('data', '*.fits'),
] # etc, add other extensions
# you can also enlist files individually by names
# finally construct and return a dict for the sub module
return {'astroquery.cadc.tests': paths}
|
Add fits file to package build
|
## Code Before:
from __future__ import absolute_import
import os
# setup paths to the test data
# can specify a single file or a list of files
def get_package_data():
paths = [os.path.join('data', '*.vot'),
os.path.join('data', '*.xml'),
os.path.join('data', '*.pem'),
] # etc, add other extensions
# you can also enlist files individually by names
# finally construct and return a dict for the sub module
return {'astroquery.cadc.tests': paths}
## Instruction:
Add fits file to package build
## Code After:
from __future__ import absolute_import
import os
# setup paths to the test data
# can specify a single file or a list of files
def get_package_data():
paths = [os.path.join('data', '*.vot'),
os.path.join('data', '*.xml'),
os.path.join('data', '*.pem'),
os.path.join('data', '*.fits'),
] # etc, add other extensions
# you can also enlist files individually by names
# finally construct and return a dict for the sub module
return {'astroquery.cadc.tests': paths}
|
// ... existing code ...
paths = [os.path.join('data', '*.vot'),
os.path.join('data', '*.xml'),
os.path.join('data', '*.pem'),
os.path.join('data', '*.fits'),
] # etc, add other extensions
# you can also enlist files individually by names
# finally construct and return a dict for the sub module
// ... rest of the code ...
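
Each entry in paths is a glob that the build machinery expands relative to the package directory; a sketch of the matching rule using fnmatch (filenames below are hypothetical):

import fnmatch

files = ['data/a.vot', 'data/b.xml', 'data/c.fits', 'data/d.txt']
print([f for f in files if fnmatch.fnmatch(f, 'data/*.fits')])  # -> ['data/c.fits']
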
|
a3c49c490ffe103f759b935bae31c37c05d26e81
|
tests/settings.py
|
tests/settings.py
|
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'formtools',
'tests.wizard.wizardtests',
]
SECRET_KEY = 'spam-spam-spam-spam'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'spam-and-eggs'
}
}
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
)
|
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': '/tmp/django-formtools-tests.db',
}
}
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'formtools',
'tests.wizard.wizardtests',
]
SECRET_KEY = 'spam-spam-spam-spam'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'spam-and-eggs'
}
}
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
)
|
Use a filesystem db and add the sites app to fix a test failure.
|
Use a filesystem db and add the sites app to fix a test failure.
|
Python
|
bsd-3-clause
|
gchp/django-formtools,thenewguy/django-formtools,lastfm/django-formtools,barseghyanartur/django-formtools,thenewguy/django-formtools,barseghyanartur/django-formtools,gchp/django-formtools,lastfm/django-formtools
|
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
- 'NAME': ':memory:',
+ 'NAME': '/tmp/django-formtools-tests.db',
}
}
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
+ 'django.contrib.sites',
'formtools',
'tests.wizard.wizardtests',
]
SECRET_KEY = 'spam-spam-spam-spam'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'spam-and-eggs'
}
}
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
)
|
Use a filesystem db and add the sites app to fix a test failure.
|
## Code Before:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'formtools',
'tests.wizard.wizardtests',
]
SECRET_KEY = 'spam-spam-spam-spam'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'spam-and-eggs'
}
}
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
)
## Instruction:
Use a filesystem db and add the sites app to fix a test failure.
## Code After:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': '/tmp/django-formtools-tests.db',
}
}
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'formtools',
'tests.wizard.wizardtests',
]
SECRET_KEY = 'spam-spam-spam-spam'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'spam-and-eggs'
}
}
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
)
|
...
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': '/tmp/django-formtools-tests.db',
}
}
...
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'formtools',
'tests.wizard.wizardtests',
]
...
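
One likely reason ':memory:' fails here: every sqlite3 connection to ':memory:' gets its own private database, so state created on one connection is invisible to another, while a file-backed database is shared; a sketch of that behaviour:

import sqlite3

a = sqlite3.connect(':memory:')
b = sqlite3.connect(':memory:')
a.execute('CREATE TABLE t (x INTEGER)')
# b sees an empty schema because its ':memory:' database is separate.
print(b.execute("SELECT name FROM sqlite_master").fetchall())  # -> []
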
|
94e7fb9821d904dba19fee1ca1d129259f33204e
|
skimage/draw/__init__.py
|
skimage/draw/__init__.py
|
from ._draw import line, polygon, ellipse, ellipse_perimeter, \
circle, circle_perimeter, set_color, bresenham
|
from ._draw import line, polygon, ellipse, ellipse_perimeter, \
circle, circle_perimeter, set_color, bresenham
__all__ = ['line',
'polygon',
'ellipse',
'ellipse_perimeter',
'circle',
'circle_perimeter',
'set_color',
'bresenham']
|
Add __all__ to draw package
|
Add __all__ to draw package
|
Python
|
bsd-3-clause
|
keflavich/scikit-image,michaelpacer/scikit-image,WarrenWeckesser/scikits-image,chintak/scikit-image,robintw/scikit-image,michaelaye/scikit-image,emon10005/scikit-image,chriscrosscutler/scikit-image,GaZ3ll3/scikit-image,michaelaye/scikit-image,emon10005/scikit-image,ajaybhat/scikit-image,bennlich/scikit-image,vighneshbirodkar/scikit-image,rjeli/scikit-image,Hiyorimi/scikit-image,SamHames/scikit-image,newville/scikit-image,juliusbierk/scikit-image,youprofit/scikit-image,keflavich/scikit-image,Midafi/scikit-image,ClinicalGraphics/scikit-image,pratapvardhan/scikit-image,newville/scikit-image,ofgulban/scikit-image,blink1073/scikit-image,WarrenWeckesser/scikits-image,oew1v07/scikit-image,chriscrosscutler/scikit-image,vighneshbirodkar/scikit-image,chintak/scikit-image,warmspringwinds/scikit-image,Britefury/scikit-image,vighneshbirodkar/scikit-image,almarklein/scikit-image,michaelpacer/scikit-image,rjeli/scikit-image,jwiggins/scikit-image,robintw/scikit-image,blink1073/scikit-image,chintak/scikit-image,ClinicalGraphics/scikit-image,warmspringwinds/scikit-image,ofgulban/scikit-image,chintak/scikit-image,ajaybhat/scikit-image,oew1v07/scikit-image,paalge/scikit-image,bennlich/scikit-image,dpshelio/scikit-image,SamHames/scikit-image,Midafi/scikit-image,paalge/scikit-image,rjeli/scikit-image,paalge/scikit-image,almarklein/scikit-image,SamHames/scikit-image,jwiggins/scikit-image,SamHames/scikit-image,youprofit/scikit-image,GaZ3ll3/scikit-image,bsipocz/scikit-image,almarklein/scikit-image,ofgulban/scikit-image,dpshelio/scikit-image,Hiyorimi/scikit-image,almarklein/scikit-image,juliusbierk/scikit-image,bsipocz/scikit-image,Britefury/scikit-image,pratapvardhan/scikit-image
|
from ._draw import line, polygon, ellipse, ellipse_perimeter, \
circle, circle_perimeter, set_color, bresenham
+ __all__ = ['line',
+ 'polygon',
+ 'ellipse',
+ 'ellipse_perimeter',
+ 'circle',
+ 'circle_perimeter',
+ 'set_color',
+ 'bresenham']
+
|
Add __all__ to draw package
|
## Code Before:
from ._draw import line, polygon, ellipse, ellipse_perimeter, \
circle, circle_perimeter, set_color, bresenham
## Instruction:
Add __all__ to draw package
## Code After:
from ._draw import line, polygon, ellipse, ellipse_perimeter, \
circle, circle_perimeter, set_color, bresenham
__all__ = ['line',
'polygon',
'ellipse',
'ellipse_perimeter',
'circle',
'circle_perimeter',
'set_color',
'bresenham']
|
// ... existing code ...
from ._draw import line, polygon, ellipse, ellipse_perimeter, \
circle, circle_perimeter, set_color, bresenham
__all__ = ['line',
'polygon',
'ellipse',
'ellipse_perimeter',
'circle',
'circle_perimeter',
'set_color',
'bresenham']
// ... rest of the code ...
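
__all__ defines which names a star-import of the package exports; a self-contained sketch using a throwaway module rather than skimage itself:

import sys
import types

mod = types.ModuleType('demo')
mod.line, mod._private = 1, 2
mod.__all__ = ['line']
sys.modules['demo'] = mod

ns = {}
exec('from demo import *', ns)
print('line' in ns, '_private' in ns)  # -> True False
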
|
a69e8d0d179f12fd42eadd85eca8e0c968d67c91
|
tests/runTests.py
|
tests/runTests.py
|
import os
import os.path
import configparser
import shutil
import subprocess
# Setup
print("Setting up...")
if os.path.isfile("../halite.ini"):
shutil.copyfile("../halite.ini", "temp.ini")
shutil.copyfile("tests.ini", "../halite.ini")
parser = configparser.ConfigParser()
parser.read("../halite.ini")
# Website tests
print("Beginning website backend tests")
os.system("mysql -u "+parser["database"]["username"]+" -p"+parser["database"]["password"]+" < ../website/sql/Database.sql")
subprocess.call(["phpunit", "--stderr", "website/"])
# Environment tests.
print(subprocess.Popen('cd environment; python3 testenv.py', stdout=subprocess.PIPE, shell = True).stdout.read().decode('utf-8'))
# Tear down
print("Almost done...")
if os.path.isfile("../temp.ini"):
shutil.copyfile("temp.ini", "../halite.ini")
|
import os
import os.path
import configparser
import shutil
import subprocess
# Setup
print("Setting up...")
if os.path.isfile("../halite.ini"):
shutil.copyfile("../halite.ini", "temp.ini")
shutil.copyfile("tests.ini", "../halite.ini")
parser = configparser.ConfigParser()
parser.read("../halite.ini")
# Website tests
print("Beginning website backend tests")
passwordField = "" if parser["database"]["password"] == "" else "-p"+parser["database"]["password"]
os.system("mysql -u "+parser["database"]["username"]+" "+passwordField+" < ../website/sql/Database.sql")
subprocess.call(["phpunit", "--stderr", "website/"])
# Environment tests.
print(subprocess.Popen('cd environment; python3 testenv.py', stdout=subprocess.PIPE, shell = True).stdout.read().decode('utf-8'))
# Tear down
print("Almost done...")
if os.path.isfile("../temp.ini"):
shutil.copyfile("temp.ini", "../halite.ini")
|
Make test runner work with blank mysql password
|
Make test runner work with blank mysql password
|
Python
|
mit
|
HaliteChallenge/Halite-II,yangle/HaliteIO,yangle/HaliteIO,HaliteChallenge/Halite,HaliteChallenge/Halite,yangle/HaliteIO,lanyudhy/Halite-II,HaliteChallenge/Halite-II,HaliteChallenge/Halite,yangle/HaliteIO,lanyudhy/Halite-II,HaliteChallenge/Halite-II,yangle/HaliteIO,lanyudhy/Halite-II,HaliteChallenge/Halite-II,HaliteChallenge/Halite,HaliteChallenge/Halite-II,lanyudhy/Halite-II,HaliteChallenge/Halite,lanyudhy/Halite-II,yangle/HaliteIO,lanyudhy/Halite-II,yangle/HaliteIO,HaliteChallenge/Halite-II,HaliteChallenge/Halite-II,HaliteChallenge/Halite,HaliteChallenge/Halite,lanyudhy/Halite-II,HaliteChallenge/Halite-II,HaliteChallenge/Halite,HaliteChallenge/Halite-II,yangle/HaliteIO,HaliteChallenge/Halite-II,yangle/HaliteIO,yangle/HaliteIO,yangle/HaliteIO,lanyudhy/Halite-II,HaliteChallenge/Halite,HaliteChallenge/Halite-II,HaliteChallenge/Halite,HaliteChallenge/Halite-II,yangle/HaliteIO,HaliteChallenge/Halite-II,HaliteChallenge/Halite-II,HaliteChallenge/Halite,HaliteChallenge/Halite-II,lanyudhy/Halite-II,HaliteChallenge/Halite,lanyudhy/Halite-II,lanyudhy/Halite-II,HaliteChallenge/Halite-II
|
import os
import os.path
import configparser
import shutil
import subprocess
# Setup
print("Setting up...")
if os.path.isfile("../halite.ini"):
shutil.copyfile("../halite.ini", "temp.ini")
shutil.copyfile("tests.ini", "../halite.ini")
parser = configparser.ConfigParser()
parser.read("../halite.ini")
# Website tests
print("Beginning website backend tests")
+ passwordField = "" if parser["database"]["password"] == "" else "-p"+parser["database"]["password"]
- os.system("mysql -u "+parser["database"]["username"]+" -p"+parser["database"]["password"]+" < ../website/sql/Database.sql")
+ os.system("mysql -u "+parser["database"]["username"]+" "+passwordField+" < ../website/sql/Database.sql")
subprocess.call(["phpunit", "--stderr", "website/"])
# Environment tests.
print(subprocess.Popen('cd environment; python3 testenv.py', stdout=subprocess.PIPE, shell = True).stdout.read().decode('utf-8'))
# Tear down
print("Almost done...")
if os.path.isfile("../temp.ini"):
shutil.copyfile("temp.ini", "../halite.ini")
|
Make test runner work with blank mysql password
|
## Code Before:
import os
import os.path
import configparser
import shutil
import subprocess
# Setup
print("Setting up...")
if os.path.isfile("../halite.ini"):
shutil.copyfile("../halite.ini", "temp.ini")
shutil.copyfile("tests.ini", "../halite.ini")
parser = configparser.ConfigParser()
parser.read("../halite.ini")
# Website tests
print("Beginning website backend tests")
os.system("mysql -u "+parser["database"]["username"]+" -p"+parser["database"]["password"]+" < ../website/sql/Database.sql")
subprocess.call(["phpunit", "--stderr", "website/"])
# Environment tests.
print(subprocess.Popen('cd environment; python3 testenv.py', stdout=subprocess.PIPE, shell = True).stdout.read().decode('utf-8'))
# Tear down
print("Almost done...")
if os.path.isfile("../temp.ini"):
shutil.copyfile("temp.ini", "../halite.ini")
## Instruction:
Make test runner work with blank mysql password
## Code After:
import os
import os.path
import configparser
import shutil
import subprocess
# Setup
print("Setting up...")
if os.path.isfile("../halite.ini"):
shutil.copyfile("../halite.ini", "temp.ini")
shutil.copyfile("tests.ini", "../halite.ini")
parser = configparser.ConfigParser()
parser.read("../halite.ini")
# Website tests
print("Beginning website backend tests")
passwordField = "" if parser["database"]["password"] == "" else "-p"+parser["database"]["password"]
os.system("mysql -u "+parser["database"]["username"]+" "+passwordField+" < ../website/sql/Database.sql")
subprocess.call(["phpunit", "--stderr", "website/"])
# Environment tests.
print(subprocess.Popen('cd environment; python3 testenv.py', stdout=subprocess.PIPE, shell = True).stdout.read().decode('utf-8'))
# Tear down
print("Almost done...")
if os.path.isfile("../temp.ini"):
shutil.copyfile("temp.ini", "../halite.ini")
|
// ... existing code ...
# Website tests
print("Beginning website backend tests")
passwordField = "" if parser["database"]["password"] == "" else "-p"+parser["database"]["password"]
os.system("mysql -u "+parser["database"]["username"]+" "+passwordField+" < ../website/sql/Database.sql")
subprocess.call(["phpunit", "--stderr", "website/"])
# Environment tests.
// ... rest of the code ...
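
A bare -p flag makes the mysql client prompt for a password interactively, so the flag is emitted only when a password is actually configured; the conditional-flag idiom in isolation:

def password_flag(password):
    # Omit the flag entirely for a blank password instead of passing "-p".
    return '' if password == '' else '-p' + password

print(repr(password_flag('')))        # -> ''
print(repr(password_flag('s3cret')))  # -> '-ps3cret'
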
|
d879c6338449cd0c2f3c9a84162b3de688a55105
|
webdiff/gitwebdiff.py
|
webdiff/gitwebdiff.py
|
'''This lets you run "git webdiff" instead of "git difftool".'''
import os
import subprocess
import sys
def any_nonflag_args(args):
"""Do any args not start with '-'? If so, this isn't a HEAD diff."""
return len([x for x in args if not x.startswith('-')]) > 0
def run():
if not any_nonflag_args(sys.argv[1:]):
# This tells webdiff that it was invoked as a simple "git webdiff", not
# "git webdiff <sha>". This allows special treatment (e.g. for
# staging diffhunks).
os.environ['WEBDIFF_FROM_HEAD'] = 'yes'
sys.exit(subprocess.call(
'git difftool -d -x webdiff'.split(' ') + sys.argv[1:]))
if __name__ == '__main__':
run()
|
'''This lets you run "git webdiff" instead of "git difftool".'''
import os
import subprocess
import sys
def any_nonflag_args(args):
"""Do any args not start with '-'? If so, this isn't a HEAD diff."""
return len([x for x in args if not x.startswith('-')]) > 0
def run():
if not any_nonflag_args(sys.argv[1:]):
# This tells webdiff that it was invoked as a simple "git webdiff", not
# "git webdiff <sha>". This allows special treatment (e.g. for
# staging diffhunks).
os.environ['WEBDIFF_FROM_HEAD'] = 'yes'
try:
subprocess.call('git difftool -d -x webdiff'.split(' ') + sys.argv[1:])
except KeyboardInterrupt:
# Don't raise an exception to the user when sigint is received
pass
if __name__ == '__main__':
run()
|
Exit cleanly from 'git webdiff'
|
Exit cleanly from 'git webdiff'
- Don't allow a KeyboardInterrupt/sigint exception to propagate up
to the user when exiting webdiff with Ctrl-C
|
Python
|
apache-2.0
|
daytonb/webdiff,danvk/webdiff,daytonb/webdiff,daytonb/webdiff,danvk/webdiff,danvk/webdiff,danvk/webdiff,daytonb/webdiff,danvk/webdiff
|
'''This lets you run "git webdiff" instead of "git difftool".'''
import os
import subprocess
import sys
def any_nonflag_args(args):
"""Do any args not start with '-'? If so, this isn't a HEAD diff."""
return len([x for x in args if not x.startswith('-')]) > 0
def run():
if not any_nonflag_args(sys.argv[1:]):
# This tells webdiff that it was invoked as a simple "git webdiff", not
# "git webdiff <sha>". This allows special treatment (e.g. for
# staging diffhunks).
os.environ['WEBDIFF_FROM_HEAD'] = 'yes'
-
- sys.exit(subprocess.call(
+
+ try:
- 'git difftool -d -x webdiff'.split(' ') + sys.argv[1:]))
+ subprocess.call('git difftool -d -x webdiff'.split(' ') + sys.argv[1:])
+ except KeyboardInterrupt:
+ # Don't raise an exception to the user when sigint is received
+ pass
if __name__ == '__main__':
run()
|
Exit cleanly from 'git webdiff'
|
## Code Before:
'''This lets you run "git webdiff" instead of "git difftool".'''
import os
import subprocess
import sys
def any_nonflag_args(args):
"""Do any args not start with '-'? If so, this isn't a HEAD diff."""
return len([x for x in args if not x.startswith('-')]) > 0
def run():
if not any_nonflag_args(sys.argv[1:]):
# This tells webdiff that it was invoked as a simple "git webdiff", not
# "git webdiff <sha>". This allows special treatment (e.g. for
# staging diffhunks).
os.environ['WEBDIFF_FROM_HEAD'] = 'yes'
sys.exit(subprocess.call(
'git difftool -d -x webdiff'.split(' ') + sys.argv[1:]))
if __name__ == '__main__':
run()
## Instruction:
Exit cleanly from 'git webdiff'
## Code After:
'''This lets you run "git webdiff" instead of "git difftool".'''
import os
import subprocess
import sys
def any_nonflag_args(args):
"""Do any args not start with '-'? If so, this isn't a HEAD diff."""
return len([x for x in args if not x.startswith('-')]) > 0
def run():
if not any_nonflag_args(sys.argv[1:]):
# This tells webdiff that it was invoked as a simple "git webdiff", not
# "git webdiff <sha>". This allows special treatment (e.g. for
# staging diffhunks).
os.environ['WEBDIFF_FROM_HEAD'] = 'yes'
try:
subprocess.call('git difftool -d -x webdiff'.split(' ') + sys.argv[1:])
except KeyboardInterrupt:
# Don't raise an exception to the user when sigint is received
pass
if __name__ == '__main__':
run()
|
...
# "git webdiff <sha>". This allows special treatment (e.g. for
# staging diffhunks).
os.environ['WEBDIFF_FROM_HEAD'] = 'yes'
try:
subprocess.call('git difftool -d -x webdiff'.split(' ') + sys.argv[1:])
except KeyboardInterrupt:
# Don't raise an exception to the user when sigint is received
pass
if __name__ == '__main__':
...
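
The same pattern in isolation: catching KeyboardInterrupt around the blocking call lets the wrapper exit quietly on Ctrl-C instead of printing a traceback ('sleep' is a stand-in for the long-running difftool child):

import subprocess

try:
    subprocess.call(['sleep', '60'])  # stand-in for the long-running child
except KeyboardInterrupt:
    pass  # swallow Ctrl-C so the user does not see a traceback
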
|
92631d96a9acac10e8af98bbaa5ec2afee1ae12f
|
openrcv/main.py
|
openrcv/main.py
|
import sys
from openrcv import models
from openrcv.models import BallotList
from openrcv.parsing import BLTParser
from openrcv.utils import FILE_ENCODING
def do_parse(ballots_path, encoding=None):
if encoding is None:
encoding = FILE_ENCODING
ballots = models.random_ballot_list(range(6), 5)
#print(repr(ballots.ballots))
parser = BLTParser()
info = parser.parse_path(ballots_path)
print(repr(info))
|
import sys
from openrcv import models
from openrcv.models import BallotList
from openrcv.parsing import BLTParser
from openrcv.utils import FILE_ENCODING
def make_json_tests():
contests = []
for count in range(3, 6):
contest = models.random_contest(count)
contests.append(contest)
contests_obj = [c.__jsobj__() for c in contests]
tests_jobj = {
"_version": "0.1.0-alpha",
"contests": contests_obj
}
json = models.to_json(tests_jobj)
print(json)
def do_parse(ballots_path, encoding=None):
if encoding is None:
encoding = FILE_ENCODING
parser = BLTParser()
info = parser.parse_path(ballots_path)
print(repr(info))
|
Add code for generating test files.
|
Add code for generating test files.
|
Python
|
mit
|
cjerdonek/open-rcv,cjerdonek/open-rcv
|
import sys
from openrcv import models
from openrcv.models import BallotList
from openrcv.parsing import BLTParser
from openrcv.utils import FILE_ENCODING
+ def make_json_tests():
+ contests = []
+ for count in range(3, 6):
+ contest = models.random_contest(count)
+ contests.append(contest)
+
+ contests_obj = [c.__jsobj__() for c in contests]
+
+ tests_jobj = {
+ "_version": "0.1.0-alpha",
+ "contests": contests_obj
+ }
+ json = models.to_json(tests_jobj)
+
+ print(json)
+
def do_parse(ballots_path, encoding=None):
if encoding is None:
encoding = FILE_ENCODING
-
- ballots = models.random_ballot_list(range(6), 5)
- #print(repr(ballots.ballots))
parser = BLTParser()
info = parser.parse_path(ballots_path)
print(repr(info))
|
Add code for generating test files.
|
## Code Before:
import sys
from openrcv import models
from openrcv.models import BallotList
from openrcv.parsing import BLTParser
from openrcv.utils import FILE_ENCODING
def do_parse(ballots_path, encoding=None):
if encoding is None:
encoding = FILE_ENCODING
ballots = models.random_ballot_list(range(6), 5)
#print(repr(ballots.ballots))
parser = BLTParser()
info = parser.parse_path(ballots_path)
print(repr(info))
## Instruction:
Add code for generating test files.
## Code After:
import sys
from openrcv import models
from openrcv.models import BallotList
from openrcv.parsing import BLTParser
from openrcv.utils import FILE_ENCODING
def make_json_tests():
contests = []
for count in range(3, 6):
contest = models.random_contest(count)
contests.append(contest)
contests_obj = [c.__jsobj__() for c in contests]
tests_jobj = {
"_version": "0.1.0-alpha",
"contests": contests_obj
}
json = models.to_json(tests_jobj)
print(json)
def do_parse(ballots_path, encoding=None):
if encoding is None:
encoding = FILE_ENCODING
parser = BLTParser()
info = parser.parse_path(ballots_path)
print(repr(info))
|
// ... existing code ...
from openrcv.utils import FILE_ENCODING
def make_json_tests():
contests = []
for count in range(3, 6):
contest = models.random_contest(count)
contests.append(contest)
contests_obj = [c.__jsobj__() for c in contests]
tests_jobj = {
"_version": "0.1.0-alpha",
"contests": contests_obj
}
json = models.to_json(tests_jobj)
print(json)
def do_parse(ballots_path, encoding=None):
if encoding is None:
encoding = FILE_ENCODING
parser = BLTParser()
info = parser.parse_path(ballots_path)
print(repr(info))
// ... rest of the code ...
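
The emitted document has a fixed top-level shape; a sketch with plain dicts standing in for the __jsobj__() output of models.random_contest (the contest fields below are hypothetical):

import json

contests_obj = [{'id': i, 'candidate_count': i + 3} for i in range(3)]
tests_jobj = {
    "_version": "0.1.0-alpha",
    "contests": contests_obj,
}
print(json.dumps(tests_jobj, indent=2))
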
|
54d5a984aeecd9bad501ec484c173f2dc504dfa5
|
dict.py
|
dict.py
|
import os
import sys
import json
import urllib
import datetime
import subprocess
# api key, 1000 times per hour
APIKEY = 'WGCxN9fzvCxPo0nqlzGLCPUc'
PATH = '~/vocabulary' # make sure the path exist
FILENAME = os.path.join(os.path.expanduser(PATH), str(datetime.date.today()) + '.txt')
def main():
word = subprocess.check_output('xsel')
params = urllib.urlencode({'from': 'auto', 'to': 'auto', 'client_id':APIKEY, 'q': word})
f = urllib.urlopen("http://openapi.baidu.com/public/2.0/bmt/translate?%s", params)
j = json.loads(f.read())
d = dict(j['trans_result'][0])
subprocess.call(['notify-send', word, d['dst']])
with open(FILENAME, 'a+', 0) as f:
f.write(word + '\n')
if __name__ == '__main__':
main()
|
import os
import sys
import json
import urllib
import datetime
import subprocess
import random
import md5
# api key, six million per month
APPID = 'Your baidu translate appid'
APIKEY = 'Your baidu translate apikey'
PATH = '~/vocabulary' # make sure the path exist
FILENAME = os.path.join(os.path.expanduser(PATH), str(datetime.date.today()) + '.txt')
def main():
word = subprocess.check_output('xsel')
salt = random.randint(32768, 65536)
sign = APPID + word + str(salt) + APIKEY
m1 = md5.new()
m1.update(sign)
sign = m1.hexdigest()
params = urllib.urlencode({'q': word, 'from': 'auto', 'to': 'zh', 'appid':APPID, 'salt': salt, 'sign': sign})
f = urllib.urlopen("http://api.fanyi.baidu.com/api/trans/vip/translate?%s", params)
j = json.loads(f.read())
d = dict(j['trans_result'][0])
subprocess.call(['notify-send', word, d['dst']])
with open(FILENAME, 'a+', 0) as f:
f.write(word + '\n')
if __name__ == '__main__':
main()
|
Migrate to new translate api
|
Migrate to new translate api
|
Python
|
mit
|
pidofme/T4LE
|
import os
import sys
import json
import urllib
import datetime
import subprocess
+ import random
+ import md5
- # api key, 1000 times per hour
- APIKEY = 'WGCxN9fzvCxPo0nqlzGLCPUc'
+ # api key, six million per month
+ APPID = 'Your baidu translate appid'
+ APIKEY = 'Your baidu translate apikey'
PATH = '~/vocabulary' # make sure the path exist
FILENAME = os.path.join(os.path.expanduser(PATH), str(datetime.date.today()) + '.txt')
def main():
word = subprocess.check_output('xsel')
- params = urllib.urlencode({'from': 'auto', 'to': 'auto', 'client_id':APIKEY, 'q': word})
+ salt = random.randint(32768, 65536)
+ sign = APPID + word + str(salt) + APIKEY
+ m1 = md5.new()
+ m1.update(sign)
+ sign = m1.hexdigest()
+ params = urllib.urlencode({'q': word, 'from': 'auto', 'to': 'zh', 'appid':APPID, 'salt': salt, 'sign': sign})
- f = urllib.urlopen("http://openapi.baidu.com/public/2.0/bmt/translate?%s", params)
+ f = urllib.urlopen("http://api.fanyi.baidu.com/api/trans/vip/translate?%s", params)
j = json.loads(f.read())
d = dict(j['trans_result'][0])
subprocess.call(['notify-send', word, d['dst']])
with open(FILENAME, 'a+', 0) as f:
f.write(word + '\n')
if __name__ == '__main__':
main()
|
Migrate to new translate api
|
## Code Before:
import os
import sys
import json
import urllib
import datetime
import subprocess
# api key, 1000 times per hour
APIKEY = 'WGCxN9fzvCxPo0nqlzGLCPUc'
PATH = '~/vocabulary' # make sure the path exist
FILENAME = os.path.join(os.path.expanduser(PATH), str(datetime.date.today()) + '.txt')
def main():
word = subprocess.check_output('xsel')
params = urllib.urlencode({'from': 'auto', 'to': 'auto', 'client_id':APIKEY, 'q': word})
f = urllib.urlopen("http://openapi.baidu.com/public/2.0/bmt/translate?%s", params)
j = json.loads(f.read())
d = dict(j['trans_result'][0])
subprocess.call(['notify-send', word, d['dst']])
with open(FILENAME, 'a+', 0) as f:
f.write(word + '\n')
if __name__ == '__main__':
main()
## Instruction:
Migrate to new translate api
## Code After:
import os
import sys
import json
import urllib
import datetime
import subprocess
import random
import md5
# api key, six million per month
APPID = 'Your baidu translate appid'
APIKEY = 'Your baidu translate apikey'
PATH = '~/vocabulary' # make sure the path exist
FILENAME = os.path.join(os.path.expanduser(PATH), str(datetime.date.today()) + '.txt')
def main():
word = subprocess.check_output('xsel')
salt = random.randint(32768, 65536)
sign = APPID + word + str(salt) + APIKEY
m1 = md5.new()
m1.update(sign)
sign = m1.hexdigest()
params = urllib.urlencode({'q': word, 'from': 'auto', 'to': 'zh', 'appid':APPID, 'salt': salt, 'sign': sign})
f = urllib.urlopen("http://api.fanyi.baidu.com/api/trans/vip/translate?%s", params)
j = json.loads(f.read())
d = dict(j['trans_result'][0])
subprocess.call(['notify-send', word, d['dst']])
with open(FILENAME, 'a+', 0) as f:
f.write(word + '\n')
if __name__ == '__main__':
main()
|
...
import urllib
import datetime
import subprocess
import random
import md5
# api key, six million per month
APPID = 'Your baidu translate appid'
APIKEY = 'Your baidu translate apikey'
PATH = '~/vocabulary' # make sure the path exist
FILENAME = os.path.join(os.path.expanduser(PATH), str(datetime.date.today()) + '.txt')
def main():
word = subprocess.check_output('xsel')
salt = random.randint(32768, 65536)
sign = APPID + word + str(salt) + APIKEY
m1 = md5.new()
m1.update(sign)
sign = m1.hexdigest()
params = urllib.urlencode({'q': word, 'from': 'auto', 'to': 'zh', 'appid':APPID, 'salt': salt, 'sign': sign})
f = urllib.urlopen("http://api.fanyi.baidu.com/api/trans/vip/translate?%s", params)
j = json.loads(f.read())
d = dict(j['trans_result'][0])
subprocess.call(['notify-send', word, d['dst']])
...
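
The record's signing step uses the Python 2 md5 module; on Python 3 the equivalent digest over appid + query + salt + apikey would be computed with hashlib (the credential values below are placeholders):

import hashlib
import random

appid, apikey, word = 'APPID', 'APIKEY', 'hello'  # placeholders
salt = random.randint(32768, 65536)
sign = hashlib.md5((appid + word + str(salt) + apikey).encode('utf-8')).hexdigest()
print(salt, sign)
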
|
bdc554d18dc67cd4979bac3bc5d4b7d01b23b8b4
|
grako/rendering.py
|
grako/rendering.py
|
from __future__ import print_function, division, absolute_import, unicode_literals
import itertools
from .util import trim
def render(item, **fields):
""" Render the given item
"""
if item is None:
return ''
elif isinstance(item, Renderer):
return item.render(**fields)
elif isinstance(item, list):
return ''.join(render(e) for e in item)
else:
return str(item)
class Renderer(object):
template = ''
_counter = itertools.count()
def __init__(self, template=None):
if template is not None:
self.template = template
def counter(self):
return next(self._counter)
def render_fields(self, fields):
pass
def render(self, template=None, **fields):
fields.update({k:v for k, v in vars(self).items() if not k.startswith('_')})
self.render_fields(fields)
if template is None:
template = self.template
fields = {k:render(v) for k, v in fields.items()}
try:
return trim(template).format(**fields)
except KeyError as e:
raise KeyError(str(e), type(self))
|
from __future__ import print_function, division, absolute_import, unicode_literals
import itertools
from .util import trim
def render(item, **fields):
""" Render the given item
"""
if item is None:
return ''
elif isinstance(item, Renderer):
return item.render(**fields)
elif isinstance(item, list):
return ''.join(render(e) for e in item)
else:
return str(item)
class Renderer(object):
template = ''
_counter = itertools.count()
def __init__(self, template=None):
if template is not None:
self.template = template
def counter(self):
return next(self._counter)
def render_fields(self, fields):
pass
def render(self, template=None, **kwargs):
fields = ({k:v for k, v in vars(self).items() if not k.startswith('_')})
override = self.render_fields(fields)
if template is None:
if override is not None:
template = override
else:
template = self.template
fields.update(kwargs)
fields = {k:render(v) for k, v in fields.items()}
try:
return trim(template).format(**fields)
except KeyError as e:
raise KeyError(str(e), type(self))
|
Allow override of template through return value of render_fields.
|
Allow override of template through return value of render_fields.
|
Python
|
bsd-2-clause
|
swayf/grako,swayf/grako
|
from __future__ import print_function, division, absolute_import, unicode_literals
import itertools
from .util import trim
def render(item, **fields):
""" Render the given item
"""
if item is None:
return ''
elif isinstance(item, Renderer):
return item.render(**fields)
elif isinstance(item, list):
return ''.join(render(e) for e in item)
else:
return str(item)
class Renderer(object):
template = ''
_counter = itertools.count()
def __init__(self, template=None):
if template is not None:
self.template = template
def counter(self):
return next(self._counter)
def render_fields(self, fields):
pass
- def render(self, template=None, **fields):
+ def render(self, template=None, **kwargs):
- fields.update({k:v for k, v in vars(self).items() if not k.startswith('_')})
+ fields = ({k:v for k, v in vars(self).items() if not k.startswith('_')})
+
- self.render_fields(fields)
+ override = self.render_fields(fields)
if template is None:
+ if override is not None:
+ template = override
+ else:
- template = self.template
+ template = self.template
+
+ fields.update(kwargs)
fields = {k:render(v) for k, v in fields.items()}
try:
return trim(template).format(**fields)
except KeyError as e:
raise KeyError(str(e), type(self))
|
Allow override of template through return value of render_fields.
|
## Code Before:
from __future__ import print_function, division, absolute_import, unicode_literals
import itertools
from .util import trim
def render(item, **fields):
""" Render the given item
"""
if item is None:
return ''
elif isinstance(item, Renderer):
return item.render(**fields)
elif isinstance(item, list):
return ''.join(render(e) for e in item)
else:
return str(item)
class Renderer(object):
template = ''
_counter = itertools.count()
def __init__(self, template=None):
if template is not None:
self.template = template
def counter(self):
return next(self._counter)
def render_fields(self, fields):
pass
def render(self, template=None, **fields):
fields.update({k:v for k, v in vars(self).items() if not k.startswith('_')})
self.render_fields(fields)
if template is None:
template = self.template
fields = {k:render(v) for k, v in fields.items()}
try:
return trim(template).format(**fields)
except KeyError as e:
raise KeyError(str(e), type(self))
## Instruction:
Allow override of template through return value of render_fields.
## Code After:
from __future__ import print_function, division, absolute_import, unicode_literals
import itertools
from .util import trim
def render(item, **fields):
""" Render the given item
"""
if item is None:
return ''
elif isinstance(item, Renderer):
return item.render(**fields)
elif isinstance(item, list):
return ''.join(render(e) for e in item)
else:
return str(item)
class Renderer(object):
template = ''
_counter = itertools.count()
def __init__(self, template=None):
if template is not None:
self.template = template
def counter(self):
return next(self._counter)
def render_fields(self, fields):
pass
def render(self, template=None, **kwargs):
fields = ({k:v for k, v in vars(self).items() if not k.startswith('_')})
override = self.render_fields(fields)
if template is None:
if override is not None:
template = override
else:
template = self.template
fields.update(kwargs)
fields = {k:render(v) for k, v in fields.items()}
try:
return trim(template).format(**fields)
except KeyError as e:
raise KeyError(str(e), type(self))
|
...
def render_fields(self, fields):
pass
def render(self, template=None, **kwargs):
fields = ({k:v for k, v in vars(self).items() if not k.startswith('_')})
override = self.render_fields(fields)
if template is None:
if override is not None:
template = override
else:
template = self.template
fields.update(kwargs)
fields = {k:render(v) for k, v in fields.items()}
try:
return trim(template).format(**fields)
...
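A note on what the hook enables: render_fields can now hand back an alternate template chosen per call. A self-contained sketch of the post-change behaviour (the Renderer below is a stripped-down stand-in for the class above, without trim(); Row and its templates are purely illustrative):

class Renderer(object):
    template = ''

    def __init__(self, **attrs):
        vars(self).update(attrs)

    def render_fields(self, fields):
        pass  # subclasses may mutate fields and/or return a template

    def render(self, template=None, **kwargs):
        fields = {k: v for k, v in vars(self).items()
                  if not k.startswith('_')}
        override = self.render_fields(fields)
        if template is None:
            template = override if override is not None else self.template
        fields.update(kwargs)
        return template.format(**fields)

class Row(Renderer):
    template = '{name} = {value}'

    def render_fields(self, fields):
        if fields.get('value') is None:
            return '{name} is unset'  # overrides the class template

print(Row(name='x', value=3).render())     # x = 3
print(Row(name='y', value=None).render())  # y is unset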
|
17e26fa55e70de657d52e340cb6b66691310a663
|
bettertexts/forms.py
|
bettertexts/forms.py
|
from django_comments.forms import CommentForm
from django import forms
from django.utils.translation import ugettext_lazy as _
from bettertexts.models import TextComment
class TextCommentForm(CommentForm):
def __init__(self, *args, **kwargs):
super(TextCommentForm, self).__init__(*args, **kwargs)
self.fields['name'].label = _("Name")
self.fields['name'].required = True
self.fields['email'].label = _("Email address")
self.fields['email'].required = True
self.fields['comment'].label = _('Comment')
self.fields['comment'].required = True
self.fields['url'].widget = forms.HiddenInput()
inform = forms.BooleanField(required=False,
label=_('Keep me informed'),
widget=forms.CheckboxInput)
involved = forms.BooleanField(required=False,
label=_('Keep me involved'),
widget=forms.CheckboxInput)
class Meta:
fields = ['name', 'email', 'inform', 'comment']
def get_comment_model(self):
"""
override to provide a custom comment model.
"""
return TextComment
def get_comment_create_data(self):
"""
Override to add inform field
"""
data = super(TextCommentForm, self).get_comment_create_data()
data.update({'inform': True})
return data
|
from django_comments.forms import CommentForm
from django import forms
from django.utils.translation import ugettext_lazy as _
from bettertexts.models import TextComment
class TextCommentForm(CommentForm):
def __init__(self, *args, **kwargs):
super(TextCommentForm, self).__init__(*args, **kwargs)
self.fields['name'].label = _("Name")
self.fields['name'].required = True
self.fields['email'].label = _("Email address")
self.fields['email'].required = True
self.fields['comment'].label = _('Comment')
self.fields['comment'].required = True
self.fields['url'].widget = forms.HiddenInput()
inform = forms.BooleanField(required=False,
label=_('Keep me informed'),
widget=forms.CheckboxInput)
involved = forms.BooleanField(required=False,
label=_('Keep me involved'),
widget=forms.CheckboxInput)
class Meta:
fields = ['name', 'email', 'inform', 'comment']
def get_comment_model(self):
"""
override to provide a custom comment model.
"""
return TextComment
def get_comment_create_data(self):
"""
Override to add inform and involved field
"""
data = super(TextCommentForm, self).get_comment_create_data()
data.update({'inform': self.cleaned_data["inform"],
'involved': self.cleaned_data["involved"]})
return data
|
Fix checkboxes inform and involved
|
CL011: Fix checkboxes inform and involved
|
Python
|
mit
|
citizenline/citizenline,citizenline/citizenline,citizenline/citizenline,citizenline/citizenline
|
from django_comments.forms import CommentForm
from django import forms
from django.utils.translation import ugettext_lazy as _
from bettertexts.models import TextComment
class TextCommentForm(CommentForm):
def __init__(self, *args, **kwargs):
super(TextCommentForm, self).__init__(*args, **kwargs)
self.fields['name'].label = _("Name")
self.fields['name'].required = True
self.fields['email'].label = _("Email address")
self.fields['email'].required = True
self.fields['comment'].label = _('Comment')
self.fields['comment'].required = True
self.fields['url'].widget = forms.HiddenInput()
inform = forms.BooleanField(required=False,
label=_('Keep me informed'),
widget=forms.CheckboxInput)
involved = forms.BooleanField(required=False,
label=_('Keep me involved'),
widget=forms.CheckboxInput)
class Meta:
fields = ['name', 'email', 'inform', 'comment']
def get_comment_model(self):
"""
override to provide a custom comment model.
"""
return TextComment
def get_comment_create_data(self):
"""
- Override to add inform field
+ Override to add inform and involved field
"""
data = super(TextCommentForm, self).get_comment_create_data()
- data.update({'inform': True})
+ data.update({'inform': self.cleaned_data["inform"],
+ 'involved': self.cleaned_data["involved"]})
return data
|
Fix checkboxes inform and involved
|
## Code Before:
from django_comments.forms import CommentForm
from django import forms
from django.utils.translation import ugettext_lazy as _
from bettertexts.models import TextComment
class TextCommentForm(CommentForm):
def __init__(self, *args, **kwargs):
super(TextCommentForm, self).__init__(*args, **kwargs)
self.fields['name'].label = _("Name")
self.fields['name'].required = True
self.fields['email'].label = _("Email address")
self.fields['email'].required = True
self.fields['comment'].label = _('Comment')
self.fields['comment'].required = True
self.fields['url'].widget = forms.HiddenInput()
inform = forms.BooleanField(required=False,
label=_('Keep me informed'),
widget=forms.CheckboxInput)
involved = forms.BooleanField(required=False,
label=_('Keep me involved'),
widget=forms.CheckboxInput)
class Meta:
fields = ['name', 'email', 'inform', 'comment']
def get_comment_model(self):
"""
override to provide a custom comment model.
"""
return TextComment
def get_comment_create_data(self):
"""
Override to add inform field
"""
data = super(TextCommentForm, self).get_comment_create_data()
data.update({'inform': True})
return data
## Instruction:
Fix checkboxes inform and involved
## Code After:
from django_comments.forms import CommentForm
from django import forms
from django.utils.translation import ugettext_lazy as _
from bettertexts.models import TextComment
class TextCommentForm(CommentForm):
def __init__(self, *args, **kwargs):
super(TextCommentForm, self).__init__(*args, **kwargs)
self.fields['name'].label = _("Name")
self.fields['name'].required = True
self.fields['email'].label = _("Email address")
self.fields['email'].required = True
self.fields['comment'].label = _('Comment')
self.fields['comment'].required = True
self.fields['url'].widget = forms.HiddenInput()
inform = forms.BooleanField(required=False,
label=_('Keep me informed'),
widget=forms.CheckboxInput)
involved = forms.BooleanField(required=False,
label=_('Keep me involved'),
widget=forms.CheckboxInput)
class Meta:
fields = ['name', 'email', 'inform', 'comment']
def get_comment_model(self):
"""
override to provide a custom comment model.
"""
return TextComment
def get_comment_create_data(self):
"""
Override to add inform and involved field
"""
data = super(TextCommentForm, self).get_comment_create_data()
data.update({'inform': self.cleaned_data["inform"],
'involved': self.cleaned_data["involved"]})
return data
|
# ... existing code ...
def get_comment_create_data(self):
"""
Override to add inform and involved field
"""
data = super(TextCommentForm, self).get_comment_create_data()
data.update({'inform': self.cleaned_data["inform"],
'involved': self.cleaned_data["involved"]})
return data
# ... rest of the code ...
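The substance of the fix is the switch from a hardcoded True to the validated form input: previously every submission was stored with inform=True no matter what the visitor ticked. The same cleaned_data flow in a minimal standalone form (runnable outside a project; the field names match the record, and the settings bootstrap is only there to appease Django):

import django
from django.conf import settings

if not settings.configured:
    settings.configure()  # bare defaults are enough for form validation
    django.setup()

from django import forms

class OptInForm(forms.Form):
    inform = forms.BooleanField(required=False)
    involved = forms.BooleanField(required=False)

form = OptInForm(data={'inform': 'on'})  # 'involved' left unchecked
assert form.is_valid()
print(form.cleaned_data)  # {'inform': True, 'involved': False}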
|
bab2056ea9e47eec1d0bd15f86ff9cf08f8dbafc
|
tweets/tasks.py
|
tweets/tasks.py
|
from datetime import timedelta
from django.conf import settings
from django.core.management import call_command
from celery.task import periodic_task
@periodic_task(run_every=timedelta(minutes=settings.TWEET_INTERVAL_MINUTES))
def tweet_next_task():
call_command('tweet_next', verbose=True, interactive=False)
|
from django.core.management import call_command
from celery import shared_task
@shared_task()
def tweet_next_task():
call_command('tweet_next', verbose=True, interactive=False)
|
Use shared_task instead of deprecated periodic_task
|
Use shared_task instead of deprecated periodic_task
|
Python
|
mit
|
gwhigs/tweeter,gwhigs/tweeter,gwhigs/tweeter,gwhigs/tweeter
|
- from datetime import timedelta
-
- from django.conf import settings
from django.core.management import call_command
- from celery.task import periodic_task
+ from celery import shared_task
- @periodic_task(run_every=timedelta(minutes=settings.TWEET_INTERVAL_MINUTES))
+ @shared_task()
def tweet_next_task():
call_command('tweet_next', verbose=True, interactive=False)
|
Use shared_task instead of deprecated periodic_task
|
## Code Before:
from datetime import timedelta
from django.conf import settings
from django.core.management import call_command
from celery.task import periodic_task
@periodic_task(run_every=timedelta(minutes=settings.TWEET_INTERVAL_MINUTES))
def tweet_next_task():
call_command('tweet_next', verbose=True, interactive=False)
## Instruction:
Use shared_task instead of deprecated periodic_task
## Code After:
from django.core.management import call_command
from celery import shared_task
@shared_task()
def tweet_next_task():
call_command('tweet_next', verbose=True, interactive=False)
|
# ... existing code ...
from django.core.management import call_command
from celery import shared_task
@shared_task()
def tweet_next_task():
call_command('tweet_next', verbose=True, interactive=False)
# ... rest of the code ...
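Worth noting that shared_task drops the scheduling half of the old decorator: the task above is now invocable but no longer periodic. Under Celery 4+ the interval typically moves into the beat schedule. A sketch, assuming the app loads its configuration from Django settings with the usual CELERY namespace:

# settings.py (sketch; TWEET_INTERVAL_MINUTES defined above as before)
from datetime import timedelta

CELERY_BEAT_SCHEDULE = {
    'tweet-next': {
        'task': 'tweets.tasks.tweet_next_task',
        'schedule': timedelta(minutes=TWEET_INTERVAL_MINUTES),
    },
}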
|
94caedce74bad7a1e4a2002dd725a220a8fc8a8e
|
django_prometheus/migrations.py
|
django_prometheus/migrations.py
|
from django.db import connections
from django.db.migrations.executor import MigrationExecutor
from prometheus_client import Gauge
unapplied_migrations = Gauge(
'django_migrations_unapplied_total',
'Count of unapplied migrations by database connection',
['connection'])
applied_migrations = Gauge(
'django_migrations_applied_total',
'Count of applied migrations by database connection',
['connection'])
def ExportMigrationsForDatabase(alias, executor):
plan = executor.migration_plan(executor.loader.graph.leaf_nodes())
unapplied_migrations.labels(alias).set(len(plan))
applied_migrations.labels(alias).set(len(
executor.loader.applied_migrations))
def ExportMigrations():
"""Exports counts of unapplied migrations.
This is meant to be called during app startup, ideally by
django_prometheus.apps.AppConfig.
"""
return
if 'default' in connections and connections['default']['ENGINE'] == 'django.db.backends.dummy':
# This is the case where DATABASES = {} in the configuration,
# i.e. the user is not using any databases. Django "helpfully"
# adds a dummy database and then throws when you try to
# actually use it. So we don't do anything, because trying to
# export stats would crash the app on startup.
return
for alias in connections.databases:
executor = MigrationExecutor(connections[alias])
ExportMigrationsForDatabase(alias, executor)
|
from django.db import connections
from django.db.migrations.executor import MigrationExecutor
from prometheus_client import Gauge
unapplied_migrations = Gauge(
'django_migrations_unapplied_total',
'Count of unapplied migrations by database connection',
['connection'])
applied_migrations = Gauge(
'django_migrations_applied_total',
'Count of applied migrations by database connection',
['connection'])
def ExportMigrationsForDatabase(alias, executor):
plan = executor.migration_plan(executor.loader.graph.leaf_nodes())
unapplied_migrations.labels(alias).set(len(plan))
applied_migrations.labels(alias).set(len(
executor.loader.applied_migrations))
def ExportMigrations():
"""Exports counts of unapplied migrations.
This is meant to be called during app startup, ideally by
django_prometheus.apps.AppConfig.
"""
return
if 'default' in connections and (
connections['default']['ENGINE'] == 'django.db.backends.dummy'):
# This is the case where DATABASES = {} in the configuration,
# i.e. the user is not using any databases. Django "helpfully"
# adds a dummy database and then throws when you try to
# actually use it. So we don't do anything, because trying to
# export stats would crash the app on startup.
return
for alias in connections.databases:
executor = MigrationExecutor(connections[alias])
ExportMigrationsForDatabase(alias, executor)
|
Fix pep8 violation in 29e3a0c.
|
Fix pep8 violation in 29e3a0c.
|
Python
|
apache-2.0
|
korfuri/django-prometheus,obytes/django-prometheus,obytes/django-prometheus,korfuri/django-prometheus
|
from django.db import connections
from django.db.migrations.executor import MigrationExecutor
from prometheus_client import Gauge
unapplied_migrations = Gauge(
'django_migrations_unapplied_total',
'Count of unapplied migrations by database connection',
['connection'])
applied_migrations = Gauge(
'django_migrations_applied_total',
'Count of applied migrations by database connection',
['connection'])
def ExportMigrationsForDatabase(alias, executor):
plan = executor.migration_plan(executor.loader.graph.leaf_nodes())
unapplied_migrations.labels(alias).set(len(plan))
applied_migrations.labels(alias).set(len(
executor.loader.applied_migrations))
def ExportMigrations():
"""Exports counts of unapplied migrations.
This is meant to be called during app startup, ideally by
django_prometheus.apps.AppConfig.
"""
return
+ if 'default' in connections and (
- if 'default' in connections and connections['default']['ENGINE'] == 'django.db.backends.dummy':
+ connections['default']['ENGINE'] == 'django.db.backends.dummy'):
# This is the case where DATABASES = {} in the configuration,
# i.e. the user is not using any databases. Django "helpfully"
# adds a dummy database and then throws when you try to
# actually use it. So we don't do anything, because trying to
# export stats would crash the app on startup.
return
for alias in connections.databases:
executor = MigrationExecutor(connections[alias])
ExportMigrationsForDatabase(alias, executor)
|
Fix pep8 violation in 29e3a0c.
|
## Code Before:
from django.db import connections
from django.db.migrations.executor import MigrationExecutor
from prometheus_client import Gauge
unapplied_migrations = Gauge(
'django_migrations_unapplied_total',
'Count of unapplied migrations by database connection',
['connection'])
applied_migrations = Gauge(
'django_migrations_applied_total',
'Count of applied migrations by database connection',
['connection'])
def ExportMigrationsForDatabase(alias, executor):
plan = executor.migration_plan(executor.loader.graph.leaf_nodes())
unapplied_migrations.labels(alias).set(len(plan))
applied_migrations.labels(alias).set(len(
executor.loader.applied_migrations))
def ExportMigrations():
"""Exports counts of unapplied migrations.
This is meant to be called during app startup, ideally by
django_prometheus.apps.AppConfig.
"""
return
if 'default' in connections and connections['default']['ENGINE'] == 'django.db.backends.dummy':
# This is the case where DATABASES = {} in the configuration,
# i.e. the user is not using any databases. Django "helpfully"
# adds a dummy database and then throws when you try to
# actually use it. So we don't do anything, because trying to
# export stats would crash the app on startup.
return
for alias in connections.databases:
executor = MigrationExecutor(connections[alias])
ExportMigrationsForDatabase(alias, executor)
## Instruction:
Fix pep8 violation in 29e3a0c.
## Code After:
from django.db import connections
from django.db.migrations.executor import MigrationExecutor
from prometheus_client import Gauge
unapplied_migrations = Gauge(
'django_migrations_unapplied_total',
'Count of unapplied migrations by database connection',
['connection'])
applied_migrations = Gauge(
'django_migrations_applied_total',
'Count of applied migrations by database connection',
['connection'])
def ExportMigrationsForDatabase(alias, executor):
plan = executor.migration_plan(executor.loader.graph.leaf_nodes())
unapplied_migrations.labels(alias).set(len(plan))
applied_migrations.labels(alias).set(len(
executor.loader.applied_migrations))
def ExportMigrations():
"""Exports counts of unapplied migrations.
This is meant to be called during app startup, ideally by
django_prometheus.apps.AppConfig.
"""
return
if 'default' in connections and (
connections['default']['ENGINE'] == 'django.db.backends.dummy'):
# This is the case where DATABASES = {} in the configuration,
# i.e. the user is not using any databases. Django "helpfully"
# adds a dummy database and then throws when you try to
# actually use it. So we don't do anything, because trying to
# export stats would crash the app on startup.
return
for alias in connections.databases:
executor = MigrationExecutor(connections[alias])
ExportMigrationsForDatabase(alias, executor)
|
...
django_prometheus.apps.AppConfig.
"""
return
if 'default' in connections and (
connections['default']['ENGINE'] == 'django.db.backends.dummy'):
# This is the case where DATABASES = {} in the configuration,
# i.e. the user is not using any databases. Django "helpfully"
# adds a dummy database and then throws when you try to
...
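Two quirks in this function are worth flagging alongside the style fix: the bare return at the top means nothing below it ever executes, and once that line is removed, connections['default'] yields a connection wrapper rather than a settings dict, so indexing it with ['ENGINE'] would raise. A sketch of the guard written against the settings mapping instead (behaviour otherwise unchanged):

from django.db import connections

def uses_dummy_default_db():
    default = connections.databases.get('default', {})
    return default.get('ENGINE') == 'django.db.backends.dummy'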
|
01a012bf37c438c845e4962ffa6f1c0e1e2723f4
|
netmiko/cisco/cisco_ios.py
|
netmiko/cisco/cisco_ios.py
|
from __future__ import unicode_literals
from netmiko.cisco_base_connection import CiscoSSHConnection
from netmiko.cisco_base_connection import CiscoTelnetConnection
class CiscoIosSSH(CiscoSSHConnection):
"""Cisco IOS SSH driver."""
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self.set_base_prompt()
self.disable_paging()
self.set_terminal_width(command='terminal width 511')
class CiscoIosTelnet(CiscoTelnetConnection):
"""Cisco IOS Telnet driver."""
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self.set_base_prompt()
self.disable_paging()
self.set_terminal_width(command='terminal width 511')
|
from __future__ import unicode_literals
from netmiko.cisco_base_connection import CiscoSSHConnection
from netmiko.cisco_base_connection import CiscoTelnetConnection
class CiscoIosSSH(CiscoSSHConnection):
"""Cisco IOS SSH driver."""
@staticmethod
def autodetect(session):
"""
"""
matches = ["Cisco IOS Software", "Cisco Internetwork Operating System Software"]
try:
response = session.send_command("show version | inc Cisco")
for m in matches:
if m in response:
return 99
except:
return 0
return 0
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self.set_base_prompt()
self.disable_paging()
self.set_terminal_width(command='terminal width 511')
class CiscoIosTelnet(CiscoTelnetConnection):
"""Cisco IOS Telnet driver."""
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self.set_base_prompt()
self.disable_paging()
self.set_terminal_width(command='terminal width 511')
|
Add autodetect for Cisco IOS
|
Add autodetect for Cisco IOS
|
Python
|
mit
|
fooelisa/netmiko,ktbyers/netmiko,ktbyers/netmiko,isidroamv/netmiko,isidroamv/netmiko,fooelisa/netmiko
|
from __future__ import unicode_literals
from netmiko.cisco_base_connection import CiscoSSHConnection
from netmiko.cisco_base_connection import CiscoTelnetConnection
class CiscoIosSSH(CiscoSSHConnection):
"""Cisco IOS SSH driver."""
+
+ @staticmethod
+ def autodetect(session):
+ """
+ """
+ matches = ["Cisco IOS Software", "Cisco Internetwork Operating System Software"]
+ try:
+ response = session.send_command("show version | inc Cisco")
+ for m in matches:
+ if m in response:
+ return 99
+ except:
+ return 0
+ return 0
+
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self.set_base_prompt()
self.disable_paging()
self.set_terminal_width(command='terminal width 511')
class CiscoIosTelnet(CiscoTelnetConnection):
"""Cisco IOS Telnet driver."""
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self.set_base_prompt()
self.disable_paging()
self.set_terminal_width(command='terminal width 511')
|
Add autodetect for Cisco IOS
|
## Code Before:
from __future__ import unicode_literals
from netmiko.cisco_base_connection import CiscoSSHConnection
from netmiko.cisco_base_connection import CiscoTelnetConnection
class CiscoIosSSH(CiscoSSHConnection):
"""Cisco IOS SSH driver."""
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self.set_base_prompt()
self.disable_paging()
self.set_terminal_width(command='terminal width 511')
class CiscoIosTelnet(CiscoTelnetConnection):
"""Cisco IOS Telnet driver."""
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self.set_base_prompt()
self.disable_paging()
self.set_terminal_width(command='terminal width 511')
## Instruction:
Add autodetect for Cisco IOS
## Code After:
from __future__ import unicode_literals
from netmiko.cisco_base_connection import CiscoSSHConnection
from netmiko.cisco_base_connection import CiscoTelnetConnection
class CiscoIosSSH(CiscoSSHConnection):
"""Cisco IOS SSH driver."""
@staticmethod
def autodetect(session):
"""
"""
matches = ["Cisco IOS Software", "Cisco Internetwork Operating System Software"]
try:
response = session.send_command("show version | inc Cisco")
for m in matches:
if m in response:
return 99
except:
return 0
return 0
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self.set_base_prompt()
self.disable_paging()
self.set_terminal_width(command='terminal width 511')
class CiscoIosTelnet(CiscoTelnetConnection):
"""Cisco IOS Telnet driver."""
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self.set_base_prompt()
self.disable_paging()
self.set_terminal_width(command='terminal width 511')
|
...
class CiscoIosSSH(CiscoSSHConnection):
"""Cisco IOS SSH driver."""
@staticmethod
def autodetect(session):
"""
"""
matches = ["Cisco IOS Software", "Cisco Internetwork Operating System Software"]
try:
response = session.send_command("show version | inc Cisco")
for m in matches:
if m in response:
return 99
except:
return 0
return 0
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self.set_base_prompt()
...
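The return values form a crude confidence score (99 on a version-string match, 0 otherwise), which implies a caller that polls every candidate driver and keeps the best scorer. netmiko ships its own detection machinery; the general shape is roughly this (the candidate list and session object are assumed):

def guess_driver(session, candidates):
    """Return the driver class whose autodetect() scores highest, or None."""
    best, best_score = None, 0
    for driver in candidates:
        score = driver.autodetect(session)
        if score > best_score:
            best, best_score = driver, score
    return best

# e.g. guess_driver(session, [CiscoIosSSH, SomeOtherDriverSSH])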
|
5839e70f2ffab6640997e3a609a26e50ff2b4da6
|
opps/containers/urls.py
|
opps/containers/urls.py
|
from django.conf.urls import patterns, url
from django.conf import settings
from django.views.decorators.cache import cache_page
from .views import ContainerList, ContainerDetail
from .views import Search
from opps.contrib.feeds.views import ContainerFeed, ChannelFeed
urlpatterns = patterns(
'',
url(r'^$', ContainerList.as_view(), name='home'),
url(r'^(rss|feed)$', cache_page(settings.OPPS_CACHE_EXPIRE)(
ContainerFeed()), name='feed'),
url(r'^search/', Search(), name='search'),
url(r'^(?P<long_slug>[\w\b//-]+)/(rss|feed)$',
cache_page(settings.OPPS_CACHE_EXPIRE)(
ChannelFeed()), name='channel_feed'),
url(r'^(?P<channel__long_slug>[\w//-]+)/(?P<slug>[\w-]+)$',
cache_page(settings.OPPS_CACHE_EXPIRE_DETAIL)(
ContainerDetail.as_view()), name='open'),
url(r'^(?P<channel__long_slug>[\w\b//-]+)/$',
cache_page(settings.OPPS_CACHE_EXPIRE_LIST)(
ContainerList.as_view()), name='channel'),
)
|
from django.conf.urls import patterns, url
from django.conf import settings
from django.views.decorators.cache import cache_page
from .views import ContainerList, ContainerDetail
from .views import ContainerAPIList, ContainerAPIDetail
from .views import Search
from opps.contrib.feeds.views import ContainerFeed, ChannelFeed
urlpatterns = patterns(
'',
url(r'^$', ContainerList.as_view(), name='home'),
url(r'^(rss|feed)$', cache_page(settings.OPPS_CACHE_EXPIRE)(
ContainerFeed()), name='feed'),
url(r'^search/', Search(), name='search'),
url(r'^(?P<long_slug>[\w\b//-]+)/(rss|feed)$',
cache_page(settings.OPPS_CACHE_EXPIRE)(
ChannelFeed()), name='channel_feed'),
url(r'^(?P<channel__long_slug>[\w//-]+)/(?P<slug>[\w-]+).api$',
cache_page(settings.OPPS_CACHE_EXPIRE_DETAIL)(
ContainerAPIDetail.as_view()), name='open-api'),
url(r'^(?P<channel__long_slug>[\w//-]+)/(?P<slug>[\w-]+)$',
cache_page(settings.OPPS_CACHE_EXPIRE_DETAIL)(
ContainerDetail.as_view()), name='open'),
url(r'^(?P<channel__long_slug>[\w\b//-]+).api$',
cache_page(settings.OPPS_CACHE_EXPIRE_LIST)(
ContainerAPIList.as_view()), name='channel-api'),
url(r'^(?P<channel__long_slug>[\w\b//-]+)/$',
cache_page(settings.OPPS_CACHE_EXPIRE_LIST)(
ContainerList.as_view()), name='channel'),
)
|
Add URL entry for container API
|
Add URL entry for container API
|
Python
|
mit
|
jeanmask/opps,williamroot/opps,opps/opps,opps/opps,williamroot/opps,YACOWS/opps,jeanmask/opps,YACOWS/opps,opps/opps,jeanmask/opps,williamroot/opps,opps/opps,YACOWS/opps,williamroot/opps,jeanmask/opps,YACOWS/opps
|
from django.conf.urls import patterns, url
from django.conf import settings
from django.views.decorators.cache import cache_page
from .views import ContainerList, ContainerDetail
+ from .views import ContainerAPIList, ContainerAPIDetail
from .views import Search
from opps.contrib.feeds.views import ContainerFeed, ChannelFeed
urlpatterns = patterns(
'',
url(r'^$', ContainerList.as_view(), name='home'),
url(r'^(rss|feed)$', cache_page(settings.OPPS_CACHE_EXPIRE)(
ContainerFeed()), name='feed'),
url(r'^search/', Search(), name='search'),
url(r'^(?P<long_slug>[\w\b//-]+)/(rss|feed)$',
cache_page(settings.OPPS_CACHE_EXPIRE)(
ChannelFeed()), name='channel_feed'),
+ url(r'^(?P<channel__long_slug>[\w//-]+)/(?P<slug>[\w-]+).api$',
+ cache_page(settings.OPPS_CACHE_EXPIRE_DETAIL)(
+ ContainerAPIDetail.as_view()), name='open-api'),
url(r'^(?P<channel__long_slug>[\w//-]+)/(?P<slug>[\w-]+)$',
cache_page(settings.OPPS_CACHE_EXPIRE_DETAIL)(
ContainerDetail.as_view()), name='open'),
+ url(r'^(?P<channel__long_slug>[\w\b//-]+).api$',
+ cache_page(settings.OPPS_CACHE_EXPIRE_LIST)(
+ ContainerAPIList.as_view()), name='channel-api'),
url(r'^(?P<channel__long_slug>[\w\b//-]+)/$',
cache_page(settings.OPPS_CACHE_EXPIRE_LIST)(
ContainerList.as_view()), name='channel'),
)
|
Add URL entry for container API
|
## Code Before:
from django.conf.urls import patterns, url
from django.conf import settings
from django.views.decorators.cache import cache_page
from .views import ContainerList, ContainerDetail
from .views import Search
from opps.contrib.feeds.views import ContainerFeed, ChannelFeed
urlpatterns = patterns(
'',
url(r'^$', ContainerList.as_view(), name='home'),
url(r'^(rss|feed)$', cache_page(settings.OPPS_CACHE_EXPIRE)(
ContainerFeed()), name='feed'),
url(r'^search/', Search(), name='search'),
url(r'^(?P<long_slug>[\w\b//-]+)/(rss|feed)$',
cache_page(settings.OPPS_CACHE_EXPIRE)(
ChannelFeed()), name='channel_feed'),
url(r'^(?P<channel__long_slug>[\w//-]+)/(?P<slug>[\w-]+)$',
cache_page(settings.OPPS_CACHE_EXPIRE_DETAIL)(
ContainerDetail.as_view()), name='open'),
url(r'^(?P<channel__long_slug>[\w\b//-]+)/$',
cache_page(settings.OPPS_CACHE_EXPIRE_LIST)(
ContainerList.as_view()), name='channel'),
)
## Instruction:
Add URL entry for container API
## Code After:
from django.conf.urls import patterns, url
from django.conf import settings
from django.views.decorators.cache import cache_page
from .views import ContainerList, ContainerDetail
from .views import ContainerAPIList, ContainerAPIDetail
from .views import Search
from opps.contrib.feeds.views import ContainerFeed, ChannelFeed
urlpatterns = patterns(
'',
url(r'^$', ContainerList.as_view(), name='home'),
url(r'^(rss|feed)$', cache_page(settings.OPPS_CACHE_EXPIRE)(
ContainerFeed()), name='feed'),
url(r'^search/', Search(), name='search'),
url(r'^(?P<long_slug>[\w\b//-]+)/(rss|feed)$',
cache_page(settings.OPPS_CACHE_EXPIRE)(
ChannelFeed()), name='channel_feed'),
url(r'^(?P<channel__long_slug>[\w//-]+)/(?P<slug>[\w-]+).api$',
cache_page(settings.OPPS_CACHE_EXPIRE_DETAIL)(
ContainerAPIDetail.as_view()), name='open-api'),
url(r'^(?P<channel__long_slug>[\w//-]+)/(?P<slug>[\w-]+)$',
cache_page(settings.OPPS_CACHE_EXPIRE_DETAIL)(
ContainerDetail.as_view()), name='open'),
url(r'^(?P<channel__long_slug>[\w\b//-]+).api$',
cache_page(settings.OPPS_CACHE_EXPIRE_LIST)(
ContainerAPIList.as_view()), name='channel-api'),
url(r'^(?P<channel__long_slug>[\w\b//-]+)/$',
cache_page(settings.OPPS_CACHE_EXPIRE_LIST)(
ContainerList.as_view()), name='channel'),
)
|
# ... existing code ...
from django.views.decorators.cache import cache_page
from .views import ContainerList, ContainerDetail
from .views import ContainerAPIList, ContainerAPIDetail
from .views import Search
from opps.contrib.feeds.views import ContainerFeed, ChannelFeed
# ... modified code ...
cache_page(settings.OPPS_CACHE_EXPIRE)(
ChannelFeed()), name='channel_feed'),
url(r'^(?P<channel__long_slug>[\w//-]+)/(?P<slug>[\w-]+).api$',
cache_page(settings.OPPS_CACHE_EXPIRE_DETAIL)(
ContainerAPIDetail.as_view()), name='open-api'),
url(r'^(?P<channel__long_slug>[\w//-]+)/(?P<slug>[\w-]+)$',
cache_page(settings.OPPS_CACHE_EXPIRE_DETAIL)(
ContainerDetail.as_view()), name='open'),
url(r'^(?P<channel__long_slug>[\w\b//-]+).api$',
cache_page(settings.OPPS_CACHE_EXPIRE_LIST)(
ContainerAPIList.as_view()), name='channel-api'),
url(r'^(?P<channel__long_slug>[\w\b//-]+)/$',
cache_page(settings.OPPS_CACHE_EXPIRE_LIST)(
ContainerList.as_view()), name='channel'),
# ... rest of the code ...
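One regex caveat in the new routes: the dot before "api" is unescaped, so it matches any character, and a path such as "some-slug-api" also hits the API view. If that looseness is unwanted, escaping the dot is the usual fix. A sketch of the detail pattern only, everything else as committed:

url(r'^(?P<channel__long_slug>[\w//-]+)/(?P<slug>[\w-]+)\.api$',
    cache_page(settings.OPPS_CACHE_EXPIRE_DETAIL)(
        ContainerAPIDetail.as_view()), name='open-api'),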
|
9e4608b5cafcaf69718b3b187e143098ed74954f
|
examples/dispatcher/main.py
|
examples/dispatcher/main.py
|
import time
from osbrain import random_nameserver
from osbrain import run_agent
from osbrain import Agent
from osbrain import Proxy
def log(agent, message):
agent.log_info(message)
def rep_handler(agent, message):
if agent.i < 10:
if not agent.i % 5:
agent.send('rep', 5)
else:
agent.send('rep', 1)
agent.i += 1
def worker_loop(agent):
while True:
agent.send('dispatcher', 'READY!')
x = agent.recv('dispatcher')
time.sleep(x)
agent.send('results', '%s finished with %s' % (agent.name, x))
if __name__ == '__main__':
ns = random_nameserver()
results = run_agent('Results', nsaddr=ns)
results_addr = results.bind('PULL', handler=log)
dispatcher = run_agent('Dispatcher', nsaddr=ns)
dispatcher.set_attr(i=0)
dispatcher_addr = dispatcher.bind('REP', alias='rep', handler=rep_handler)
for i in range(5):
worker = run_agent('Worker%s' % i, nsaddr=ns)
worker.connect(results_addr, alias='results')
worker.connect(dispatcher_addr, alias='dispatcher')
worker.stop()
worker.set_loop(worker_loop)
worker.run()
|
import time
from osbrain import random_nameserver
from osbrain import run_agent
def log(agent, message):
agent.log_info(message)
def rep_handler(agent, message):
if agent.i < 10:
if not agent.i % 5:
agent.send('rep', 5)
else:
agent.send('rep', 1)
agent.i += 1
def worker_loop(agent):
while True:
agent.send('dispatcher', 'READY!')
x = agent.recv('dispatcher')
time.sleep(x)
agent.send('results', '%s finished with %s' % (agent.name, x))
if __name__ == '__main__':
ns = random_nameserver()
results = run_agent('Results', nsaddr=ns)
results_addr = results.bind('PULL', handler=log)
dispatcher = run_agent('Dispatcher', nsaddr=ns)
dispatcher.set_attr(i=0)
dispatcher_addr = dispatcher.bind('REP', alias='rep', handler=rep_handler)
for i in range(5):
worker = run_agent('Worker%s' % i, nsaddr=ns)
worker.connect(results_addr, alias='results')
worker.connect(dispatcher_addr, alias='dispatcher')
worker.stop()
worker.set_loop(worker_loop)
worker.run()
|
Remove unused imports in example
|
Remove unused imports in example
|
Python
|
apache-2.0
|
opensistemas-hub/osbrain
|
import time
from osbrain import random_nameserver
from osbrain import run_agent
- from osbrain import Agent
- from osbrain import Proxy
def log(agent, message):
agent.log_info(message)
def rep_handler(agent, message):
if agent.i < 10:
if not agent.i % 5:
agent.send('rep', 5)
else:
agent.send('rep', 1)
agent.i += 1
def worker_loop(agent):
while True:
agent.send('dispatcher', 'READY!')
x = agent.recv('dispatcher')
time.sleep(x)
agent.send('results', '%s finished with %s' % (agent.name, x))
if __name__ == '__main__':
ns = random_nameserver()
results = run_agent('Results', nsaddr=ns)
results_addr = results.bind('PULL', handler=log)
dispatcher = run_agent('Dispatcher', nsaddr=ns)
dispatcher.set_attr(i=0)
dispatcher_addr = dispatcher.bind('REP', alias='rep', handler=rep_handler)
for i in range(5):
worker = run_agent('Worker%s' % i, nsaddr=ns)
worker.connect(results_addr, alias='results')
worker.connect(dispatcher_addr, alias='dispatcher')
worker.stop()
worker.set_loop(worker_loop)
worker.run()
|
Remove unused imports in example
|
## Code Before:
import time
from osbrain import random_nameserver
from osbrain import run_agent
from osbrain import Agent
from osbrain import Proxy
def log(agent, message):
agent.log_info(message)
def rep_handler(agent, message):
if agent.i < 10:
if not agent.i % 5:
agent.send('rep', 5)
else:
agent.send('rep', 1)
agent.i += 1
def worker_loop(agent):
while True:
agent.send('dispatcher', 'READY!')
x = agent.recv('dispatcher')
time.sleep(x)
agent.send('results', '%s finished with %s' % (agent.name, x))
if __name__ == '__main__':
ns = random_nameserver()
results = run_agent('Results', nsaddr=ns)
results_addr = results.bind('PULL', handler=log)
dispatcher = run_agent('Dispatcher', nsaddr=ns)
dispatcher.set_attr(i=0)
dispatcher_addr = dispatcher.bind('REP', alias='rep', handler=rep_handler)
for i in range(5):
worker = run_agent('Worker%s' % i, nsaddr=ns)
worker.connect(results_addr, alias='results')
worker.connect(dispatcher_addr, alias='dispatcher')
worker.stop()
worker.set_loop(worker_loop)
worker.run()
## Instruction:
Remove unused imports in example
## Code After:
import time
from osbrain import random_nameserver
from osbrain import run_agent
def log(agent, message):
agent.log_info(message)
def rep_handler(agent, message):
if agent.i < 10:
if not agent.i % 5:
agent.send('rep', 5)
else:
agent.send('rep', 1)
agent.i += 1
def worker_loop(agent):
while True:
agent.send('dispatcher', 'READY!')
x = agent.recv('dispatcher')
time.sleep(x)
agent.send('results', '%s finished with %s' % (agent.name, x))
if __name__ == '__main__':
ns = random_nameserver()
results = run_agent('Results', nsaddr=ns)
results_addr = results.bind('PULL', handler=log)
dispatcher = run_agent('Dispatcher', nsaddr=ns)
dispatcher.set_attr(i=0)
dispatcher_addr = dispatcher.bind('REP', alias='rep', handler=rep_handler)
for i in range(5):
worker = run_agent('Worker%s' % i, nsaddr=ns)
worker.connect(results_addr, alias='results')
worker.connect(dispatcher_addr, alias='dispatcher')
worker.stop()
worker.set_loop(worker_loop)
worker.run()
|
# ... existing code ...
import time
from osbrain import random_nameserver
from osbrain import run_agent
def log(agent, message):
# ... rest of the code ...
|
53636a17cd50d704b7b4563d0b23a474677051f4
|
hub/prototype/config.py
|
hub/prototype/config.py
|
HOST = "the.hub.machine.tld"
# the servers we listen to; for now each server can just
# have one port and secret key on the hub even if it runs
# multiple game servers; not sure if we need to allow more
# than that yet :-/
SERVERS = {
"some.game.server.tld": (42, "somesecret"),
}
# the other hubs we echo to; note that we don't yet change
# the packets in any way, so they'll look like they really
# come from us; not good, but we'll need to define a new
# packet format for forwarded userinfo strings first, then
# we can fix this :-/
HUBS = {
"some.hub.server.tld": (84, "anothersecret"),
}
|
HOST = "the.hub.machine.tld"
# the servers we listen to; for now each server can just
# have one port and secret key on the hub even if it runs
# multiple game servers; not sure if we need to allow more
# than that yet :-/
SERVERS = {
"some.game.server.tld": (42, "somesecret"),
"some.other.game.tld": (543, "monkeyspam"),
}
# the other hubs we echo to; note that we don't yet change
# the packets in any way, so they'll look like they really
# come from us; not good, but we'll need to define a new
# packet format for forwarded userinfo strings first, then
# we can fix this :-/
HUBS = {
"some.hub.server.tld": (84, "anothersecret"),
}
|
Make sure we give an example for two servers.
|
Make sure we give an example for two servers.
|
Python
|
agpl-3.0
|
madprof/alpha-hub
|
HOST = "the.hub.machine.tld"
# the servers we listen to; for now each server can just
# have one port and secret key on the hub even if it runs
# multiple game servers; not sure if we need to allow more
# than that yet :-/
SERVERS = {
"some.game.server.tld": (42, "somesecret"),
+ "some.other.game.tld": (543, "monkeyspam"),
}
# the other hubs we echo to; note that we don't yet change
# the packets in any way, so they'll look like they really
# come from us; not good, but we'll need to define a new
# packet format for forwarded userinfo strings first, then
# we can fix this :-/
HUBS = {
"some.hub.server.tld": (84, "anothersecret"),
}
|
Make sure we give an example for two servers.
|
## Code Before:
HOST = "the.hub.machine.tld"
# the servers we listen to; for now each server can just
# have one port and secret key on the hub even if it runs
# multiple game servers; not sure if we need to allow more
# than that yet :-/
SERVERS = {
"some.game.server.tld": (42, "somesecret"),
}
# the other hubs we echo to; note that we don't yet change
# the packets in any way, so they'll look like they really
# come from us; not good, but we'll need to define a new
# packet format for forwarded userinfo strings first, then
# we can fix this :-/
HUBS = {
"some.hub.server.tld": (84, "anothersecret"),
}
## Instruction:
Make sure we give an example for two servers.
## Code After:
HOST = "the.hub.machine.tld"
# the servers we listen to; for now each server can just
# have one port and secret key on the hub even if it runs
# multiple game servers; not sure if we need to allow more
# than that yet :-/
SERVERS = {
"some.game.server.tld": (42, "somesecret"),
"some.other.game.tld": (543, "monkeyspam"),
}
# the other hubs we echo to; note that we don't yet change
# the packets in any way, so they'll look like they really
# come from us; not good, but we'll need to define a new
# packet format for forwarded userinfo strings first, then
# we can fix this :-/
HUBS = {
"some.hub.server.tld": (84, "anothersecret"),
}
|
# ... existing code ...
SERVERS = {
"some.game.server.tld": (42, "somesecret"),
"some.other.game.tld": (543, "monkeyspam"),
}
# the other hubs we echo to; note that we don't yet change
# ... rest of the code ...
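Since every entry maps a hostname to a (port, secret) pair, code consuming the two example servers is a plain dict iteration (sketch only; the hub's actual listener setup lives elsewhere):

for host, (port, secret) in SERVERS.items():
    print('listening for %s on port %d' % (host, port))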
|
befce70e7931f5949a7db10af4bae2cb4c21ba08
|
localore/people/wagtail_hooks.py
|
localore/people/wagtail_hooks.py
|
from wagtailmodeladmin.options import ModelAdmin, wagtailmodeladmin_register
from .models import Person
class PeopleAdmin(ModelAdmin):
model = Person
menu_icon = 'group'
menu_label = 'Team'
menu_order = 300
list_display = ('first_name', 'last_name', 'production', 'role')
list_filter = ('role', 'production')
search_fields = ('first_name', 'last_name', 'biography')
wagtailmodeladmin_register(PeopleAdmin)
|
from wagtailmodeladmin.options import ModelAdmin, wagtailmodeladmin_register
from .models import Person
class PeopleAdmin(ModelAdmin):
model = Person
menu_icon = 'group'
menu_label = 'Team'
menu_order = 300
list_display = ('first_name', 'last_name', 'production', 'role')
list_filter = ('role', 'production')
ordering = ('-production',)
search_fields = ('first_name', 'last_name', 'biography')
wagtailmodeladmin_register(PeopleAdmin)
|
Order people by associated production.
|
Order people by associated production.
|
Python
|
mpl-2.0
|
ghostwords/localore,ghostwords/localore,ghostwords/localore
|
from wagtailmodeladmin.options import ModelAdmin, wagtailmodeladmin_register
from .models import Person
class PeopleAdmin(ModelAdmin):
model = Person
menu_icon = 'group'
menu_label = 'Team'
menu_order = 300
list_display = ('first_name', 'last_name', 'production', 'role')
list_filter = ('role', 'production')
+ ordering = ('-production',)
search_fields = ('first_name', 'last_name', 'biography')
wagtailmodeladmin_register(PeopleAdmin)
|
Order people by associated production.
|
## Code Before:
from wagtailmodeladmin.options import ModelAdmin, wagtailmodeladmin_register
from .models import Person
class PeopleAdmin(ModelAdmin):
model = Person
menu_icon = 'group'
menu_label = 'Team'
menu_order = 300
list_display = ('first_name', 'last_name', 'production', 'role')
list_filter = ('role', 'production')
search_fields = ('first_name', 'last_name', 'biography')
wagtailmodeladmin_register(PeopleAdmin)
## Instruction:
Order people by associated production.
## Code After:
from wagtailmodeladmin.options import ModelAdmin, wagtailmodeladmin_register
from .models import Person
class PeopleAdmin(ModelAdmin):
model = Person
menu_icon = 'group'
menu_label = 'Team'
menu_order = 300
list_display = ('first_name', 'last_name', 'production', 'role')
list_filter = ('role', 'production')
ordering = ('-production',)
search_fields = ('first_name', 'last_name', 'biography')
wagtailmodeladmin_register(PeopleAdmin)
|
# ... existing code ...
menu_order = 300
list_display = ('first_name', 'last_name', 'production', 'role')
list_filter = ('role', 'production')
ordering = ('-production',)
search_fields = ('first_name', 'last_name', 'biography')
wagtailmodeladmin_register(PeopleAdmin)
# ... rest of the code ...
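ordering accepts any number of fields, with a leading minus reversing the sort, so a tiebreaker can be appended without touching the rest of the admin class (the second field here is illustrative):

ordering = ('-production', 'last_name')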
|
58823e20e3891cea7198be15b7c85395521086e1
|
extension_course/tests/conftest.py
|
extension_course/tests/conftest.py
|
import pytest
from django.conf import settings
from django.core.management import call_command
from events.tests.conftest import (administrative_division, administrative_division_type, data_source, event, # noqa
location_id, minimal_event_dict, municipality, organization, place, user,
user_api_client, django_db_modify_db_settings)
# Django test harness tries to serialize DB in order to support transactions
# within tests. (It restores the snapshot after such tests).
# This fails with modeltranslate, as the serialization is done before
# sync_translation_fields has a chance to run. Thus the fields are missing
# and serialization fails horribly.
#@pytest.fixture(scope='session')
#def django_db_modify_db_settings(django_db_modify_db_settings_xdist_suffix):
# settings.DATABASES['default']['TEST']['SERIALIZE'] = False
@pytest.fixture(scope='session')
def django_db_setup(django_db_setup, django_db_blocker):
with django_db_blocker.unblock():
call_command('sync_translation_fields', '--noinput')
@pytest.fixture(autouse=True)
def auto_enable(settings):
settings.AUTO_ENABLED_EXTENSIONS = ['course']
|
import pytest
from events.tests.conftest import (administrative_division, administrative_division_type, data_source, event, # noqa
location_id, minimal_event_dict, municipality, organization, place, user,
user_api_client, django_db_modify_db_settings, django_db_setup)
@pytest.fixture(autouse=True)
def auto_enable(settings):
settings.AUTO_ENABLED_EXTENSIONS = ['course']
|
Remove some needless code from course extension tests
|
Remove some needless code from course extension tests
|
Python
|
mit
|
City-of-Helsinki/linkedevents,City-of-Helsinki/linkedevents,City-of-Helsinki/linkedevents
|
import pytest
- from django.conf import settings
- from django.core.management import call_command
-
from events.tests.conftest import (administrative_division, administrative_division_type, data_source, event, # noqa
location_id, minimal_event_dict, municipality, organization, place, user,
- user_api_client, django_db_modify_db_settings)
+ user_api_client, django_db_modify_db_settings, django_db_setup)
-
-
- # Django test harness tries to serialize DB in order to support transactions
- # within tests. (It restores the snapshot after such tests).
- # This fails with modeltranslate, as the serialization is done before
- # sync_translation_fields has a chance to run. Thus the fields are missing
- # and serialization fails horribly.
- #@pytest.fixture(scope='session')
- #def django_db_modify_db_settings(django_db_modify_db_settings_xdist_suffix):
- # settings.DATABASES['default']['TEST']['SERIALIZE'] = False
-
-
- @pytest.fixture(scope='session')
- def django_db_setup(django_db_setup, django_db_blocker):
- with django_db_blocker.unblock():
- call_command('sync_translation_fields', '--noinput')
@pytest.fixture(autouse=True)
def auto_enable(settings):
settings.AUTO_ENABLED_EXTENSIONS = ['course']
|
Remove some needless code from course extension tests
|
## Code Before:
import pytest
from django.conf import settings
from django.core.management import call_command
from events.tests.conftest import (administrative_division, administrative_division_type, data_source, event, # noqa
location_id, minimal_event_dict, municipality, organization, place, user,
user_api_client, django_db_modify_db_settings)
# Django test harness tries to serialize DB in order to support transactions
# within tests. (It restores the snapshot after such tests).
# This fails with modeltranslate, as the serialization is done before
# sync_translation_fields has a chance to run. Thus the fields are missing
# and serialization fails horribly.
#@pytest.fixture(scope='session')
#def django_db_modify_db_settings(django_db_modify_db_settings_xdist_suffix):
# settings.DATABASES['default']['TEST']['SERIALIZE'] = False
@pytest.fixture(scope='session')
def django_db_setup(django_db_setup, django_db_blocker):
with django_db_blocker.unblock():
call_command('sync_translation_fields', '--noinput')
@pytest.fixture(autouse=True)
def auto_enable(settings):
settings.AUTO_ENABLED_EXTENSIONS = ['course']
## Instruction:
Remove some needless code from course extension tests
## Code After:
import pytest
from events.tests.conftest import (administrative_division, administrative_division_type, data_source, event, # noqa
location_id, minimal_event_dict, municipality, organization, place, user,
user_api_client, django_db_modify_db_settings, django_db_setup)
@pytest.fixture(autouse=True)
def auto_enable(settings):
settings.AUTO_ENABLED_EXTENSIONS = ['course']
|
# ... existing code ...
import pytest
from events.tests.conftest import (administrative_division, administrative_division_type, data_source, event, # noqa
location_id, minimal_event_dict, municipality, organization, place, user,
user_api_client, django_db_modify_db_settings, django_db_setup)
@pytest.fixture(autouse=True)
# ... rest of the code ...
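The simplification works because pytest treats fixtures imported into a conftest as if they were defined there, so the upstream django_db_setup replaces the local override wholesale. The # noqa is load-bearing: without it, linters flag the imported names as unused. The minimal shape of the pattern is just:

# conftest.py
from events.tests.conftest import django_db_setup  # noqa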
|
4117b767f48678542797d811cc1a8ea75f37c714
|
saleor/account/migrations/0040_auto_20200415_0443.py
|
saleor/account/migrations/0040_auto_20200415_0443.py
|
from django.db import migrations
def change_extension_permission_to_plugin_permission(apps, schema_editor):
permission = apps.get_model("auth", "Permission")
users = apps.get_model("account", "User")
plugin_permission = permission.objects.filter(
codename="manage_plugins", content_type__app_label="plugins"
).first()
extension_permission = permission.objects.filter(
codename="manage_plugins", content_type__app_label="extensions"
).first()
users = users.objects.filter(
user_permissions__content_type__app_label="extensions",
user_permissions__codename="manage_plugins",
)
if not plugin_permission or not extension_permission:
return
for user in users:
user.user_permissions.remove(extension_permission)
user.user_permissions.add(plugin_permission)
class Migration(migrations.Migration):
dependencies = [
("account", "0039_auto_20200221_0257"),
("plugins", "0001_initial"),
]
operations = [
migrations.RunPython(change_extension_permission_to_plugin_permission),
]
|
from django.db import migrations
def change_extension_permission_to_plugin_permission(apps, schema_editor):
permission = apps.get_model("auth", "Permission")
users = apps.get_model("account", "User")
plugin_permission = permission.objects.filter(
codename="manage_plugins", content_type__app_label="plugins"
).first()
extension_permission = permission.objects.filter(
codename="manage_plugins", content_type__app_label="extensions"
).first()
users = users.objects.filter(
user_permissions__content_type__app_label="extensions",
user_permissions__codename="manage_plugins",
)
if not plugin_permission or not extension_permission:
return
for user in users:
user.user_permissions.remove(extension_permission)
user.user_permissions.add(plugin_permission)
if extension_permission:
extension_permission.delete()
class Migration(migrations.Migration):
dependencies = [
("account", "0039_auto_20200221_0257"),
("plugins", "0001_initial"),
]
operations = [
migrations.RunPython(change_extension_permission_to_plugin_permission),
]
|
Remove unused permission from db
|
Remove unused permission from db
|
Python
|
bsd-3-clause
|
mociepka/saleor,mociepka/saleor,mociepka/saleor
|
from django.db import migrations
def change_extension_permission_to_plugin_permission(apps, schema_editor):
permission = apps.get_model("auth", "Permission")
users = apps.get_model("account", "User")
plugin_permission = permission.objects.filter(
codename="manage_plugins", content_type__app_label="plugins"
).first()
extension_permission = permission.objects.filter(
codename="manage_plugins", content_type__app_label="extensions"
).first()
users = users.objects.filter(
user_permissions__content_type__app_label="extensions",
user_permissions__codename="manage_plugins",
)
if not plugin_permission or not extension_permission:
return
for user in users:
user.user_permissions.remove(extension_permission)
user.user_permissions.add(plugin_permission)
+ if extension_permission:
+ extension_permission.delete()
+
class Migration(migrations.Migration):
dependencies = [
("account", "0039_auto_20200221_0257"),
("plugins", "0001_initial"),
]
operations = [
migrations.RunPython(change_extension_permission_to_plugin_permission),
]
|
Remove unused permission from db
|
## Code Before:
from django.db import migrations
def change_extension_permission_to_plugin_permission(apps, schema_editor):
permission = apps.get_model("auth", "Permission")
users = apps.get_model("account", "User")
plugin_permission = permission.objects.filter(
codename="manage_plugins", content_type__app_label="plugins"
).first()
extension_permission = permission.objects.filter(
codename="manage_plugins", content_type__app_label="extensions"
).first()
users = users.objects.filter(
user_permissions__content_type__app_label="extensions",
user_permissions__codename="manage_plugins",
)
if not plugin_permission or not extension_permission:
return
for user in users:
user.user_permissions.remove(extension_permission)
user.user_permissions.add(plugin_permission)
class Migration(migrations.Migration):
dependencies = [
("account", "0039_auto_20200221_0257"),
("plugins", "0001_initial"),
]
operations = [
migrations.RunPython(change_extension_permission_to_plugin_permission),
]
## Instruction:
Remove unused permission from db
## Code After:
from django.db import migrations
def change_extension_permission_to_plugin_permission(apps, schema_editor):
permission = apps.get_model("auth", "Permission")
users = apps.get_model("account", "User")
plugin_permission = permission.objects.filter(
codename="manage_plugins", content_type__app_label="plugins"
).first()
extension_permission = permission.objects.filter(
codename="manage_plugins", content_type__app_label="extensions"
).first()
users = users.objects.filter(
user_permissions__content_type__app_label="extensions",
user_permissions__codename="manage_plugins",
)
if not plugin_permission or not extension_permission:
return
for user in users:
user.user_permissions.remove(extension_permission)
user.user_permissions.add(plugin_permission)
if extension_permission:
extension_permission.delete()
class Migration(migrations.Migration):
dependencies = [
("account", "0039_auto_20200221_0257"),
("plugins", "0001_initial"),
]
operations = [
migrations.RunPython(change_extension_permission_to_plugin_permission),
]
|
# ... existing code ...
user.user_permissions.remove(extension_permission)
user.user_permissions.add(plugin_permission)
if extension_permission:
extension_permission.delete()
class Migration(migrations.Migration):
# ... rest of the code ...
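A small logical note on the addition: the early return already fired whenever extension_permission was falsy, so by the time the new block runs the if check is always true. An equivalent, slightly tighter tail for the migration (sketch only; the committed version is what actually shipped):

    for user in users:
        user.user_permissions.remove(extension_permission)
        user.user_permissions.add(plugin_permission)
    extension_permission.delete()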
|
d2a0d0d22a8369c99626ca754a337ea8076f7efa
|
aybu/core/models/migrations/versions/587c89cfa8ea_added_column_weight_.py
|
aybu/core/models/migrations/versions/587c89cfa8ea_added_column_weight_.py
|
# downgrade revision identifier, used by Alembic.
revision = '587c89cfa8ea'
down_revision = '2c0bfc379e01'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('files', sa.Column('weight', sa.Integer(),
nullable=False, default=0))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('files', 'weight')
### end Alembic commands ###
|
# downgrade revision identifier, used by Alembic.
revision = '587c89cfa8ea'
down_revision = '2c0bfc379e01'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('files', sa.Column('weight',
sa.Integer(),
nullable=True,
default=0))
connection = op.get_bind()
connection.execute('UPDATE files SET weight=0')
op.alter_column('files',
'weight',
existing_type=sa.Integer,
nullable=False)
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('files', 'weight')
### end Alembic commands ###
|
Fix bug in migration script
|
Fix bug in migration script
|
Python
|
apache-2.0
|
asidev/aybu-core
|
# downgrade revision identifier, used by Alembic.
revision = '587c89cfa8ea'
down_revision = '2c0bfc379e01'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
- op.add_column('files', sa.Column('weight', sa.Integer(),
+ op.add_column('files', sa.Column('weight',
+ sa.Integer(),
+ nullable=True,
- nullable=False, default=0))
+ default=0))
- ### end Alembic commands ###
+ connection = op.get_bind()
+ connection.execute('UPDATE files SET weight=0')
+ op.alter_column('files',
+ 'weight',
+ existing_type=sa.Integer,
+ nullable=False)
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('files', 'weight')
### end Alembic commands ###
|
Fix bug in migration script
|
## Code Before:
# downgrade revision identifier, used by Alembic.
revision = '587c89cfa8ea'
down_revision = '2c0bfc379e01'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('files', sa.Column('weight', sa.Integer(),
nullable=False, default=0))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('files', 'weight')
### end Alembic commands ###
## Instruction:
Fix bug in migration script
## Code After:
# downgrade revision identifier, used by Alembic.
revision = '587c89cfa8ea'
down_revision = '2c0bfc379e01'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('files', sa.Column('weight',
sa.Integer(),
nullable=True,
default=0))
connection = op.get_bind()
connection.execute('UPDATE files SET weight=0')
op.alter_column('files',
'weight',
existing_type=sa.Integer,
nullable=False)
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('files', 'weight')
### end Alembic commands ###
|
# ... existing code ...
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('files', sa.Column('weight',
sa.Integer(),
nullable=True,
default=0))
connection = op.get_bind()
connection.execute('UPDATE files SET weight=0')
op.alter_column('files',
'weight',
existing_type=sa.Integer,
nullable=False)
def downgrade():
# ... rest of the code ...
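The fix follows the standard Alembic recipe for adding a NOT NULL column to a table that already holds rows: add it nullable, backfill, then tighten the constraint. The same three steps against a generic table (table and column names illustrative):

import sqlalchemy as sa
from alembic import op

def upgrade():
    # step 1: add the column nullable so existing rows are legal
    op.add_column('items', sa.Column('rank', sa.Integer(), nullable=True))
    # step 2: backfill a default for the rows already present
    op.get_bind().execute('UPDATE items SET rank = 0')
    # step 3: now the NOT NULL constraint can be applied safely
    op.alter_column('items', 'rank',
                    existing_type=sa.Integer(),
                    nullable=False)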
|
ce2855d82331fc7bb1ffdb07761d6ad235a1c6c9
|
transport/tests/test_models.py
|
transport/tests/test_models.py
|
from django.test import TestCase
from org.models import Organization
from ..models import Bus
class BusModelTest(TestCase):
def setUp(self):
self.org = Organization.objects.create(
name='Some Org',
logo='/media/logos/some-org-logo.jpg',
description='We are a familiar condominium',
rules='Please check our conduct code page at https://some-url.foo'
)
self.bus = Bus.objects.create(
name='Bus 1',
organization=self.org
)
def test_str(self):
self.assertEqual('Bus 1', str(self.bus))
def test_can_create(self):
self.assertTrue(Bus.objects.exists())
|
from django.test import TestCase
from org.models import Organization
from ..models import Bus, Route
class BusModelTest(TestCase):
def setUp(self):
self.org = Organization.objects.create(
name='Some Org',
logo='/media/logos/some-org-logo.jpg',
description='We are a familiar condominium',
rules='Please check our conduct code page at https://some-url.foo'
)
self.bus = Bus.objects.create(
name='Bus 1',
organization=self.org
)
def test_str(self):
self.assertEqual('Bus 1', str(self.bus))
def test_can_create(self):
self.assertTrue(Bus.objects.exists())
class RouteModelTest(TestCase):
def setUp(self):
self.org = Organization.objects.create(
name='Some Org',
logo='/media/logos/some-org-logo.jpg',
description='We are a familiar condominium',
rules='Please check our conduct code page at https://some-url.foo'
)
self.route = Route.objects.create(
name='Route 1',
organization=self.org
)
def test_str(self):
self.assertEqual('Route 1', str(self.route))
def test_can_create(self):
self.assertTrue(Route.objects.exists())
|
Add some Route model tests
|
Add some Route model tests
|
Python
|
mit
|
arturfelipe/condobus,arturfelipe/condobus,arturfelipe/condobus,arturfelipe/condobus
|
from django.test import TestCase
from org.models import Organization
- from ..models import Bus
+ from ..models import Bus, Route
class BusModelTest(TestCase):
def setUp(self):
self.org = Organization.objects.create(
name='Some Org',
logo='/media/logos/some-org-logo.jpg',
description='We are a familiar condominium',
rules='Please check our conduct code page at https://some-url.foo'
)
self.bus = Bus.objects.create(
name='Bus 1',
organization=self.org
)
def test_str(self):
self.assertEqual('Bus 1', str(self.bus))
def test_can_create(self):
self.assertTrue(Bus.objects.exists())
+
+ class RouteModelTest(TestCase):
+
+ def setUp(self):
+ self.org = Organization.objects.create(
+ name='Some Org',
+ logo='/media/logos/some-org-logo.jpg',
+ description='We are a familiar condominium',
+ rules='Please check our conduct code page at https://some-url.foo'
+ )
+ self.route = Route.objects.create(
+ name='Route 1',
+ organization=self.org
+ )
+
+ def test_str(self):
+ self.assertEqual('Route 1', str(self.route))
+
+ def test_can_create(self):
+ self.assertTrue(Route.objects.exists())
+
|
Add some Route model tests
|
## Code Before:
from django.test import TestCase
from org.models import Organization
from ..models import Bus
class BusModelTest(TestCase):
def setUp(self):
self.org = Organization.objects.create(
name='Some Org',
logo='/media/logos/some-org-logo.jpg',
description='We are a familiar condominium',
rules='Please check our conduct code page at https://some-url.foo'
)
self.bus = Bus.objects.create(
name='Bus 1',
organization=self.org
)
def test_str(self):
self.assertEqual('Bus 1', str(self.bus))
def test_can_create(self):
self.assertTrue(Bus.objects.exists())
## Instruction:
Add some Route model tests
## Code After:
from django.test import TestCase
from org.models import Organization
from ..models import Bus, Route
class BusModelTest(TestCase):
def setUp(self):
self.org = Organization.objects.create(
name='Some Org',
logo='/media/logos/some-org-logo.jpg',
description='We are a familiar condominium',
rules='Please check our conduct code page at https://some-url.foo'
)
self.bus = Bus.objects.create(
name='Bus 1',
organization=self.org
)
def test_str(self):
self.assertEqual('Bus 1', str(self.bus))
def test_can_create(self):
self.assertTrue(Bus.objects.exists())
class RouteModelTest(TestCase):
def setUp(self):
self.org = Organization.objects.create(
name='Some Org',
logo='/media/logos/some-org-logo.jpg',
description='We are a familiar condominium',
rules='Please check our conduct code page at https://some-url.foo'
)
self.route = Route.objects.create(
name='Route 1',
organization=self.org
)
def test_str(self):
self.assertEqual('Route 1', str(self.route))
def test_can_create(self):
self.assertTrue(Route.objects.exists())
|
# ... existing code ...
from django.test import TestCase
from org.models import Organization
from ..models import Bus, Route
class BusModelTest(TestCase):
# ... modified code ...
def test_can_create(self):
self.assertTrue(Bus.objects.exists())
class RouteModelTest(TestCase):
def setUp(self):
self.org = Organization.objects.create(
name='Some Org',
logo='/media/logos/some-org-logo.jpg',
description='We are a familiar condominium',
rules='Please check our conduct code page at https://some-url.foo'
)
self.route = Route.objects.create(
name='Route 1',
organization=self.org
)
def test_str(self):
self.assertEqual('Route 1', str(self.route))
def test_can_create(self):
self.assertTrue(Route.objects.exists())
# ... rest of the code ...
|
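Both test classes above create an identical Organization fixture in setUp. If more models are added, a small mixin keeps that fixture in one place; a hedged sketch (the mixin name is illustrative and it assumes the same imports as the test module):

class OrgFixtureMixin(object):
    def create_org(self):
        # Single source of truth for the Organization fixture.
        return Organization.objects.create(
            name='Some Org',
            logo='/media/logos/some-org-logo.jpg',
            description='We are a familiar condominium',
            rules='Please check our conduct code page at https://some-url.foo'
        )

class RouteModelTest(OrgFixtureMixin, TestCase):
    def setUp(self):
        # The shared helper replaces the duplicated Organization.objects.create block.
        self.route = Route.objects.create(name='Route 1', organization=self.create_org())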
aff8945aef3f10fa9d1243b25301e84611c27422
|
aleph/views/status_api.py
|
aleph/views/status_api.py
|
import logging
from flask import Blueprint, request
from aleph.model import Collection
from aleph.queues import get_active_collection_status
from aleph.views.util import jsonify
from aleph.logic import resolver
from aleph.views.util import require
log = logging.getLogger(__name__)
blueprint = Blueprint('status_api', __name__)
@blueprint.route('/api/2/status', methods=['GET'])
def status():
require(request.authz.logged_in)
status = get_active_collection_status()
active_collections = status.pop('datasets')
active_foreign_ids = set(active_collections.keys())
collections = request.authz.collections(request.authz.READ)
for collection_id in collections:
resolver.queue(request, Collection, collection_id)
resolver.resolve(request)
results = []
for collection_id in collections:
data = resolver.get(request, Collection, collection_id)
if data is None:
continue
fid = data['foreign_id']
if fid in active_foreign_ids:
result = active_collections[fid]
result['collection'] = data
result['id'] = fid
results.append(result)
status['results'] = results
return jsonify(status)
|
import logging
from flask import Blueprint, request
from aleph.model import Collection
from aleph.queues import get_active_collection_status
from aleph.views.util import jsonify
from aleph.views.util import require
log = logging.getLogger(__name__)
blueprint = Blueprint('status_api', __name__)
@blueprint.route('/api/2/status', methods=['GET'])
def status():
require(request.authz.logged_in)
status = get_active_collection_status()
active_collections = status.pop('datasets')
active_foreign_ids = set(active_collections.keys())
collections = request.authz.collections(request.authz.READ)
results = []
for fid in active_foreign_ids:
collection = Collection.by_foreign_id(fid)
if collection is None:
continue
if collection.id in collections:
result = active_collections[fid]
result['collection'] = collection.to_dict()
result['id'] = fid
results.append(result)
status['results'] = results
return jsonify(status)
|
Load only the active collections instead of all accessible collections
|
Load only the active collections instead of all accessible collections
|
Python
|
mit
|
alephdata/aleph,alephdata/aleph,pudo/aleph,alephdata/aleph,pudo/aleph,pudo/aleph,alephdata/aleph,alephdata/aleph
|
import logging
from flask import Blueprint, request
from aleph.model import Collection
from aleph.queues import get_active_collection_status
from aleph.views.util import jsonify
- from aleph.logic import resolver
from aleph.views.util import require
log = logging.getLogger(__name__)
blueprint = Blueprint('status_api', __name__)
@blueprint.route('/api/2/status', methods=['GET'])
def status():
require(request.authz.logged_in)
status = get_active_collection_status()
active_collections = status.pop('datasets')
active_foreign_ids = set(active_collections.keys())
collections = request.authz.collections(request.authz.READ)
- for collection_id in collections:
- resolver.queue(request, Collection, collection_id)
- resolver.resolve(request)
results = []
- for collection_id in collections:
- data = resolver.get(request, Collection, collection_id)
+ for fid in active_foreign_ids:
+ collection = Collection.by_foreign_id(fid)
- if data is None:
+ if collection is None:
continue
+ if collection.id in collections:
- fid = data['foreign_id']
- if fid in active_foreign_ids:
result = active_collections[fid]
- result['collection'] = data
+ result['collection'] = collection.to_dict()
result['id'] = fid
results.append(result)
status['results'] = results
return jsonify(status)
|
Load only the active collections instead of all accessible collections
|
## Code Before:
import logging
from flask import Blueprint, request
from aleph.model import Collection
from aleph.queues import get_active_collection_status
from aleph.views.util import jsonify
from aleph.logic import resolver
from aleph.views.util import require
log = logging.getLogger(__name__)
blueprint = Blueprint('status_api', __name__)
@blueprint.route('/api/2/status', methods=['GET'])
def status():
require(request.authz.logged_in)
status = get_active_collection_status()
active_collections = status.pop('datasets')
active_foreign_ids = set(active_collections.keys())
collections = request.authz.collections(request.authz.READ)
for collection_id in collections:
resolver.queue(request, Collection, collection_id)
resolver.resolve(request)
results = []
for collection_id in collections:
data = resolver.get(request, Collection, collection_id)
if data is None:
continue
fid = data['foreign_id']
if fid in active_foreign_ids:
result = active_collections[fid]
result['collection'] = data
result['id'] = fid
results.append(result)
status['results'] = results
return jsonify(status)
## Instruction:
Load only the active collections instead of all accessible collections
## Code After:
import logging
from flask import Blueprint, request
from aleph.model import Collection
from aleph.queues import get_active_collection_status
from aleph.views.util import jsonify
from aleph.views.util import require
log = logging.getLogger(__name__)
blueprint = Blueprint('status_api', __name__)
@blueprint.route('/api/2/status', methods=['GET'])
def status():
require(request.authz.logged_in)
status = get_active_collection_status()
active_collections = status.pop('datasets')
active_foreign_ids = set(active_collections.keys())
collections = request.authz.collections(request.authz.READ)
results = []
for fid in active_foreign_ids:
collection = Collection.by_foreign_id(fid)
if collection is None:
continue
if collection.id in collections:
result = active_collections[fid]
result['collection'] = collection.to_dict()
result['id'] = fid
results.append(result)
status['results'] = results
return jsonify(status)
|
// ... existing code ...
from aleph.model import Collection
from aleph.queues import get_active_collection_status
from aleph.views.util import jsonify
from aleph.views.util import require
// ... modified code ...
active_collections = status.pop('datasets')
active_foreign_ids = set(active_collections.keys())
collections = request.authz.collections(request.authz.READ)
results = []
for fid in active_foreign_ids:
collection = Collection.by_foreign_id(fid)
if collection is None:
continue
if collection.id in collections:
result = active_collections[fid]
result['collection'] = collection.to_dict()
result['id'] = fid
results.append(result)
status['results'] = results
// ... rest of the code ...
|
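The rewrite above also changes the cost profile: the old code resolved every collection the user could read, while the new one performs one lookup per active dataset, typically a much smaller set. The permission filter in isolation, as a generator sketch reusing the same Collection.by_foreign_id helper (the function name is illustrative):

def iter_active_readable(active_foreign_ids, readable_ids):
    for fid in active_foreign_ids:
        collection = Collection.by_foreign_id(fid)
        # Skip datasets that no longer exist or that the user cannot read.
        if collection is None or collection.id not in readable_ids:
            continue
        yield fid, collection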
ff9d6bc72673843fcdf6f7e0d866beec5bdb45f0
|
mezzanine/accounts/models.py
|
mezzanine/accounts/models.py
|
from django.db.models.signals import post_save
from django.dispatch import receiver
from mezzanine.utils.models import get_user_model
from mezzanine.accounts import get_profile_model, get_profile_user_fieldname
# Signal for ensuring users have a profile instance.
Profile = get_profile_model()
User = get_user_model()
if Profile:
user_field = get_profile_user_fieldname()
@receiver(post_save, sender=User)
def user_saved(sender=None, instance=None, **kwargs):
Profile.objects.get_or_create(**{str(user_field): instance})
|
from django.db import connection
from django.db.models.signals import post_save
from django.db.utils import DatabaseError
from django.dispatch import receiver
from mezzanine.utils.models import get_user_model
from mezzanine.accounts import get_profile_model, get_profile_user_fieldname
# Signal for ensuring users have a profile instance.
Profile = get_profile_model()
User = get_user_model()
if Profile:
user_field = get_profile_user_fieldname()
@receiver(post_save, sender=User)
def user_saved(sender=None, instance=None, **kwargs):
try:
Profile.objects.get_or_create(**{str(user_field): instance})
except DatabaseError:
# User creation in initial syncdb may have been triggered,
# while profile model is under migration management and
# doesn't exist yet. We close the connection so that it
# gets re-opened, allowing syncdb to continue and complete.
connection.close()
|
Allow initial user creation in syncdb when a profile model is managed by migrations and doesn't yet exist.
|
Allow initial user creation in syncdb when a profile model is managed by migrations and doesn't yet exist.
|
Python
|
bsd-2-clause
|
jjz/mezzanine,cccs-web/mezzanine,stephenmcd/mezzanine,Kniyl/mezzanine,gradel/mezzanine,mush42/mezzanine,dekomote/mezzanine-modeltranslation-backport,eino-makitalo/mezzanine,dsanders11/mezzanine,cccs-web/mezzanine,christianwgd/mezzanine,scarcry/snm-mezzanine,theclanks/mezzanine,webounty/mezzanine,dovydas/mezzanine,nikolas/mezzanine,douglaskastle/mezzanine,readevalprint/mezzanine,dovydas/mezzanine,emile2016/mezzanine,ZeroXn/mezzanine,vladir/mezzanine,damnfine/mezzanine,vladir/mezzanine,damnfine/mezzanine,stbarnabas/mezzanine,frankchin/mezzanine,stephenmcd/mezzanine,saintbird/mezzanine,Cajoline/mezzanine,Kniyl/mezzanine,wyzex/mezzanine,jjz/mezzanine,readevalprint/mezzanine,viaregio/mezzanine,jerivas/mezzanine,promil23/mezzanine,jerivas/mezzanine,spookylukey/mezzanine,viaregio/mezzanine,nikolas/mezzanine,douglaskastle/mezzanine,Kniyl/mezzanine,sjuxax/mezzanine,webounty/mezzanine,promil23/mezzanine,PegasusWang/mezzanine,biomassives/mezzanine,scarcry/snm-mezzanine,PegasusWang/mezzanine,SoLoHiC/mezzanine,nikolas/mezzanine,agepoly/mezzanine,fusionbox/mezzanine,frankier/mezzanine,PegasusWang/mezzanine,fusionbox/mezzanine,geodesign/mezzanine,sjdines/mezzanine,batpad/mezzanine,AlexHill/mezzanine,scarcry/snm-mezzanine,molokov/mezzanine,viaregio/mezzanine,dekomote/mezzanine-modeltranslation-backport,wbtuomela/mezzanine,promil23/mezzanine,wbtuomela/mezzanine,eino-makitalo/mezzanine,wyzex/mezzanine,saintbird/mezzanine,stephenmcd/mezzanine,joshcartme/mezzanine,gradel/mezzanine,wyzex/mezzanine,adrian-the-git/mezzanine,emile2016/mezzanine,geodesign/mezzanine,frankier/mezzanine,dovydas/mezzanine,industrydive/mezzanine,emile2016/mezzanine,stbarnabas/mezzanine,agepoly/mezzanine,SoLoHiC/mezzanine,readevalprint/mezzanine,SoLoHiC/mezzanine,dustinrb/mezzanine,biomassives/mezzanine,christianwgd/mezzanine,tuxinhang1989/mezzanine,theclanks/mezzanine,sjdines/mezzanine,tuxinhang1989/mezzanine,webounty/mezzanine,Cajoline/mezzanine,dsanders11/mezzanine,jjz/mezzanine,Skytorn86/mezzanine,ryneeverett/mezzanine,joshcartme/mezzanine,Skytorn86/mezzanine,biomassives/mezzanine,agepoly/mezzanine,spookylukey/mezzanine,ZeroXn/mezzanine,spookylukey/mezzanine,industrydive/mezzanine,joshcartme/mezzanine,eino-makitalo/mezzanine,mush42/mezzanine,christianwgd/mezzanine,ryneeverett/mezzanine,jerivas/mezzanine,dustinrb/mezzanine,theclanks/mezzanine,sjdines/mezzanine,vladir/mezzanine,Cicero-Zhao/mezzanine,adrian-the-git/mezzanine,Cicero-Zhao/mezzanine,industrydive/mezzanine,gradel/mezzanine,dekomote/mezzanine-modeltranslation-backport,sjuxax/mezzanine,douglaskastle/mezzanine,adrian-the-git/mezzanine,AlexHill/mezzanine,damnfine/mezzanine,Cajoline/mezzanine,frankchin/mezzanine,molokov/mezzanine,dsanders11/mezzanine,ryneeverett/mezzanine,tuxinhang1989/mezzanine,wbtuomela/mezzanine,ZeroXn/mezzanine,Skytorn86/mezzanine,batpad/mezzanine,mush42/mezzanine,frankchin/mezzanine,saintbird/mezzanine,sjuxax/mezzanine,frankier/mezzanine,molokov/mezzanine,geodesign/mezzanine,dustinrb/mezzanine
|
+
+ from django.db import connection
from django.db.models.signals import post_save
+ from django.db.utils import DatabaseError
from django.dispatch import receiver
from mezzanine.utils.models import get_user_model
from mezzanine.accounts import get_profile_model, get_profile_user_fieldname
# Signal for ensuring users have a profile instance.
Profile = get_profile_model()
User = get_user_model()
if Profile:
user_field = get_profile_user_fieldname()
@receiver(post_save, sender=User)
def user_saved(sender=None, instance=None, **kwargs):
+ try:
- Profile.objects.get_or_create(**{str(user_field): instance})
+ Profile.objects.get_or_create(**{str(user_field): instance})
+ except DatabaseError:
+ # User creation in initial syncdb may have been triggered,
+ # while profile model is under migration management and
+ # doesn't exist yet. We close the connection so that it
+ # gets re-opened, allowing syncdb to continue and complete.
+ connection.close()
|
Allow initial user creation in syncdb when a profile model is managed by migrations and doesn't yet exist.
|
## Code Before:
from django.db.models.signals import post_save
from django.dispatch import receiver
from mezzanine.utils.models import get_user_model
from mezzanine.accounts import get_profile_model, get_profile_user_fieldname
# Signal for ensuring users have a profile instance.
Profile = get_profile_model()
User = get_user_model()
if Profile:
user_field = get_profile_user_fieldname()
@receiver(post_save, sender=User)
def user_saved(sender=None, instance=None, **kwargs):
Profile.objects.get_or_create(**{str(user_field): instance})
## Instruction:
Allow initial user creation in syncdb when a profile model is managed by migrations and doesn't yet exist.
## Code After:
from django.db import connection
from django.db.models.signals import post_save
from django.db.utils import DatabaseError
from django.dispatch import receiver
from mezzanine.utils.models import get_user_model
from mezzanine.accounts import get_profile_model, get_profile_user_fieldname
# Signal for ensuring users have a profile instance.
Profile = get_profile_model()
User = get_user_model()
if Profile:
user_field = get_profile_user_fieldname()
@receiver(post_save, sender=User)
def user_saved(sender=None, instance=None, **kwargs):
try:
Profile.objects.get_or_create(**{str(user_field): instance})
except DatabaseError:
# User creation in initial syncdb may have been triggered,
# while profile model is under migration management and
# doesn't exist yet. We close the connection so that it
# gets re-opened, allowing syncdb to continue and complete.
connection.close()
|
// ... existing code ...
from django.db import connection
from django.db.models.signals import post_save
from django.db.utils import DatabaseError
from django.dispatch import receiver
from mezzanine.utils.models import get_user_model
// ... modified code ...
@receiver(post_save, sender=User)
def user_saved(sender=None, instance=None, **kwargs):
try:
Profile.objects.get_or_create(**{str(user_field): instance})
except DatabaseError:
# User creation in initial syncdb may have been triggered,
# while profile model is under migration management and
# doesn't exist yet. We close the connection so that it
# gets re-opened, allowing syncdb to continue and complete.
connection.close()
// ... rest of the code ...
|
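The comment in the fix explains the mechanism: if the profile table does not exist yet, the failed query leaves the shared connection in an errored state, and closing it lets Django lazily reopen a clean one on the next query so syncdb can proceed. The same defensive pattern extracted into a helper, as a sketch (the helper name is hypothetical, not part of Mezzanine):

from django.db import connection
from django.db.utils import DatabaseError

def call_ignoring_missing_tables(fn, *args, **kwargs):
    try:
        return fn(*args, **kwargs)
    except DatabaseError:
        # Django reopens the connection lazily on the next query,
        # discarding the failed transaction state.
        connection.close()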
c6cb543f35356769dcc0f7fedb099a160e267473
|
run_tests.py
|
run_tests.py
|
import os, sys, re, shutil
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
# Use psycopg2cffi for PyPy
try:
import psycopg2 # noqa
except ImportError:
# Fall back to psycopg2cffi
from psycopg2cffi import compat
compat.register()
# Set up Django
import django
from django.core.management import call_command
django.setup()
# Derive test names
names = next((a for a in sys.argv[1:] if not a.startswith('-')), None)
if not names:
names = 'tests'
elif re.search(r'^\d+', names):
names = 'tests.tests.IssueTests.test_' + names
elif not names.startswith('tests.'):
names = 'tests.tests.' + names
# NOTE: we create migrations each time since they depend on type of database,
# python and django versions
try:
shutil.rmtree('tests/migrations', True)
call_command('makemigrations', 'tests', verbosity=2 if '-v' in sys.argv else 0)
call_command('test', names, failfast='-x' in sys.argv, verbosity=2 if '-v' in sys.argv else 1)
finally:
shutil.rmtree('tests/migrations')
|
import os, sys, re, shutil
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
# Use psycopg2cffi for PyPy
try:
import psycopg2 # noqa
except ImportError:
# Fall back to psycopg2cffi
try:
from psycopg2cffi import compat
compat.register()
except ImportError:
# Hope we are not testing against PostgreSQL :)
pass
# Set up Django
import django
from django.core.management import call_command
django.setup()
# Derive test names
names = next((a for a in sys.argv[1:] if not a.startswith('-')), None)
if not names:
names = 'tests'
elif re.search(r'^\d+', names):
names = 'tests.tests.IssueTests.test_' + names
elif not names.startswith('tests.'):
names = 'tests.tests.' + names
# NOTE: we create migrations each time since they depend on type of database,
# python and django versions
try:
shutil.rmtree('tests/migrations', True)
call_command('makemigrations', 'tests', verbosity=2 if '-v' in sys.argv else 0)
call_command('test', names, failfast='-x' in sys.argv, verbosity=2 if '-v' in sys.argv else 1)
finally:
shutil.rmtree('tests/migrations')
|
Stop requiring psycopg2 to run tests
|
Stop requiring psycopg2 to run tests
|
Python
|
bsd-3-clause
|
LPgenerator/django-cacheops,Suor/django-cacheops
|
import os, sys, re, shutil
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
# Use psycopg2cffi for PyPy
try:
import psycopg2 # noqa
except ImportError:
# Fall back to psycopg2cffi
+ try:
- from psycopg2cffi import compat
+ from psycopg2cffi import compat
- compat.register()
+ compat.register()
+ except ImportError:
+ # Hope we are not testing against PostgreSQL :)
+ pass
# Set up Django
import django
from django.core.management import call_command
django.setup()
# Derive test names
names = next((a for a in sys.argv[1:] if not a.startswith('-')), None)
if not names:
names = 'tests'
elif re.search(r'^\d+', names):
names = 'tests.tests.IssueTests.test_' + names
elif not names.startswith('tests.'):
names = 'tests.tests.' + names
# NOTE: we create migrations each time since they depend on type of database,
# python and django versions
try:
shutil.rmtree('tests/migrations', True)
call_command('makemigrations', 'tests', verbosity=2 if '-v' in sys.argv else 0)
call_command('test', names, failfast='-x' in sys.argv, verbosity=2 if '-v' in sys.argv else 1)
finally:
shutil.rmtree('tests/migrations')
|
Stop requiring psycopg2 to run tests
|
## Code Before:
import os, sys, re, shutil
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
# Use psycopg2cffi for PyPy
try:
import psycopg2 # noqa
except ImportError:
# Fall back to psycopg2cffi
from psycopg2cffi import compat
compat.register()
# Set up Django
import django
from django.core.management import call_command
django.setup()
# Derive test names
names = next((a for a in sys.argv[1:] if not a.startswith('-')), None)
if not names:
names = 'tests'
elif re.search(r'^\d+', names):
names = 'tests.tests.IssueTests.test_' + names
elif not names.startswith('tests.'):
names = 'tests.tests.' + names
# NOTE: we create migrations each time since they depend on type of database,
# python and django versions
try:
shutil.rmtree('tests/migrations', True)
call_command('makemigrations', 'tests', verbosity=2 if '-v' in sys.argv else 0)
call_command('test', names, failfast='-x' in sys.argv, verbosity=2 if '-v' in sys.argv else 1)
finally:
shutil.rmtree('tests/migrations')
## Instruction:
Stop requiring psycopg2 to run tests
## Code After:
import os, sys, re, shutil
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
# Use psycopg2cffi for PyPy
try:
import psycopg2 # noqa
except ImportError:
# Fall back to psycopg2cffi
try:
from psycopg2cffi import compat
compat.register()
except ImportError:
# Hope we are not testing against PostgreSQL :)
pass
# Set up Django
import django
from django.core.management import call_command
django.setup()
# Derive test names
names = next((a for a in sys.argv[1:] if not a.startswith('-')), None)
if not names:
names = 'tests'
elif re.search(r'^\d+', names):
names = 'tests.tests.IssueTests.test_' + names
elif not names.startswith('tests.'):
names = 'tests.tests.' + names
# NOTE: we create migrations each time since they depend on type of database,
# python and django versions
try:
shutil.rmtree('tests/migrations', True)
call_command('makemigrations', 'tests', verbosity=2 if '-v' in sys.argv else 0)
call_command('test', names, failfast='-x' in sys.argv, verbosity=2 if '-v' in sys.argv else 1)
finally:
shutil.rmtree('tests/migrations')
|
# ... existing code ...
import psycopg2 # noqa
except ImportError:
# Fall back to psycopg2cffi
try:
from psycopg2cffi import compat
compat.register()
except ImportError:
# Hope we are not testing against PostgreSQL :)
pass
# Set up Django
# ... rest of the code ...
|
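The nested try/except above is the usual optional-dependency import dance: prefer the C driver, fall back to the cffi build (whose compat.register() installs it under the psycopg2 name), and tolerate having neither when the configured database is not PostgreSQL. The same logic as a reusable function, as a sketch (the function name is illustrative):

def register_postgres_driver():
    try:
        import psycopg2  # noqa
        return 'psycopg2'
    except ImportError:
        try:
            from psycopg2cffi import compat
            compat.register()  # makes 'import psycopg2' resolve to psycopg2cffi
            return 'psycopg2cffi'
        except ImportError:
            return None  # acceptable unless tests actually target PostgreSQL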
824c8cd3eb563de60ddf13fac1f7ca1341aa01f1
|
astral/api/tests/test_streams.py
|
astral/api/tests/test_streams.py
|
from tornado.httpclient import HTTPRequest
from nose.tools import eq_, ok_
import json
import faker
from astral.api.tests import BaseTest
from astral.models import Stream
from astral.models.tests.factories import StreamFactory
class StreamsHandlerTest(BaseTest):
def test_get_streams(self):
[StreamFactory() for _ in range(3)]
response = self.fetch('/streams')
eq_(response.code, 200)
result = json.loads(response.body)
ok_('streams' in result)
for stream in result['streams']:
ok_(Stream.get_by(name=stream['name']))
def test_create_stream(self):
data = {'name': faker.lorem.sentence()}
eq_(Stream.get_by(name=data['name']), None)
self.http_client.fetch(HTTPRequest(
self.get_url('/streams'), 'POST', body=json.dumps(data)), self.stop)
response = self.wait()
eq_(response.code, 200)
ok_(Stream.get_by(name=data['name']))
|
from tornado.httpclient import HTTPRequest
from nose.tools import eq_, ok_
import json
import faker
from astral.api.tests import BaseTest
from astral.models import Stream
from astral.models.tests.factories import StreamFactory
class StreamsHandlerTest(BaseTest):
def test_get_streams(self):
[StreamFactory() for _ in range(3)]
response = self.fetch('/streams')
eq_(response.code, 200)
result = json.loads(response.body)
ok_('streams' in result)
for stream in result['streams']:
ok_(Stream.get_by(name=stream['name']))
def test_create_stream(self):
data = {'name': faker.lorem.sentence()}
eq_(Stream.get_by(name=data['name']), None)
self.http_client.fetch(HTTPRequest(
self.get_url('/streams'), 'POST', body=json.dumps(data),
follow_redirects=False), self.stop)
response = self.wait()
eq_(response.code, 302)
ok_(Stream.get_by(name=data['name']))
|
Update tests for new redirect-after-create stream.
|
Update tests for new redirect-after-create stream.
|
Python
|
mit
|
peplin/astral
|
from tornado.httpclient import HTTPRequest
from nose.tools import eq_, ok_
import json
import faker
from astral.api.tests import BaseTest
from astral.models import Stream
from astral.models.tests.factories import StreamFactory
class StreamsHandlerTest(BaseTest):
def test_get_streams(self):
[StreamFactory() for _ in range(3)]
response = self.fetch('/streams')
eq_(response.code, 200)
result = json.loads(response.body)
ok_('streams' in result)
for stream in result['streams']:
ok_(Stream.get_by(name=stream['name']))
def test_create_stream(self):
data = {'name': faker.lorem.sentence()}
eq_(Stream.get_by(name=data['name']), None)
self.http_client.fetch(HTTPRequest(
- self.get_url('/streams'), 'POST', body=json.dumps(data)), self.stop)
+ self.get_url('/streams'), 'POST', body=json.dumps(data),
+ follow_redirects=False), self.stop)
response = self.wait()
- eq_(response.code, 200)
+ eq_(response.code, 302)
ok_(Stream.get_by(name=data['name']))
|
Update tests for new redirect-after-create stream.
|
## Code Before:
from tornado.httpclient import HTTPRequest
from nose.tools import eq_, ok_
import json
import faker
from astral.api.tests import BaseTest
from astral.models import Stream
from astral.models.tests.factories import StreamFactory
class StreamsHandlerTest(BaseTest):
def test_get_streams(self):
[StreamFactory() for _ in range(3)]
response = self.fetch('/streams')
eq_(response.code, 200)
result = json.loads(response.body)
ok_('streams' in result)
for stream in result['streams']:
ok_(Stream.get_by(name=stream['name']))
def test_create_stream(self):
data = {'name': faker.lorem.sentence()}
eq_(Stream.get_by(name=data['name']), None)
self.http_client.fetch(HTTPRequest(
self.get_url('/streams'), 'POST', body=json.dumps(data)), self.stop)
response = self.wait()
eq_(response.code, 200)
ok_(Stream.get_by(name=data['name']))
## Instruction:
Update tests for new redirect-after-create stream.
## Code After:
from tornado.httpclient import HTTPRequest
from nose.tools import eq_, ok_
import json
import faker
from astral.api.tests import BaseTest
from astral.models import Stream
from astral.models.tests.factories import StreamFactory
class StreamsHandlerTest(BaseTest):
def test_get_streams(self):
[StreamFactory() for _ in range(3)]
response = self.fetch('/streams')
eq_(response.code, 200)
result = json.loads(response.body)
ok_('streams' in result)
for stream in result['streams']:
ok_(Stream.get_by(name=stream['name']))
def test_create_stream(self):
data = {'name': faker.lorem.sentence()}
eq_(Stream.get_by(name=data['name']), None)
self.http_client.fetch(HTTPRequest(
self.get_url('/streams'), 'POST', body=json.dumps(data),
follow_redirects=False), self.stop)
response = self.wait()
eq_(response.code, 302)
ok_(Stream.get_by(name=data['name']))
|
# ... existing code ...
data = {'name': faker.lorem.sentence()}
eq_(Stream.get_by(name=data['name']), None)
self.http_client.fetch(HTTPRequest(
self.get_url('/streams'), 'POST', body=json.dumps(data),
follow_redirects=False), self.stop)
response = self.wait()
eq_(response.code, 302)
ok_(Stream.get_by(name=data['name']))
# ... rest of the code ...
|
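The two changed lines work together: Tornado's HTTP client follows redirects by default, so without follow_redirects=False the fetch would report the status of the page the 302 points at rather than the 302 itself. The request construction in isolation, as a sketch (the helper name is illustrative):

import json
from tornado.httpclient import HTTPRequest

def build_create_request(url, payload):
    # follow_redirects=False surfaces the redirect status itself,
    # letting the test assert response.code == 302.
    return HTTPRequest(url, 'POST', body=json.dumps(payload),
                       follow_redirects=False)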
ba4589e727a49486134e0cceab842510be9661f4
|
mobile_app_connector/models/privacy_statement.py
|
mobile_app_connector/models/privacy_statement.py
|
from odoo import models, fields
class PrivacyStatementAgreement(models.Model):
_inherit = 'privacy.statement.agreement'
origin_signature = fields.Selection(
selection_add=[('mobile_app', 'Mobile App Registration')])
def mobile_get_privacy_notice(self, language, **params):
return {'PrivacyNotice': self.env['compassion.privacy.statement']
.with_context(lang=language)
.sudo().search([], limit=1).text}
|
from ..controllers.mobile_app_controller import _get_lang
from odoo import models, fields
class PrivacyStatementAgreement(models.Model):
_inherit = 'privacy.statement.agreement'
origin_signature = fields.Selection(
selection_add=[('mobile_app', 'Mobile App Registration')])
def mobile_get_privacy_notice(self, **params):
lang = _get_lang(self, params)
return {'PrivacyNotice': self.env['compassion.privacy.statement']
.with_context(lang=lang)
.sudo().search([], limit=1).text}
|
FIX language of privacy statement
|
FIX language of privacy statement
|
Python
|
agpl-3.0
|
eicher31/compassion-modules,ecino/compassion-modules,CompassionCH/compassion-modules,ecino/compassion-modules,ecino/compassion-modules,CompassionCH/compassion-modules,CompassionCH/compassion-modules,ecino/compassion-modules,eicher31/compassion-modules,eicher31/compassion-modules,ecino/compassion-modules,CompassionCH/compassion-modules,eicher31/compassion-modules,eicher31/compassion-modules
|
-
+ from ..controllers.mobile_app_controller import _get_lang
from odoo import models, fields
class PrivacyStatementAgreement(models.Model):
_inherit = 'privacy.statement.agreement'
origin_signature = fields.Selection(
selection_add=[('mobile_app', 'Mobile App Registration')])
- def mobile_get_privacy_notice(self, language, **params):
+ def mobile_get_privacy_notice(self, **params):
+ lang = _get_lang(self, params)
return {'PrivacyNotice': self.env['compassion.privacy.statement']
- .with_context(lang=language)
+ .with_context(lang=lang)
.sudo().search([], limit=1).text}
|
FIX language of privacy statement
|
## Code Before:
from odoo import models, fields
class PrivacyStatementAgreement(models.Model):
_inherit = 'privacy.statement.agreement'
origin_signature = fields.Selection(
selection_add=[('mobile_app', 'Mobile App Registration')])
def mobile_get_privacy_notice(self, language, **params):
return {'PrivacyNotice': self.env['compassion.privacy.statement']
.with_context(lang=language)
.sudo().search([], limit=1).text}
## Instruction:
FIX language of privacy statement
## Code After:
from ..controllers.mobile_app_controller import _get_lang
from odoo import models, fields
class PrivacyStatementAgreement(models.Model):
_inherit = 'privacy.statement.agreement'
origin_signature = fields.Selection(
selection_add=[('mobile_app', 'Mobile App Registration')])
def mobile_get_privacy_notice(self, **params):
lang = _get_lang(self, params)
return {'PrivacyNotice': self.env['compassion.privacy.statement']
.with_context(lang=lang)
.sudo().search([], limit=1).text}
|
# ... existing code ...
from ..controllers.mobile_app_controller import _get_lang
from odoo import models, fields
# ... modified code ...
origin_signature = fields.Selection(
selection_add=[('mobile_app', 'Mobile App Registration')])
def mobile_get_privacy_notice(self, **params):
lang = _get_lang(self, params)
return {'PrivacyNotice': self.env['compassion.privacy.statement']
.with_context(lang=lang)
.sudo().search([], limit=1).text}
# ... rest of the code ...
|
9df2bae691e8613794be3713194db2420fc75385
|
gapipy/resources/dossier/transport_dossier.py
|
gapipy/resources/dossier/transport_dossier.py
|
from __future__ import unicode_literals
from ..base import Resource
from .details import DossierDetail, DossierDetailsMixin
from .dossier_features import DossierFeature
class TransportDossier(Resource, DossierDetailsMixin):
_resource_name = 'transport_dossiers'
_as_is_fields = [
'id', 'href', 'features', 'capacity', 'private', 'name',
'dossier_segment',
]
_model_collection_fields = [
('details', DossierDetail),
('features', DossierFeature),
]
_date_time_fields_local = ['date_created', 'date_last_modified']
|
from __future__ import unicode_literals
from ..base import Resource
from .details import DossierDetail, DossierDetailsMixin
from .dossier_features import DossierFeature
class TransportDossier(Resource, DossierDetailsMixin):
_resource_name = 'transport_dossiers'
_as_is_fields = [
'id', 'href', 'capacity', 'private', 'name',
'dossier_segment',
]
_model_collection_fields = [
('details', DossierDetail),
('features', DossierFeature),
]
_date_time_fields_local = ['date_created', 'date_last_modified']
|
Remove features from as-is fields on TransportDossier
|
Remove features from as-is fields on TransportDossier
Reflected as a model_collection_field
|
Python
|
mit
|
gadventures/gapipy
|
from __future__ import unicode_literals
from ..base import Resource
from .details import DossierDetail, DossierDetailsMixin
from .dossier_features import DossierFeature
class TransportDossier(Resource, DossierDetailsMixin):
_resource_name = 'transport_dossiers'
_as_is_fields = [
- 'id', 'href', 'features', 'capacity', 'private', 'name',
+ 'id', 'href', 'capacity', 'private', 'name',
'dossier_segment',
]
_model_collection_fields = [
('details', DossierDetail),
('features', DossierFeature),
]
_date_time_fields_local = ['date_created', 'date_last_modified']
|
Remove features from as-is fields on TransportDossier
|
## Code Before:
from __future__ import unicode_literals
from ..base import Resource
from .details import DossierDetail, DossierDetailsMixin
from .dossier_features import DossierFeature
class TransportDossier(Resource, DossierDetailsMixin):
_resource_name = 'transport_dossiers'
_as_is_fields = [
'id', 'href', 'features', 'capacity', 'private', 'name',
'dossier_segment',
]
_model_collection_fields = [
('details', DossierDetail),
('features', DossierFeature),
]
_date_time_fields_local = ['date_created', 'date_last_modified']
## Instruction:
Remove features from as-is fields on TransportDossier
## Code After:
from __future__ import unicode_literals
from ..base import Resource
from .details import DossierDetail, DossierDetailsMixin
from .dossier_features import DossierFeature
class TransportDossier(Resource, DossierDetailsMixin):
_resource_name = 'transport_dossiers'
_as_is_fields = [
'id', 'href', 'capacity', 'private', 'name',
'dossier_segment',
]
_model_collection_fields = [
('details', DossierDetail),
('features', DossierFeature),
]
_date_time_fields_local = ['date_created', 'date_last_modified']
|
...
_resource_name = 'transport_dossiers'
_as_is_fields = [
'id', 'href', 'capacity', 'private', 'name',
'dossier_segment',
]
...
|
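The bug being removed is a field claimed by two mappings: while 'features' sat in _as_is_fields, the raw feature dicts were kept as-is and the ('features', DossierFeature) entry never got to wrap them in model objects. A toy illustration of why each field should belong to exactly one converter — this resolution order is an assumption for illustration, not gapipy's actual implementation:

def resolve_field(name, raw_value, as_is_fields, model_collection_fields):
    if name in as_is_fields:
        return raw_value  # raw value wins, shadowing any model mapping below
    if name in model_collection_fields:
        model_cls = model_collection_fields[name]
        return [model_cls(item) for item in raw_value]
    return raw_value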
091c125f42463b372f0c2c99124578eb8fe13150
|
2019/aoc2019/day08.py
|
2019/aoc2019/day08.py
|
from collections import Counter
from typing import Iterable, TextIO
import numpy # type: ignore
def parse_layers(width: int, height: int, data: TextIO) -> Iterable[numpy.array]:
chunk_size = width * height
content = next(data).strip()
for pos in range(0, len(content), chunk_size):
yield numpy.array([int(c) for c in content[pos:pos + chunk_size]])
def part1(data: TextIO) -> int:
best_layer: Counter[int] = min((Counter(layer) for layer in parse_layers(25, 6, data)), key=lambda c: c[0])
return best_layer[1] * best_layer[2]
def format_row(row: Iterable[int]) -> str:
return ''.join('#' if p == 1 else ' ' for p in row)
def part2(data: TextIO) -> str:
layers = list(parse_layers(25, 6, data))
background = numpy.zeros(25 * 6, numpy.int8)
for layer in reversed(layers):
background[layer != 2] = layer[layer != 2]
return '\n'.join(format_row(row) for row in background.reshape(6, 25))
|
from collections import Counter
from typing import Iterable, TextIO
import numpy # type: ignore
def parse_layers(width: int, height: int, data: TextIO) -> Iterable[numpy.array]:
chunk_size = width * height
content = next(data).strip()
for pos in range(0, len(content), chunk_size):
yield numpy.array([int(c) for c in content[pos:pos + chunk_size]])
def part1(data: TextIO) -> int:
best_layer: Counter[int] = min((Counter(layer) for layer in parse_layers(25, 6, data)), key=lambda c: c[0])
return best_layer[1] * best_layer[2]
def format_row(row: Iterable[int]) -> str:
return ''.join('#' if p == 1 else ' ' for p in row)
def part2(data: TextIO) -> str:
background = numpy.zeros(25 * 6, numpy.int8)
background.fill(2)
for layer in parse_layers(25, 6, data):
mask = background == 2
background[mask] = layer[mask]
return '\n'.join(format_row(row) for row in background.reshape(6, 25))
|
Fix day 8 to paint front-to-back
|
Fix day 8 to paint front-to-back
|
Python
|
mit
|
bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode
|
from collections import Counter
from typing import Iterable, TextIO
import numpy # type: ignore
def parse_layers(width: int, height: int, data: TextIO) -> Iterable[numpy.array]:
chunk_size = width * height
content = next(data).strip()
for pos in range(0, len(content), chunk_size):
yield numpy.array([int(c) for c in content[pos:pos + chunk_size]])
def part1(data: TextIO) -> int:
best_layer: Counter[int] = min((Counter(layer) for layer in parse_layers(25, 6, data)), key=lambda c: c[0])
return best_layer[1] * best_layer[2]
def format_row(row: Iterable[int]) -> str:
return ''.join('#' if p == 1 else ' ' for p in row)
def part2(data: TextIO) -> str:
- layers = list(parse_layers(25, 6, data))
background = numpy.zeros(25 * 6, numpy.int8)
+ background.fill(2)
- for layer in reversed(layers):
- background[layer != 2] = layer[layer != 2]
+ for layer in parse_layers(25, 6, data):
+ mask = background == 2
+ background[mask] = layer[mask]
return '\n'.join(format_row(row) for row in background.reshape(6, 25))
|
Fix day 8 to paint front-to-back
|
## Code Before:
from collections import Counter
from typing import Iterable, TextIO
import numpy # type: ignore
def parse_layers(width: int, height: int, data: TextIO) -> Iterable[numpy.array]:
chunk_size = width * height
content = next(data).strip()
for pos in range(0, len(content), chunk_size):
yield numpy.array([int(c) for c in content[pos:pos + chunk_size]])
def part1(data: TextIO) -> int:
best_layer: Counter[int] = min((Counter(layer) for layer in parse_layers(25, 6, data)), key=lambda c: c[0])
return best_layer[1] * best_layer[2]
def format_row(row: Iterable[int]) -> str:
return ''.join('#' if p == 1 else ' ' for p in row)
def part2(data: TextIO) -> str:
layers = list(parse_layers(25, 6, data))
background = numpy.zeros(25 * 6, numpy.int8)
for layer in reversed(layers):
background[layer != 2] = layer[layer != 2]
return '\n'.join(format_row(row) for row in background.reshape(6, 25))
## Instruction:
Fix day 8 to paint front-to-back
## Code After:
from collections import Counter
from typing import Iterable, TextIO
import numpy # type: ignore
def parse_layers(width: int, height: int, data: TextIO) -> Iterable[numpy.array]:
chunk_size = width * height
content = next(data).strip()
for pos in range(0, len(content), chunk_size):
yield numpy.array([int(c) for c in content[pos:pos + chunk_size]])
def part1(data: TextIO) -> int:
best_layer: Counter[int] = min((Counter(layer) for layer in parse_layers(25, 6, data)), key=lambda c: c[0])
return best_layer[1] * best_layer[2]
def format_row(row: Iterable[int]) -> str:
return ''.join('#' if p == 1 else ' ' for p in row)
def part2(data: TextIO) -> str:
background = numpy.zeros(25 * 6, numpy.int8)
background.fill(2)
for layer in parse_layers(25, 6, data):
mask = background == 2
background[mask] = layer[mask]
return '\n'.join(format_row(row) for row in background.reshape(6, 25))
|
...
def part2(data: TextIO) -> str:
background = numpy.zeros(25 * 6, numpy.int8)
background.fill(2)
for layer in parse_layers(25, 6, data):
mask = background == 2
background[mask] = layer[mask]
return '\n'.join(format_row(row) for row in background.reshape(6, 25))
...
|
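The fixed loop rests on one numpy idiom: initialize every pixel to 2 (transparent) and only ever assign pixels that are still 2, so the frontmost opaque layer wins and later layers cannot overwrite it. The idiom on a toy 1x6 image:

import numpy

background = numpy.full(6, 2, dtype=numpy.int8)       # start fully transparent
layers = [numpy.array([2, 2, 1, 2, 0, 2]),            # front layer
          numpy.array([0, 1, 0, 2, 1, 1])]            # back layer
for layer in layers:
    mask = background == 2                            # pixels still undecided
    background[mask] = layer[mask]                    # first opaque value sticks
# background is now array([0, 1, 1, 2, 0, 1], dtype=int8)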
159006e87cbbb08689284ae2534f556f66c0159b
|
alabaster/__init__.py
|
alabaster/__init__.py
|
import os
from alabaster import _version as version
def get_path():
"""
Shortcut for users whose theme is next to their conf.py.
"""
# Theme directory is defined as our parent directory
return os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
def update_context(app, pagename, templatename, context, doctree):
context['alabaster_version'] = version.__version__
def setup(app):
app.connect('html-page-context', update_context)
|
import os
from alabaster import _version as version
def get_path():
"""
Shortcut for users whose theme is next to their conf.py.
"""
# Theme directory is defined as our parent directory
return os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
def update_context(app, pagename, templatename, context, doctree):
context['alabaster_version'] = version.__version__
def setup(app):
app.connect('html-page-context', update_context)
return {'version': version.__version__,
'parallel_read_safe': True}
|
Declare extension version and parallel read safety
|
Declare extension version and parallel read safety
This is necessary for Sphinx' parallel read feature to work, since we import alabaster all the time now.
|
Python
|
bsd-3-clause
|
bgeron/alabaster,jupytercalpoly/jupyter-alabaster-theme,ellisonbg/jupyter-alabaster-theme,nikolas/alabaster,charnpreetsingh/jupyter-alabaster-theme,jupytercalpoly/jupyter-alabaster-theme,charnpreetsingh/jupyter-alabaster-theme,bgeron/alabaster,ellisonbg/jupyter-alabaster-theme,nikolas/alabaster,charnpreetsingh/jupyter-alabaster-theme,jupytercalpoly/jupyter-alabaster-theme,ellisonbg/jupyter-alabaster-theme
|
import os
from alabaster import _version as version
def get_path():
"""
Shortcut for users whose theme is next to their conf.py.
"""
# Theme directory is defined as our parent directory
return os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
def update_context(app, pagename, templatename, context, doctree):
context['alabaster_version'] = version.__version__
def setup(app):
app.connect('html-page-context', update_context)
+ return {'version': version.__version__,
+ 'parallel_read_safe': True}
|
Declare extension version and parallel read safety
|
## Code Before:
import os
from alabaster import _version as version
def get_path():
"""
Shortcut for users whose theme is next to their conf.py.
"""
# Theme directory is defined as our parent directory
return os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
def update_context(app, pagename, templatename, context, doctree):
context['alabaster_version'] = version.__version__
def setup(app):
app.connect('html-page-context', update_context)
## Instruction:
Declare extension version and parallel read safety
## Code After:
import os
from alabaster import _version as version
def get_path():
"""
Shortcut for users whose theme is next to their conf.py.
"""
# Theme directory is defined as our parent directory
return os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
def update_context(app, pagename, templatename, context, doctree):
context['alabaster_version'] = version.__version__
def setup(app):
app.connect('html-page-context', update_context)
return {'version': version.__version__,
'parallel_read_safe': True}
|
// ... existing code ...
def setup(app):
app.connect('html-page-context', update_context)
return {'version': version.__version__,
'parallel_read_safe': True}
// ... rest of the code ...
|
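Returning the metadata dict from setup() is how an extension opts in to Sphinx's parallel build; without parallel_read_safe, Sphinx assumes the extension is not safe for parallel source reading and warns. A minimal skeleton showing the shape of the return value (the version string is illustrative; parallel_write_safe defaults to True and is shown only for completeness):

def setup(app):
    app.connect('html-page-context', update_context)
    return {
        'version': '0.1.0',           # lets Sphinx detect extension upgrades
        'parallel_read_safe': True,   # sources may be read in parallel
        'parallel_write_safe': True,  # output may be written in parallel
    }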
ee09661f7a40bcecc0dc4d378800a6725a800255
|
GPyOpt/experiment_design/latin_design.py
|
GPyOpt/experiment_design/latin_design.py
|
import numpy as np
from ..core.errors import InvalidConfigError
from .base import ExperimentDesign
from .random_design import RandomDesign
class LatinDesign(ExperimentDesign):
"""
Latin experiment design.
Uses random design for non-continuous variables, and latin hypercube for continuous ones
"""
def __init__(self, space):
if space.has_constraints():
raise InvalidConfigError('Sampling with constraints is not allowed by latin design')
super(LatinDesign, self).__init__(space)
def get_samples(self, init_points_count):
samples = np.empty((init_points_count, self.space.dimensionality))
# Use random design to fill non-continuous variables
random_design = RandomDesign(self.space)
random_design.fill_noncontinous_variables(samples)
if self.space.has_continuous():
bounds = self.space.get_continuous_bounds()
lower_bound = np.asarray(bounds)[:,0].reshape(1, len(bounds))
upper_bound = np.asarray(bounds)[:,1].reshape(1, len(bounds))
diff = upper_bound - lower_bound
from pyDOE import lhs
X_design_aux = lhs(len(self.space.get_continuous_bounds()), init_points_count, criterion='center')
I = np.ones((X_design_aux.shape[0], 1))
X_design = np.dot(I, lower_bound) + X_design_aux * np.dot(I, diff)
samples[:, self.space.get_continuous_dims()] = X_design
return samples
|
import numpy as np
from ..core.errors import InvalidConfigError
from .base import ExperimentDesign
from .random_design import RandomDesign
class LatinDesign(ExperimentDesign):
"""
Latin experiment design.
Uses random design for non-continuous variables, and latin hypercube for continuous ones
"""
def __init__(self, space):
if space.has_constraints():
raise InvalidConfigError('Sampling with constraints is not allowed by latin design')
super(LatinDesign, self).__init__(space)
def get_samples(self, init_points_count, criterion='center'):
samples = np.empty((init_points_count, self.space.dimensionality))
# Use random design to fill non-continuous variables
random_design = RandomDesign(self.space)
random_design.fill_noncontinous_variables(samples)
if self.space.has_continuous():
bounds = self.space.get_continuous_bounds()
lower_bound = np.asarray(bounds)[:,0].reshape(1, len(bounds))
upper_bound = np.asarray(bounds)[:,1].reshape(1, len(bounds))
diff = upper_bound - lower_bound
from pyDOE import lhs
X_design_aux = lhs(len(self.space.get_continuous_bounds()), init_points_count, criterion=criterion)
I = np.ones((X_design_aux.shape[0], 1))
X_design = np.dot(I, lower_bound) + X_design_aux * np.dot(I, diff)
samples[:, self.space.get_continuous_dims()] = X_design
return samples
|
Allow users to choose lhs sampling criteria
|
Allow users to choose lhs sampling criteria
|
Python
|
bsd-3-clause
|
SheffieldML/GPyOpt
|
import numpy as np
from ..core.errors import InvalidConfigError
from .base import ExperimentDesign
from .random_design import RandomDesign
class LatinDesign(ExperimentDesign):
"""
Latin experiment design.
Uses random design for non-continuous variables, and latin hypercube for continuous ones
"""
def __init__(self, space):
if space.has_constraints():
raise InvalidConfigError('Sampling with constraints is not allowed by latin design')
super(LatinDesign, self).__init__(space)
- def get_samples(self, init_points_count):
+ def get_samples(self, init_points_count, criterion='center'):
samples = np.empty((init_points_count, self.space.dimensionality))
# Use random design to fill non-continuous variables
random_design = RandomDesign(self.space)
random_design.fill_noncontinous_variables(samples)
if self.space.has_continuous():
bounds = self.space.get_continuous_bounds()
lower_bound = np.asarray(bounds)[:,0].reshape(1, len(bounds))
upper_bound = np.asarray(bounds)[:,1].reshape(1, len(bounds))
diff = upper_bound - lower_bound
from pyDOE import lhs
- X_design_aux = lhs(len(self.space.get_continuous_bounds()), init_points_count, criterion='center')
+ X_design_aux = lhs(len(self.space.get_continuous_bounds()), init_points_count, criterion=criterion)
I = np.ones((X_design_aux.shape[0], 1))
X_design = np.dot(I, lower_bound) + X_design_aux * np.dot(I, diff)
samples[:, self.space.get_continuous_dims()] = X_design
return samples
|
Allow users to choose lhs sampling criteria
|
## Code Before:
import numpy as np
from ..core.errors import InvalidConfigError
from .base import ExperimentDesign
from .random_design import RandomDesign
class LatinDesign(ExperimentDesign):
"""
Latin experiment design.
Uses random design for non-continuous variables, and latin hypercube for continuous ones
"""
def __init__(self, space):
if space.has_constraints():
raise InvalidConfigError('Sampling with constraints is not allowed by latin design')
super(LatinDesign, self).__init__(space)
def get_samples(self, init_points_count):
samples = np.empty((init_points_count, self.space.dimensionality))
# Use random design to fill non-continuous variables
random_design = RandomDesign(self.space)
random_design.fill_noncontinous_variables(samples)
if self.space.has_continuous():
bounds = self.space.get_continuous_bounds()
lower_bound = np.asarray(bounds)[:,0].reshape(1, len(bounds))
upper_bound = np.asarray(bounds)[:,1].reshape(1, len(bounds))
diff = upper_bound - lower_bound
from pyDOE import lhs
X_design_aux = lhs(len(self.space.get_continuous_bounds()), init_points_count, criterion='center')
I = np.ones((X_design_aux.shape[0], 1))
X_design = np.dot(I, lower_bound) + X_design_aux * np.dot(I, diff)
samples[:, self.space.get_continuous_dims()] = X_design
return samples
## Instruction:
Allow users to choose lhs sampling criteria
## Code After:
import numpy as np
from ..core.errors import InvalidConfigError
from .base import ExperimentDesign
from .random_design import RandomDesign
class LatinDesign(ExperimentDesign):
"""
Latin experiment design.
Uses random design for non-continuous variables, and latin hypercube for continuous ones
"""
def __init__(self, space):
if space.has_constraints():
raise InvalidConfigError('Sampling with constraints is not allowed by latin design')
super(LatinDesign, self).__init__(space)
def get_samples(self, init_points_count, criterion='center'):
samples = np.empty((init_points_count, self.space.dimensionality))
# Use random design to fill non-continuous variables
random_design = RandomDesign(self.space)
random_design.fill_noncontinous_variables(samples)
if self.space.has_continuous():
bounds = self.space.get_continuous_bounds()
lower_bound = np.asarray(bounds)[:,0].reshape(1, len(bounds))
upper_bound = np.asarray(bounds)[:,1].reshape(1, len(bounds))
diff = upper_bound - lower_bound
from pyDOE import lhs
X_design_aux = lhs(len(self.space.get_continuous_bounds()), init_points_count, criterion=criterion)
I = np.ones((X_design_aux.shape[0], 1))
X_design = np.dot(I, lower_bound) + X_design_aux * np.dot(I, diff)
samples[:, self.space.get_continuous_dims()] = X_design
return samples
|
# ... existing code ...
raise InvalidConfigError('Sampling with constraints is not allowed by latin design')
super(LatinDesign, self).__init__(space)
def get_samples(self, init_points_count, criterion='center'):
samples = np.empty((init_points_count, self.space.dimensionality))
# Use random design to fill non-continuous variables
# ... modified code ...
diff = upper_bound - lower_bound
from pyDOE import lhs
X_design_aux = lhs(len(self.space.get_continuous_bounds()), init_points_count, criterion=criterion)
I = np.ones((X_design_aux.shape[0], 1))
X_design = np.dot(I, lower_bound) + X_design_aux * np.dot(I, diff)
# ... rest of the code ...
|
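The new criterion keyword is passed straight through to pyDOE's lhs, which accepts 'center' (the previously hardcoded value), 'maximin', 'centermaximin', and 'correlation'. A quick sketch of two designs over the unit square, to be scaled to the real bounds afterwards just as the method above does:

from pyDOE import lhs

centered = lhs(2, samples=5, criterion='center')   # centers points in their intervals
spread = lhs(2, samples=5, criterion='maximin')    # maximizes minimum pairwise distance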
7024d3b36176ec11142ee10884936ff329aece49
|
tests/test_cookiecutter_invocation.py
|
tests/test_cookiecutter_invocation.py
|
import os
import pytest
import subprocess
from cookiecutter import utils
def test_should_raise_error_without_template_arg(capfd):
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_call(['python', '-m', 'cookiecutter.cli'])
_, err = capfd.readouterr()
exp_message = 'Error: Missing argument "template".'
assert exp_message in err
@pytest.fixture
def project_dir(request):
"""Remove the rendered project directory created by the test."""
rendered_dir = 'fake-project-templated'
def remove_generated_project():
if os.path.isdir(rendered_dir):
utils.rmtree(rendered_dir)
request.addfinalizer(remove_generated_project)
return rendered_dir
def test_should_invoke_main(project_dir):
subprocess.check_call([
'python',
'-m',
'cookiecutter.cli',
'tests/fake-repo-tmpl',
'--no-input'
])
assert os.path.isdir(project_dir)
|
import os
import pytest
import subprocess
import sys
from cookiecutter import utils
def test_should_raise_error_without_template_arg(capfd):
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_call(['python', '-m', 'cookiecutter.cli'])
_, err = capfd.readouterr()
exp_message = 'Error: Missing argument "template".'
assert exp_message in err
@pytest.fixture
def project_dir(request):
"""Remove the rendered project directory created by the test."""
rendered_dir = 'fake-project-templated'
def remove_generated_project():
if os.path.isdir(rendered_dir):
utils.rmtree(rendered_dir)
request.addfinalizer(remove_generated_project)
return rendered_dir
def test_should_invoke_main(monkeypatch, project_dir):
monkeypatch.setenv('PYTHONPATH', '.')
subprocess.check_call([
sys.executable,
'-m',
'cookiecutter.cli',
'tests/fake-repo-tmpl',
'--no-input'
])
assert os.path.isdir(project_dir)
|
Set PYTHONPATH and use sys.executable
|
Set PYTHONPATH and use sys.executable
|
Python
|
bsd-3-clause
|
agconti/cookiecutter,stevepiercy/cookiecutter,sp1rs/cookiecutter,Vauxoo/cookiecutter,hackebrot/cookiecutter,hackebrot/cookiecutter,ramiroluz/cookiecutter,kkujawinski/cookiecutter,pjbull/cookiecutter,ramiroluz/cookiecutter,cguardia/cookiecutter,cguardia/cookiecutter,sp1rs/cookiecutter,venumech/cookiecutter,Vauxoo/cookiecutter,dajose/cookiecutter,takeflight/cookiecutter,moi65/cookiecutter,moi65/cookiecutter,audreyr/cookiecutter,michaeljoseph/cookiecutter,michaeljoseph/cookiecutter,Springerle/cookiecutter,takeflight/cookiecutter,willingc/cookiecutter,christabor/cookiecutter,terryjbates/cookiecutter,kkujawinski/cookiecutter,benthomasson/cookiecutter,audreyr/cookiecutter,atlassian/cookiecutter,terryjbates/cookiecutter,luzfcb/cookiecutter,stevepiercy/cookiecutter,luzfcb/cookiecutter,benthomasson/cookiecutter,christabor/cookiecutter,agconti/cookiecutter,atlassian/cookiecutter,willingc/cookiecutter,venumech/cookiecutter,dajose/cookiecutter,pjbull/cookiecutter,Springerle/cookiecutter
|
import os
import pytest
import subprocess
+ import sys
from cookiecutter import utils
def test_should_raise_error_without_template_arg(capfd):
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_call(['python', '-m', 'cookiecutter.cli'])
_, err = capfd.readouterr()
exp_message = 'Error: Missing argument "template".'
assert exp_message in err
@pytest.fixture
def project_dir(request):
"""Remove the rendered project directory created by the test."""
rendered_dir = 'fake-project-templated'
def remove_generated_project():
if os.path.isdir(rendered_dir):
utils.rmtree(rendered_dir)
request.addfinalizer(remove_generated_project)
return rendered_dir
- def test_should_invoke_main(project_dir):
+ def test_should_invoke_main(monkeypatch, project_dir):
+ monkeypatch.setenv('PYTHONPATH', '.')
+
subprocess.check_call([
- 'python',
+ sys.executable,
'-m',
'cookiecutter.cli',
'tests/fake-repo-tmpl',
'--no-input'
])
+
assert os.path.isdir(project_dir)
|
Set PYTHONPATH and use sys.executable
|
## Code Before:
import os
import pytest
import subprocess
from cookiecutter import utils
def test_should_raise_error_without_template_arg(capfd):
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_call(['python', '-m', 'cookiecutter.cli'])
_, err = capfd.readouterr()
exp_message = 'Error: Missing argument "template".'
assert exp_message in err
@pytest.fixture
def project_dir(request):
"""Remove the rendered project directory created by the test."""
rendered_dir = 'fake-project-templated'
def remove_generated_project():
if os.path.isdir(rendered_dir):
utils.rmtree(rendered_dir)
request.addfinalizer(remove_generated_project)
return rendered_dir
def test_should_invoke_main(project_dir):
subprocess.check_call([
'python',
'-m',
'cookiecutter.cli',
'tests/fake-repo-tmpl',
'--no-input'
])
assert os.path.isdir(project_dir)
## Instruction:
Set PYTHONPATH and use sys.executable
## Code After:
import os
import pytest
import subprocess
import sys
from cookiecutter import utils
def test_should_raise_error_without_template_arg(capfd):
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_call(['python', '-m', 'cookiecutter.cli'])
_, err = capfd.readouterr()
exp_message = 'Error: Missing argument "template".'
assert exp_message in err
@pytest.fixture
def project_dir(request):
"""Remove the rendered project directory created by the test."""
rendered_dir = 'fake-project-templated'
def remove_generated_project():
if os.path.isdir(rendered_dir):
utils.rmtree(rendered_dir)
request.addfinalizer(remove_generated_project)
return rendered_dir
def test_should_invoke_main(monkeypatch, project_dir):
monkeypatch.setenv('PYTHONPATH', '.')
subprocess.check_call([
sys.executable,
'-m',
'cookiecutter.cli',
'tests/fake-repo-tmpl',
'--no-input'
])
assert os.path.isdir(project_dir)
|
// ... existing code ...
import os
import pytest
import subprocess
import sys
from cookiecutter import utils
// ... modified code ...
return rendered_dir
def test_should_invoke_main(monkeypatch, project_dir):
monkeypatch.setenv('PYTHONPATH', '.')
subprocess.check_call([
sys.executable,
'-m',
'cookiecutter.cli',
'tests/fake-repo-tmpl',
'--no-input'
])
assert os.path.isdir(project_dir)
// ... rest of the code ...
|
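Aside: the fix in the record above matters because the bare name "python" can resolve to a different interpreter than the one running the tests (for example, outside the active virtualenv); sys.executable pins the child process to the same interpreter, and PYTHONPATH=. makes the package importable from the checkout. A minimal standalone sketch of both ideas (no cookiecutter assumed):

import os
import subprocess
import sys

# sys.executable is the absolute path of the interpreter running this
# script, so the child process uses the exact same Python installation.
env = dict(os.environ, PYTHONPATH='.')  # what monkeypatch.setenv does above
out = subprocess.check_output(
    [sys.executable, '-c', 'import sys; print(sys.executable)'],
    env=env,
)
print(out.decode().strip())
|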
7223bf0bf3ecf3459e5e7c9f01af61a8236eaffd
|
espei/__init__.py
|
espei/__init__.py
|
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
import os
import yaml
from cerberus import Validator
MODULE_DIR = os.path.dirname(os.path.abspath(__file__))
# extension for iseven
class ESPEIValidator(Validator):
def _validate_iseven(self, iseven, field, value):
""" Test the oddity of a value.
The rule's arguments are validated against this schema:
{'type': 'boolean'}
"""
if iseven and bool(value & 1):
self._error(field, "Must be an even number")
with open(os.path.join(MODULE_DIR, 'input-schema.yaml')) as f:
schema = ESPEIValidator(yaml.load(f))
from espei.paramselect import generate_parameters
from espei.mcmc import mcmc_fit
from espei.espei_script import run_espei
|
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
import os
import yaml
from cerberus import Validator
MODULE_DIR = os.path.dirname(os.path.abspath(__file__))
# extension for iseven
class ESPEIValidator(Validator):
def _validate_iseven(self, iseven, field, value):
""" Test the oddity of a value.
The rule's arguments are validated against this schema:
{'type': 'boolean'}
"""
if iseven and bool(value & 1):
self._error(field, "Must be an even number")
with open(os.path.join(MODULE_DIR, 'input-schema.yaml')) as f:
schema = ESPEIValidator(yaml.load(f))
from espei.paramselect import generate_parameters
from espei.mcmc import mcmc_fit
from espei.espei_script import run_espei
# swallow warnings during MCMC runs
import warnings
warnings.filterwarnings('ignore', message='Mean of empty slice')
warnings.filterwarnings('ignore', message='invalid value encountered in subtract')
warnings.filterwarnings('ignore', message='invalid value encountered in greater')
|
Hide permissible NumPy warnings from users
|
ENH: Hide permissible NumPy warnings from users
|
Python
|
mit
|
PhasesResearchLab/ESPEI
|
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
import os
import yaml
from cerberus import Validator
MODULE_DIR = os.path.dirname(os.path.abspath(__file__))
# extension for iseven
class ESPEIValidator(Validator):
def _validate_iseven(self, iseven, field, value):
""" Test the oddity of a value.
The rule's arguments are validated against this schema:
{'type': 'boolean'}
"""
if iseven and bool(value & 1):
self._error(field, "Must be an even number")
with open(os.path.join(MODULE_DIR, 'input-schema.yaml')) as f:
schema = ESPEIValidator(yaml.load(f))
from espei.paramselect import generate_parameters
from espei.mcmc import mcmc_fit
from espei.espei_script import run_espei
+ # swallow warnings during MCMC runs
+ import warnings
+ warnings.filterwarnings('ignore', message='Mean of empty slice')
+ warnings.filterwarnings('ignore', message='invalid value encountered in subtract')
+ warnings.filterwarnings('ignore', message='invalid value encountered in greater')
+
|
Hide permissible NumPy warnings from users
|
## Code Before:
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
import os
import yaml
from cerberus import Validator
MODULE_DIR = os.path.dirname(os.path.abspath(__file__))
# extension for iseven
class ESPEIValidator(Validator):
def _validate_iseven(self, iseven, field, value):
""" Test the oddity of a value.
The rule's arguments are validated against this schema:
{'type': 'boolean'}
"""
if iseven and bool(value & 1):
self._error(field, "Must be an even number")
with open(os.path.join(MODULE_DIR, 'input-schema.yaml')) as f:
schema = ESPEIValidator(yaml.load(f))
from espei.paramselect import generate_parameters
from espei.mcmc import mcmc_fit
from espei.espei_script import run_espei
## Instruction:
Hide permissible NumPy warnings from users
## Code After:
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
import os
import yaml
from cerberus import Validator
MODULE_DIR = os.path.dirname(os.path.abspath(__file__))
# extension for iseven
class ESPEIValidator(Validator):
def _validate_iseven(self, iseven, field, value):
""" Test the oddity of a value.
The rule's arguments are validated against this schema:
{'type': 'boolean'}
"""
if iseven and bool(value & 1):
self._error(field, "Must be an even number")
with open(os.path.join(MODULE_DIR, 'input-schema.yaml')) as f:
schema = ESPEIValidator(yaml.load(f))
from espei.paramselect import generate_parameters
from espei.mcmc import mcmc_fit
from espei.espei_script import run_espei
# swallow warnings during MCMC runs
import warnings
warnings.filterwarnings('ignore', message='Mean of empty slice')
warnings.filterwarnings('ignore', message='invalid value encountered in subtract')
warnings.filterwarnings('ignore', message='invalid value encountered in greater')
|
# ... existing code ...
from espei.paramselect import generate_parameters
from espei.mcmc import mcmc_fit
from espei.espei_script import run_espei
# swallow warnings during MCMC runs
import warnings
warnings.filterwarnings('ignore', message='Mean of empty slice')
warnings.filterwarnings('ignore', message='invalid value encountered in subtract')
warnings.filterwarnings('ignore', message='invalid value encountered in greater')
# ... rest of the code ...
|
b24ae1320af5387e339a12dc00e214330525e549
|
src/BibleBot.Frontend/application.py
|
src/BibleBot.Frontend/application.py
|
import disnake
from disnake.ext import commands
from logger import VyLogger
import os
logger = VyLogger("default")
intents = disnake.Intents.default()
intents.message_content = True
bot = commands.AutoShardedBot(
command_prefix=commands.when_mentioned,
intents=intents,
test_guilds=[362503610006765568],
sync_commands_debug=True,
)
bot.load_extension("cogs")
bot.run(os.environ.get("DISCORD_TOKEN"))
|
import disnake
from disnake.ext import commands
from logger import VyLogger
import os
logger = VyLogger("default")
intents = disnake.Intents.default()
intents.message_content = True
bot = commands.AutoShardedBot(
command_prefix=commands.when_mentioned,
intents=intents,
)
bot.load_extension("cogs")
bot.run(os.environ.get("DISCORD_TOKEN"))
|
Move commands out of test.
|
Move commands out of test.
|
Python
|
mpl-2.0
|
BibleBot/BibleBot,BibleBot/BibleBot,BibleBot/BibleBot
|
import disnake
from disnake.ext import commands
from logger import VyLogger
import os
logger = VyLogger("default")
intents = disnake.Intents.default()
intents.message_content = True
bot = commands.AutoShardedBot(
command_prefix=commands.when_mentioned,
intents=intents,
- test_guilds=[362503610006765568],
- sync_commands_debug=True,
)
bot.load_extension("cogs")
bot.run(os.environ.get("DISCORD_TOKEN"))
|
Move commands out of test.
|
## Code Before:
import disnake
from disnake.ext import commands
from logger import VyLogger
import os
logger = VyLogger("default")
intents = disnake.Intents.default()
intents.message_content = True
bot = commands.AutoShardedBot(
command_prefix=commands.when_mentioned,
intents=intents,
test_guilds=[362503610006765568],
sync_commands_debug=True,
)
bot.load_extension("cogs")
bot.run(os.environ.get("DISCORD_TOKEN"))
## Instruction:
Move commands out of test.
## Code After:
import disnake
from disnake.ext import commands
from logger import VyLogger
import os
logger = VyLogger("default")
intents = disnake.Intents.default()
intents.message_content = True
bot = commands.AutoShardedBot(
command_prefix=commands.when_mentioned,
intents=intents,
)
bot.load_extension("cogs")
bot.run(os.environ.get("DISCORD_TOKEN"))
|
// ... existing code ...
bot = commands.AutoShardedBot(
command_prefix=commands.when_mentioned,
intents=intents,
)
bot.load_extension("cogs")
// ... rest of the code ...
|
d44fee53020470e2d9a8cd2393f5f0125dbd1fab
|
python/client.py
|
python/client.py
|
import grpc
import hello_pb2
import hello_pb2_grpc
def run():
channel = grpc.insecure_channel('localhost:50051')
stub = hello_pb2_grpc.HelloServiceStub(channel)
# ideally, you should have try catch block here too
response = stub.SayHello(hello_pb2.HelloReq(Name='Euler'))
print(response.Result)
try:
response = stub.SayHelloStrict(hello_pb2.HelloReq(
Name='Leonhard Euler'))
except grpc.RpcError as e:
# ouch!
# lets print the gRPC error message
# which is "Length of `Name` cannot be more than 10 characters"
print(e.details())
# lets access the error code, which is `INVALID_ARGUMENT`
# `type` of `status_code` is `grpc.StatusCode`
status_code = e.code()
# should print `INVALID_ARGUMENT`
print(status_code.name)
# should print `(3, 'invalid argument')`
print(status_code.value)
else:
print(response.Result)
if __name__ == '__main__':
run()
|
import grpc
import hello_pb2
import hello_pb2_grpc
def run():
channel = grpc.insecure_channel('localhost:50051')
stub = hello_pb2_grpc.HelloServiceStub(channel)
# ideally, you should have try catch block here too
response = stub.SayHello(hello_pb2.HelloReq(Name='Euler'))
print(response.Result)
try:
response = stub.SayHelloStrict(hello_pb2.HelloReq(
Name='Leonhard Euler'))
except grpc.RpcError as e:
# ouch!
# lets print the gRPC error message
# which is "Length of `Name` cannot be more than 10 characters"
print(e.details())
# lets access the error code, which is `INVALID_ARGUMENT`
# `type` of `status_code` is `grpc.StatusCode`
status_code = e.code()
# should print `INVALID_ARGUMENT`
print(status_code.name)
# should print `(3, 'invalid argument')`
print(status_code.value)
# want to do some specific action based on the error?
if grpc.StatusCode.INVALID_ARGUMENT == status_code:
# do your stuff here
pass
else:
print(response.Result)
if __name__ == '__main__':
run()
|
Update python version for better error handling
|
Update python version for better error handling
|
Python
|
mit
|
avinassh/grpc-errors,avinassh/grpc-errors,avinassh/grpc-errors,avinassh/grpc-errors,avinassh/grpc-errors,avinassh/grpc-errors,avinassh/grpc-errors,avinassh/grpc-errors
|
import grpc
import hello_pb2
import hello_pb2_grpc
def run():
channel = grpc.insecure_channel('localhost:50051')
stub = hello_pb2_grpc.HelloServiceStub(channel)
# ideally, you should have try catch block here too
response = stub.SayHello(hello_pb2.HelloReq(Name='Euler'))
print(response.Result)
try:
response = stub.SayHelloStrict(hello_pb2.HelloReq(
Name='Leonhard Euler'))
except grpc.RpcError as e:
# ouch!
# lets print the gRPC error message
# which is "Length of `Name` cannot be more than 10 characters"
print(e.details())
# lets access the error code, which is `INVALID_ARGUMENT`
# `type` of `status_code` is `grpc.StatusCode`
status_code = e.code()
# should print `INVALID_ARGUMENT`
print(status_code.name)
# should print `(3, 'invalid argument')`
print(status_code.value)
+ # want to do some specific action based on the error?
+ if grpc.StatusCode.INVALID_ARGUMENT == status_code:
+ # do your stuff here
+ pass
else:
print(response.Result)
if __name__ == '__main__':
run()
|
Update python version for better error handling
|
## Code Before:
import grpc
import hello_pb2
import hello_pb2_grpc
def run():
channel = grpc.insecure_channel('localhost:50051')
stub = hello_pb2_grpc.HelloServiceStub(channel)
# ideally, you should have try catch block here too
response = stub.SayHello(hello_pb2.HelloReq(Name='Euler'))
print(response.Result)
try:
response = stub.SayHelloStrict(hello_pb2.HelloReq(
Name='Leonhard Euler'))
except grpc.RpcError as e:
# ouch!
# lets print the gRPC error message
# which is "Length of `Name` cannot be more than 10 characters"
print(e.details())
# lets access the error code, which is `INVALID_ARGUMENT`
# `type` of `status_code` is `grpc.StatusCode`
status_code = e.code()
# should print `INVALID_ARGUMENT`
print(status_code.name)
# should print `(3, 'invalid argument')`
print(status_code.value)
else:
print(response.Result)
if __name__ == '__main__':
run()
## Instruction:
Update python version for better error handling
## Code After:
import grpc
import hello_pb2
import hello_pb2_grpc
def run():
channel = grpc.insecure_channel('localhost:50051')
stub = hello_pb2_grpc.HelloServiceStub(channel)
# ideally, you should have try catch block here too
response = stub.SayHello(hello_pb2.HelloReq(Name='Euler'))
print(response.Result)
try:
response = stub.SayHelloStrict(hello_pb2.HelloReq(
Name='Leonhard Euler'))
except grpc.RpcError as e:
# ouch!
# lets print the gRPC error message
# which is "Length of `Name` cannot be more than 10 characters"
print(e.details())
# lets access the error code, which is `INVALID_ARGUMENT`
# `type` of `status_code` is `grpc.StatusCode`
status_code = e.code()
# should print `INVALID_ARGUMENT`
print(status_code.name)
# should print `(3, 'invalid argument')`
print(status_code.value)
# want to do some specific action based on the error?
if grpc.StatusCode.INVALID_ARGUMENT == status_code:
# do your stuff here
pass
else:
print(response.Result)
if __name__ == '__main__':
run()
|
// ... existing code ...
print(status_code.name)
# should print `(3, 'invalid argument')`
print(status_code.value)
# want to do some specific action based on the error?
if grpc.StatusCode.INVALID_ARGUMENT == status_code:
# do your stuff here
pass
else:
print(response.Result)
// ... rest of the code ...
|
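Aside: the INVALID_ARGUMENT status the client above inspects has to be produced by the server; in grpc-python a handler usually does that with context.abort(code, details). A sketch of such a handler, reusing the hello_pb2 request from the record (the response class name HelloResp is a guess, since only the Result field appears here):

import grpc
import hello_pb2

class HelloService:
    def SayHelloStrict(self, request, context):
        if len(request.Name) > 10:
            # abort() raises and terminates the RPC; the client reads the
            # code via e.code() and the text via e.details()
            context.abort(grpc.StatusCode.INVALID_ARGUMENT,
                          'Length of `Name` cannot be more than 10 characters')
        return hello_pb2.HelloResp(Result='Hello, {}!'.format(request.Name))
|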
ac5a339f73cb80b54b0298a02bce41c27c25b9ae
|
authentication/forms.py
|
authentication/forms.py
|
from django import forms as newform
from django.forms import ModelForm
from people.models import Beneficiary, Donor
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
# from django.core.urlresolvers import reverse
# from crispy_forms.helper import FormHelper
# from crispy_forms.layout import Submit, Layout, Fieldset
class LoginForm(newform.Form):
username = newform.CharField()
password = newform.CharField(widget=newform.PasswordInput)
# def __init__(self, *args, **kwargs):
# super(LoginForm, self).__init__(*args, **kwargs)
# self.helper = FormHelper()
# self.helper.form_id = 'login-form'
# self.helper.form_class = 'form-horizontal'
# self.helper.form_method = 'post'
# self.helper.form_action = reverse('accounts:login')
# self.helper.add_input(Submit('submit', 'Login', css_class='btn btn-
# primary'))
|
from django import forms as newform
class LoginForm(newform.Form):
username = newform.CharField()
password = newform.CharField(widget=newform.PasswordInput)
|
Remove unused and commented parts
|
Remove unused and commented parts
|
Python
|
bsd-3-clause
|
agiliq/fundraiser,agiliq/fundraiser,febinstephen/django-fundrasiser-app,agiliq/fundraiser,febinstephen/django-fundrasiser-app,febinstephen/django-fundrasiser-app
|
from django import forms as newform
- from django.forms import ModelForm
- from people.models import Beneficiary, Donor
- from django.contrib.auth.models import User
- from django.utils.translation import ugettext_lazy as _
-
- # from django.core.urlresolvers import reverse
- # from crispy_forms.helper import FormHelper
- # from crispy_forms.layout import Submit, Layout, Fieldset
-
class LoginForm(newform.Form):
username = newform.CharField()
password = newform.CharField(widget=newform.PasswordInput)
- # def __init__(self, *args, **kwargs):
- # super(LoginForm, self).__init__(*args, **kwargs)
- # self.helper = FormHelper()
- # self.helper.form_id = 'login-form'
- # self.helper.form_class = 'form-horizontal'
- # self.helper.form_method = 'post'
- # self.helper.form_action = reverse('accounts:login')
- # self.helper.add_input(Submit('submit', 'Login', css_class='btn btn-
- # primary'))
-
|
Remove unused and commented parts
|
## Code Before:
from django import forms as newform
from django.forms import ModelForm
from people.models import Beneficiary, Donor
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
# from django.core.urlresolvers import reverse
# from crispy_forms.helper import FormHelper
# from crispy_forms.layout import Submit, Layout, Fieldset
class LoginForm(newform.Form):
username = newform.CharField()
password = newform.CharField(widget=newform.PasswordInput)
# def __init__(self, *args, **kwargs):
# super(LoginForm, self).__init__(*args, **kwargs)
# self.helper = FormHelper()
# self.helper.form_id = 'login-form'
# self.helper.form_class = 'form-horizontal'
# self.helper.form_method = 'post'
# self.helper.form_action = reverse('accounts:login')
# self.helper.add_input(Submit('submit', 'Login', css_class='btn btn-
# primary'))
## Instruction:
Remove unused and commented parts
## Code After:
from django import forms as newform
class LoginForm(newform.Form):
username = newform.CharField()
password = newform.CharField(widget=newform.PasswordInput)
|
# ... existing code ...
from django import forms as newform
class LoginForm(newform.Form):
username = newform.CharField()
password = newform.CharField(widget=newform.PasswordInput)
# ... rest of the code ...
|
0a78f0cc03124662871c27ae2ac8647ecac58457
|
rasa_nlu/tokenizers/spacy_tokenizer.py
|
rasa_nlu/tokenizers/spacy_tokenizer.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import typing
from typing import Any, List
from rasa_nlu.components import Component
from rasa_nlu.config import RasaNLUModelConfig
from rasa_nlu.tokenizers import Tokenizer, Token
from rasa_nlu.training_data import Message
from rasa_nlu.training_data import TrainingData
if typing.TYPE_CHECKING:
from spacy.tokens.doc import Doc
class SpacyTokenizer(Tokenizer, Component):
name = "tokenizer_spacy"
provides = ["tokens"]
def train(self, training_data, config, **kwargs):
# type: (TrainingData, RasaNLUModelConfig, **Any) -> None
for example in training_data.training_examples:
example.set("tokens", self.tokenize(example.get("spacy_doc")))
def process(self, message, **kwargs):
# type: (Message, **Any) -> None
message.set("tokens", self.tokenize(message.get("spacy_doc")))
def tokenize(self, doc):
# type: (Doc) -> List[Token]
return [Token(t.text, t.idx) for t in doc]
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import typing
from typing import Any, List
from rasa_nlu.components import Component
from rasa_nlu.config import RasaNLUModelConfig
from rasa_nlu.tokenizers import Tokenizer, Token
from rasa_nlu.training_data import Message
from rasa_nlu.training_data import TrainingData
if typing.TYPE_CHECKING:
from spacy.tokens.doc import Doc
class SpacyTokenizer(Tokenizer, Component):
name = "tokenizer_spacy"
provides = ["tokens"]
requires = ["spacy_doc"]
def train(self, training_data, config, **kwargs):
# type: (TrainingData, RasaNLUModelConfig, **Any) -> None
for example in training_data.training_examples:
example.set("tokens", self.tokenize(example.get("spacy_doc")))
def process(self, message, **kwargs):
# type: (Message, **Any) -> None
message.set("tokens", self.tokenize(message.get("spacy_doc")))
def tokenize(self, doc):
# type: (Doc) -> List[Token]
return [Token(t.text, t.idx) for t in doc]
|
Add missing "requires" to spacy tokenizer
|
Add missing "requires" to spacy tokenizer
|
Python
|
apache-2.0
|
RasaHQ/rasa_nlu,RasaHQ/rasa_nlu,RasaHQ/rasa_nlu
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import typing
from typing import Any, List
from rasa_nlu.components import Component
from rasa_nlu.config import RasaNLUModelConfig
from rasa_nlu.tokenizers import Tokenizer, Token
from rasa_nlu.training_data import Message
from rasa_nlu.training_data import TrainingData
if typing.TYPE_CHECKING:
from spacy.tokens.doc import Doc
class SpacyTokenizer(Tokenizer, Component):
name = "tokenizer_spacy"
provides = ["tokens"]
+ requires = ["spacy_doc"]
+
def train(self, training_data, config, **kwargs):
# type: (TrainingData, RasaNLUModelConfig, **Any) -> None
for example in training_data.training_examples:
example.set("tokens", self.tokenize(example.get("spacy_doc")))
def process(self, message, **kwargs):
# type: (Message, **Any) -> None
message.set("tokens", self.tokenize(message.get("spacy_doc")))
def tokenize(self, doc):
# type: (Doc) -> List[Token]
return [Token(t.text, t.idx) for t in doc]
|
Add missing "requires" to spacy tokenizer
|
## Code Before:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import typing
from typing import Any, List
from rasa_nlu.components import Component
from rasa_nlu.config import RasaNLUModelConfig
from rasa_nlu.tokenizers import Tokenizer, Token
from rasa_nlu.training_data import Message
from rasa_nlu.training_data import TrainingData
if typing.TYPE_CHECKING:
from spacy.tokens.doc import Doc
class SpacyTokenizer(Tokenizer, Component):
name = "tokenizer_spacy"
provides = ["tokens"]
def train(self, training_data, config, **kwargs):
# type: (TrainingData, RasaNLUModelConfig, **Any) -> None
for example in training_data.training_examples:
example.set("tokens", self.tokenize(example.get("spacy_doc")))
def process(self, message, **kwargs):
# type: (Message, **Any) -> None
message.set("tokens", self.tokenize(message.get("spacy_doc")))
def tokenize(self, doc):
# type: (Doc) -> List[Token]
return [Token(t.text, t.idx) for t in doc]
## Instruction:
Add missing "requires" to spacy tokenizer
## Code After:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import typing
from typing import Any, List
from rasa_nlu.components import Component
from rasa_nlu.config import RasaNLUModelConfig
from rasa_nlu.tokenizers import Tokenizer, Token
from rasa_nlu.training_data import Message
from rasa_nlu.training_data import TrainingData
if typing.TYPE_CHECKING:
from spacy.tokens.doc import Doc
class SpacyTokenizer(Tokenizer, Component):
name = "tokenizer_spacy"
provides = ["tokens"]
requires = ["spacy_doc"]
def train(self, training_data, config, **kwargs):
# type: (TrainingData, RasaNLUModelConfig, **Any) -> None
for example in training_data.training_examples:
example.set("tokens", self.tokenize(example.get("spacy_doc")))
def process(self, message, **kwargs):
# type: (Message, **Any) -> None
message.set("tokens", self.tokenize(message.get("spacy_doc")))
def tokenize(self, doc):
# type: (Doc) -> List[Token]
return [Token(t.text, t.idx) for t in doc]
|
...
provides = ["tokens"]
requires = ["spacy_doc"]
def train(self, training_data, config, **kwargs):
# type: (TrainingData, RasaNLUModelConfig, **Any) -> None
...
|
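Aside: the requires/provides pair added above is what lets a pipeline assembler verify component ordering before training. This is a generic illustration of that dependency check, not Rasa's actual validator:

components = [
    {'name': 'nlp_spacy',       'requires': [],            'provides': ['spacy_doc']},
    {'name': 'tokenizer_spacy', 'requires': ['spacy_doc'], 'provides': ['tokens']},
]

available = set()
for component in components:
    missing = [need for need in component['requires'] if need not in available]
    # without requires = ['spacy_doc'], this dependency could not be checked
    assert not missing, '{} is missing {}'.format(component['name'], missing)
    available.update(component['provides'])
print('pipeline order is valid')
|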
e16960eaaf38513e80fb18580c3e4320978407e4
|
chainer/training/triggers/__init__.py
|
chainer/training/triggers/__init__.py
|
from chainer.training.triggers import interval_trigger # NOQA
from chainer.training.triggers import minmax_value_trigger # NOQA
# import class and function
from chainer.training.triggers.early_stopping_trigger import EarlyStoppingTrigger # NOQA
from chainer.training.triggers.interval_trigger import IntervalTrigger # NOQA
from chainer.training.triggers.manual_schedule_trigger import ManualScheduleTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import BestValueTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import MaxValueTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import MinValueTrigger # NOQA
from chainer.training.triggers.early_stopping_trigger import EarlyStoppingTrigger # NOQA
|
from chainer.training.triggers import interval_trigger # NOQA
from chainer.training.triggers import minmax_value_trigger # NOQA
# import class and function
from chainer.training.triggers.early_stopping_trigger import EarlyStoppingTrigger # NOQA
from chainer.training.triggers.interval_trigger import IntervalTrigger # NOQA
from chainer.training.triggers.manual_schedule_trigger import ManualScheduleTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import BestValueTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import MaxValueTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import MinValueTrigger # NOQA
|
Fix the order of importing
|
Fix the order of importing
|
Python
|
mit
|
wkentaro/chainer,ktnyt/chainer,chainer/chainer,ktnyt/chainer,jnishi/chainer,jnishi/chainer,wkentaro/chainer,chainer/chainer,ktnyt/chainer,ktnyt/chainer,niboshi/chainer,jnishi/chainer,hvy/chainer,chainer/chainer,pfnet/chainer,niboshi/chainer,keisuke-umezawa/chainer,keisuke-umezawa/chainer,hvy/chainer,keisuke-umezawa/chainer,hvy/chainer,rezoo/chainer,okuta/chainer,okuta/chainer,okuta/chainer,aonotas/chainer,niboshi/chainer,hvy/chainer,wkentaro/chainer,chainer/chainer,ronekko/chainer,jnishi/chainer,okuta/chainer,anaruse/chainer,tkerola/chainer,niboshi/chainer,keisuke-umezawa/chainer,wkentaro/chainer
|
from chainer.training.triggers import interval_trigger # NOQA
from chainer.training.triggers import minmax_value_trigger # NOQA
# import class and function
from chainer.training.triggers.early_stopping_trigger import EarlyStoppingTrigger # NOQA
from chainer.training.triggers.interval_trigger import IntervalTrigger # NOQA
from chainer.training.triggers.manual_schedule_trigger import ManualScheduleTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import BestValueTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import MaxValueTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import MinValueTrigger # NOQA
- from chainer.training.triggers.early_stopping_trigger import EarlyStoppingTrigger # NOQA
|
Fix the order of importing
|
## Code Before:
from chainer.training.triggers import interval_trigger # NOQA
from chainer.training.triggers import minmax_value_trigger # NOQA
# import class and function
from chainer.training.triggers.early_stopping_trigger import EarlyStoppingTrigger # NOQA
from chainer.training.triggers.interval_trigger import IntervalTrigger # NOQA
from chainer.training.triggers.manual_schedule_trigger import ManualScheduleTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import BestValueTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import MaxValueTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import MinValueTrigger # NOQA
from chainer.training.triggers.early_stopping_trigger import EarlyStoppingTrigger # NOQA
## Instruction:
Fix the order of importing
## Code After:
from chainer.training.triggers import interval_trigger # NOQA
from chainer.training.triggers import minmax_value_trigger # NOQA
# import class and function
from chainer.training.triggers.early_stopping_trigger import EarlyStoppingTrigger # NOQA
from chainer.training.triggers.interval_trigger import IntervalTrigger # NOQA
from chainer.training.triggers.manual_schedule_trigger import ManualScheduleTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import BestValueTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import MaxValueTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import MinValueTrigger # NOQA
|
# ... existing code ...
from chainer.training.triggers.minmax_value_trigger import BestValueTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import MaxValueTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import MinValueTrigger # NOQA
# ... rest of the code ...
|
2bd14f768ce7d82f7ef84d1e67d61afda5044581
|
st2common/st2common/constants/logging.py
|
st2common/st2common/constants/logging.py
|
import os
__all__ = [
'DEFAULT_LOGGING_CONF_PATH'
]
BASE_PATH = os.path.abspath(os.path.dirname(__file__))
DEFAULT_LOGGING_CONF_PATH = os.path.join(BASE_PATH, '../conf/base.logging.conf')
DEFAULT_LOGGING_CONF_PATH = os.path.abspath(DEFAULT_LOGGING_CONF_PATH)
|
import os
__all__ = [
'DEFAULT_LOGGING_CONF_PATH'
]
BASE_PATH = os.path.dirname(os.path.abspath((__file__)))
DEFAULT_LOGGING_CONF_PATH = os.path.join(BASE_PATH, '../conf/base.logging.conf')
DEFAULT_LOGGING_CONF_PATH = os.path.abspath(DEFAULT_LOGGING_CONF_PATH)
|
Use the correct base path.
|
Use the correct base path.
|
Python
|
apache-2.0
|
punalpatel/st2,lakshmi-kannan/st2,Plexxi/st2,jtopjian/st2,Plexxi/st2,grengojbo/st2,emedvedev/st2,punalpatel/st2,peak6/st2,dennybaa/st2,alfasin/st2,Plexxi/st2,Itxaka/st2,pinterb/st2,StackStorm/st2,nzlosh/st2,grengojbo/st2,nzlosh/st2,Itxaka/st2,pixelrebel/st2,Plexxi/st2,tonybaloney/st2,peak6/st2,StackStorm/st2,jtopjian/st2,lakshmi-kannan/st2,Itxaka/st2,StackStorm/st2,alfasin/st2,emedvedev/st2,grengojbo/st2,pinterb/st2,pixelrebel/st2,pixelrebel/st2,dennybaa/st2,StackStorm/st2,nzlosh/st2,tonybaloney/st2,armab/st2,lakshmi-kannan/st2,armab/st2,nzlosh/st2,pinterb/st2,jtopjian/st2,punalpatel/st2,dennybaa/st2,peak6/st2,emedvedev/st2,alfasin/st2,tonybaloney/st2,armab/st2
|
import os
__all__ = [
'DEFAULT_LOGGING_CONF_PATH'
]
- BASE_PATH = os.path.abspath(os.path.dirname(__file__))
+ BASE_PATH = os.path.dirname(os.path.abspath((__file__)))
DEFAULT_LOGGING_CONF_PATH = os.path.join(BASE_PATH, '../conf/base.logging.conf')
DEFAULT_LOGGING_CONF_PATH = os.path.abspath(DEFAULT_LOGGING_CONF_PATH)
|
Use the correct base path.
|
## Code Before:
import os
__all__ = [
'DEFAULT_LOGGING_CONF_PATH'
]
BASE_PATH = os.path.abspath(os.path.dirname(__file__))
DEFAULT_LOGGING_CONF_PATH = os.path.join(BASE_PATH, '../conf/base.logging.conf')
DEFAULT_LOGGING_CONF_PATH = os.path.abspath(DEFAULT_LOGGING_CONF_PATH)
## Instruction:
Use the correct base path.
## Code After:
import os
__all__ = [
'DEFAULT_LOGGING_CONF_PATH'
]
BASE_PATH = os.path.dirname(os.path.abspath((__file__)))
DEFAULT_LOGGING_CONF_PATH = os.path.join(BASE_PATH, '../conf/base.logging.conf')
DEFAULT_LOGGING_CONF_PATH = os.path.abspath(DEFAULT_LOGGING_CONF_PATH)
|
...
'DEFAULT_LOGGING_CONF_PATH'
]
BASE_PATH = os.path.dirname(os.path.abspath((__file__)))
DEFAULT_LOGGING_CONF_PATH = os.path.join(BASE_PATH, '../conf/base.logging.conf')
DEFAULT_LOGGING_CONF_PATH = os.path.abspath(DEFAULT_LOGGING_CONF_PATH)
...
|
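Aside: the intent of either spelling in the record above is to anchor the config path to the module's own location rather than the process working directory. A generic sketch of the pattern (no st2 imports assumed):

import os

# Resolve __file__ to an absolute path first, take its directory, then
# join the relative part; the final abspath() normalises the '../' away.
BASE_PATH = os.path.dirname(os.path.abspath(__file__))
conf_path = os.path.abspath(
    os.path.join(BASE_PATH, '../conf/base.logging.conf'))
print(conf_path)
|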
6adbbe71dcde926fbd9288b4a43b45ff1a339cdc
|
turbustat/statistics/stats_utils.py
|
turbustat/statistics/stats_utils.py
|
import numpy as np
def hellinger(data1, data2):
'''
Calculate the Hellinger Distance between two datasets.
Parameters
----------
data1 : numpy.ndarray
1D array.
data2 : numpy.ndarray
1D array.
Returns
-------
distance : float
Distance value.
'''
distance = (1 / np.sqrt(2)) * \
np.sqrt(np.nansum((np.sqrt(data1) - np.sqrt(data2)) ** 2.))
return distance
def standardize(x):
return (x - np.nanmean(x)) / np.nanstd(x)
|
import numpy as np
def hellinger(data1, data2):
'''
Calculate the Hellinger Distance between two datasets.
Parameters
----------
data1 : numpy.ndarray
1D array.
data2 : numpy.ndarray
1D array.
Returns
-------
distance : float
Distance value.
'''
distance = (1 / np.sqrt(2)) * \
np.sqrt(np.nansum((np.sqrt(data1) - np.sqrt(data2)) ** 2.))
return distance
def standardize(x):
return (x - np.nanmean(x)) / np.nanstd(x)
def kl_divergence(P, Q):
'''
Kullback Leibler Divergence
Parameters
----------
P,Q : numpy.ndarray
Two Discrete Probability distributions
Returns
-------
kl_divergence : float
'''
P = P[~np.isnan(P)]
Q = Q[~np.isnan(Q)]
P = P[np.isfinite(P)]
Q = Q[np.isfinite(Q)]
return np.nansum(np.where(Q != 0, P * np.log(P / Q), 0))
|
Move KL Div to utils file
|
Move KL Div to utils file
|
Python
|
mit
|
e-koch/TurbuStat,Astroua/TurbuStat
|
import numpy as np
def hellinger(data1, data2):
'''
Calculate the Hellinger Distance between two datasets.
Parameters
----------
data1 : numpy.ndarray
1D array.
data2 : numpy.ndarray
1D array.
Returns
-------
distance : float
Distance value.
'''
distance = (1 / np.sqrt(2)) * \
np.sqrt(np.nansum((np.sqrt(data1) - np.sqrt(data2)) ** 2.))
return distance
def standardize(x):
return (x - np.nanmean(x)) / np.nanstd(x)
+
+ def kl_divergence(P, Q):
+ '''
+ Kullback Leibler Divergence
+
+ Parameters
+ ----------
+
+ P,Q : numpy.ndarray
+ Two Discrete Probability distributions
+
+ Returns
+ -------
+
+ kl_divergence : float
+ '''
+ P = P[~np.isnan(P)]
+ Q = Q[~np.isnan(Q)]
+ P = P[np.isfinite(P)]
+ Q = Q[np.isfinite(Q)]
+ return np.nansum(np.where(Q != 0, P * np.log(P / Q), 0))
+
|
Move KL Div to utils file
|
## Code Before:
import numpy as np
def hellinger(data1, data2):
'''
Calculate the Hellinger Distance between two datasets.
Parameters
----------
data1 : numpy.ndarray
1D array.
data2 : numpy.ndarray
1D array.
Returns
-------
distance : float
Distance value.
'''
distance = (1 / np.sqrt(2)) * \
np.sqrt(np.nansum((np.sqrt(data1) - np.sqrt(data2)) ** 2.))
return distance
def standardize(x):
return (x - np.nanmean(x)) / np.nanstd(x)
## Instruction:
Move KL Div to utils file
## Code After:
import numpy as np
def hellinger(data1, data2):
'''
Calculate the Hellinger Distance between two datasets.
Parameters
----------
data1 : numpy.ndarray
1D array.
data2 : numpy.ndarray
1D array.
Returns
-------
distance : float
Distance value.
'''
distance = (1 / np.sqrt(2)) * \
np.sqrt(np.nansum((np.sqrt(data1) - np.sqrt(data2)) ** 2.))
return distance
def standardize(x):
return (x - np.nanmean(x)) / np.nanstd(x)
def kl_divergence(P, Q):
'''
Kullback Leibler Divergence
Parameters
----------
P,Q : numpy.ndarray
Two Discrete Probability distributions
Returns
-------
kl_divergence : float
'''
P = P[~np.isnan(P)]
Q = Q[~np.isnan(Q)]
P = P[np.isfinite(P)]
Q = Q[np.isfinite(Q)]
return np.nansum(np.where(Q != 0, P * np.log(P / Q), 0))
|
...
def standardize(x):
return (x - np.nanmean(x)) / np.nanstd(x)
def kl_divergence(P, Q):
'''
Kullback Leibler Divergence
Parameters
----------
P,Q : numpy.ndarray
Two Discrete Probability distributions
Returns
-------
kl_divergence : float
'''
P = P[~np.isnan(P)]
Q = Q[~np.isnan(Q)]
P = P[np.isfinite(P)]
Q = Q[np.isfinite(Q)]
return np.nansum(np.where(Q != 0, P * np.log(P / Q), 0))
...
|
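Aside: a quick numeric sanity check of the discrete KL divergence defined in the record above, small enough to verify by hand:

import numpy as np

P = np.array([0.5, 0.5])
Q = np.array([0.9, 0.1])

# sum_i P_i * log(P_i / Q_i), contributing 0 where Q_i == 0
kl = np.nansum(np.where(Q != 0, P * np.log(P / Q), 0))
print(kl)  # ~0.5108, i.e. 0.5*log(5/9) + 0.5*log(5)
|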
ddf3e604cee09d82ea8741d2ed08f600ba2f70c0
|
scaffolder/commands/list.py
|
scaffolder/commands/list.py
|
from optparse import make_option
from optparse import OptionParser
from scaffolder.core.commands import BaseCommand
from scaffolder.core.template import TemplateManager
class ListCommand(BaseCommand):
def __init__(self, name, help='', aliases=(), stdout=None, stderr=None):
help = 'Template command help entry'
parser = OptionParser(
version=self.get_version(),
option_list=self.get_option_list(),
usage='\n %prog {0} [OPTIONS]'.format(name)
)
aliases = ('tmp',)
BaseCommand.__init__(self, name, parser=parser, help=help, aliases=aliases)
def run(self, *args, **options):
manger = TemplateManager()
manger.list()
def get_default_option(self):
return []
|
from optparse import make_option
from optparse import OptionParser
from scaffolder.core.commands import BaseCommand
from scaffolder.core.template import TemplateManager
class ListCommand(BaseCommand):
help = 'Template command help entry'
def run(self, *args, **options):
manger = TemplateManager()
manger.list()
def get_default_option(self):
return []
|
Remove __init__ method, not needed.
|
ListCommand: Remove __init__ method, not needed.
|
Python
|
mit
|
goliatone/minions
|
from optparse import make_option
from optparse import OptionParser
from scaffolder.core.commands import BaseCommand
from scaffolder.core.template import TemplateManager
class ListCommand(BaseCommand):
- def __init__(self, name, help='', aliases=(), stdout=None, stderr=None):
- help = 'Template command help entry'
+ help = 'Template command help entry'
- parser = OptionParser(
- version=self.get_version(),
- option_list=self.get_option_list(),
- usage='\n %prog {0} [OPTIONS]'.format(name)
- )
- aliases = ('tmp',)
- BaseCommand.__init__(self, name, parser=parser, help=help, aliases=aliases)
def run(self, *args, **options):
manger = TemplateManager()
manger.list()
def get_default_option(self):
return []
|
Remove __init__ method, not needed.
|
## Code Before:
from optparse import make_option
from optparse import OptionParser
from scaffolder.core.commands import BaseCommand
from scaffolder.core.template import TemplateManager
class ListCommand(BaseCommand):
def __init__(self, name, help='', aliases=(), stdout=None, stderr=None):
help = 'Template command help entry'
parser = OptionParser(
version=self.get_version(),
option_list=self.get_option_list(),
usage='\n %prog {0} [OPTIONS]'.format(name)
)
aliases = ('tmp',)
BaseCommand.__init__(self, name, parser=parser, help=help, aliases=aliases)
def run(self, *args, **options):
manger = TemplateManager()
manger.list()
def get_default_option(self):
return []
## Instruction:
Remove __init__ method, not needed.
## Code After:
from optparse import make_option
from optparse import OptionParser
from scaffolder.core.commands import BaseCommand
from scaffolder.core.template import TemplateManager
class ListCommand(BaseCommand):
help = 'Template command help entry'
def run(self, *args, **options):
manger = TemplateManager()
manger.list()
def get_default_option(self):
return []
|
...
class ListCommand(BaseCommand):
help = 'Template command help entry'
def run(self, *args, **options):
manger = TemplateManager()
...
|
bfc50caf2ad967fa930faf34c6cac6b20b7fd4a7
|
nn/embedding/id_sequence_to_embedding.py
|
nn/embedding/id_sequence_to_embedding.py
|
from .ids_to_embeddings import ids_to_embeddings
from .embeddings_to_embedding import embeddings_to_embedding
from ..util import static_rank
def id_sequecne_to_embedding(child_id_sequence,
*,
output_embedding_size,
context_vector_size):
assert static_rank(child_id_sequence) == 2
return embeddings_to_embedding(
ids_to_embeddings(child_id_sequence),
output_embedding_size=output_embedding_size,
context_vector_size=context_vector_size)
|
from .ids_to_embeddings import ids_to_embeddings
from .embeddings_to_embedding import embeddings_to_embedding
from ..util import static_rank
def id_sequecne_to_embedding(child_id_sequence,
child_embeddings,
*,
output_embedding_size,
context_vector_size):
assert static_rank(child_id_sequence) == 2
return embeddings_to_embedding(
ids_to_embeddings(child_id_sequence, child_embeddings),
output_embedding_size=output_embedding_size,
context_vector_size=context_vector_size)
|
Fix missing argument of child embeddings
|
Fix missing argument of child embeddings
|
Python
|
unlicense
|
raviqqe/tensorflow-extenteten,raviqqe/tensorflow-extenteten
|
from .ids_to_embeddings import ids_to_embeddings
from .embeddings_to_embedding import embeddings_to_embedding
from ..util import static_rank
def id_sequecne_to_embedding(child_id_sequence,
+ child_embeddings,
*,
output_embedding_size,
context_vector_size):
assert static_rank(child_id_sequence) == 2
return embeddings_to_embedding(
- ids_to_embeddings(child_id_sequence),
+ ids_to_embeddings(child_id_sequence, child_embeddings),
output_embedding_size=output_embedding_size,
context_vector_size=context_vector_size)
|
Fix missing argument of child embeddings
|
## Code Before:
from .ids_to_embeddings import ids_to_embeddings
from .embeddings_to_embedding import embeddings_to_embedding
from ..util import static_rank
def id_sequecne_to_embedding(child_id_sequence,
*,
output_embedding_size,
context_vector_size):
assert static_rank(child_id_sequence) == 2
return embeddings_to_embedding(
ids_to_embeddings(child_id_sequence),
output_embedding_size=output_embedding_size,
context_vector_size=context_vector_size)
## Instruction:
Fix missing argument of child embeddings
## Code After:
from .ids_to_embeddings import ids_to_embeddings
from .embeddings_to_embedding import embeddings_to_embedding
from ..util import static_rank
def id_sequecne_to_embedding(child_id_sequence,
child_embeddings,
*,
output_embedding_size,
context_vector_size):
assert static_rank(child_id_sequence) == 2
return embeddings_to_embedding(
ids_to_embeddings(child_id_sequence, child_embeddings),
output_embedding_size=output_embedding_size,
context_vector_size=context_vector_size)
|
...
def id_sequecne_to_embedding(child_id_sequence,
child_embeddings,
*,
output_embedding_size,
context_vector_size):
...
assert static_rank(child_id_sequence) == 2
return embeddings_to_embedding(
ids_to_embeddings(child_id_sequence, child_embeddings),
output_embedding_size=output_embedding_size,
context_vector_size=context_vector_size)
...
|
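Aside: ids_to_embeddings is, in effect, a row lookup into an embedding matrix (tf.nn.embedding_lookup in TensorFlow terms), so the missing child_embeddings argument fixed above was the matrix to look rows up in. NumPy fancy indexing shows the same semantics:

import numpy as np

vocab_size, embedding_size = 5, 3
child_embeddings = np.random.rand(vocab_size, embedding_size)
child_id_sequence = np.array([[0, 2, 4],
                              [1, 1, 3]])   # rank 2, as the assert requires

looked_up = child_embeddings[child_id_sequence]  # each id selects one row
print(looked_up.shape)                           # (2, 3, 3)
|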
e170666cbbc1f2a61c0ffa077c66da4556a6c5bb
|
app/packages/views.py
|
app/packages/views.py
|
import requests
from . import packages
from models import Package, Downloads
from flask import jsonify
from datetime import timedelta
from app import cache
from utils import cache_timeout
@packages.route('/stats', methods=['GET'])
@cache_timeout
@cache.cached()
def stats():
resp = dict()
resp["count"] = Package.get_count()
resp["day"] = Downloads.get_overall_downloads_count(timedelta(days=1))
resp["week"] = Downloads.get_overall_downloads_count(timedelta(days=7))
resp["month"] = Downloads.get_overall_downloads_count(timedelta(days=30))
return jsonify(resp)
@packages.route('/featured', methods=['GET'])
@cache_timeout
@cache.cached()
def featured():
package_list = requests.get("https://atom.io/api/packages/featured")
theme_list = requests.get("https://atom.io/api/themes/featured")
featured_list = package_list.json() + theme_list.json()
# limit data to multiples of three
length = (len(featured_list) / 3) * 3
featured_list = featured_list[:length]
json_data = []
for item in featured_list:
obj = Package.get_package(item['name'])
if obj is not None:
json_data.append(obj.get_json())
return jsonify(results=json_data)
|
import requests
from . import packages
from models import Package, Downloads
from flask import jsonify
from datetime import timedelta
from app import cache
from utils import cache_timeout
@packages.route('/stats', methods=['GET'])
@cache_timeout
@cache.cached()
def stats():
resp = dict()
resp["count"] = Package.get_count()
resp["day"] = Downloads.get_overall_downloads_count(timedelta(days=1))
resp["week"] = Downloads.get_overall_downloads_count(timedelta(days=7))
resp["month"] = Downloads.get_overall_downloads_count(timedelta(days=30))
return jsonify(resp)
@packages.route('/featured', methods=['GET'])
@cache_timeout
@cache.cached()
def featured():
package_list = requests.get("https://atom.io/api/packages/featured")
theme_list = requests.get("https://atom.io/api/themes/featured")
featured_list = package_list.json() + theme_list.json()
# limit data to multiples of three
length = ((len(featured_list) + 2) / 3) * 3
featured_list = featured_list[:(length - 2)]
json_data = []
for item in featured_list:
obj = Package.get_package(item['name'])
if obj is not None:
json_data.append(obj.get_json())
for item in ["docblockr", "git-log"]:
obj = Package.get_package(item)
json_data.append(obj.get_json())
return jsonify(results=json_data)
|
Add my packages to featured list
|
Add my packages to featured list
|
Python
|
bsd-2-clause
|
NikhilKalige/atom-website,NikhilKalige/atom-website,NikhilKalige/atom-website
|
import requests
from . import packages
from models import Package, Downloads
from flask import jsonify
from datetime import timedelta
from app import cache
from utils import cache_timeout
@packages.route('/stats', methods=['GET'])
@cache_timeout
@cache.cached()
def stats():
resp = dict()
resp["count"] = Package.get_count()
resp["day"] = Downloads.get_overall_downloads_count(timedelta(days=1))
resp["week"] = Downloads.get_overall_downloads_count(timedelta(days=7))
resp["month"] = Downloads.get_overall_downloads_count(timedelta(days=30))
return jsonify(resp)
@packages.route('/featured', methods=['GET'])
@cache_timeout
@cache.cached()
def featured():
package_list = requests.get("https://atom.io/api/packages/featured")
theme_list = requests.get("https://atom.io/api/themes/featured")
featured_list = package_list.json() + theme_list.json()
# limit data to multiples of three
- length = (len(featured_list) / 3) * 3
+ length = ((len(featured_list) + 2) / 3) * 3
- featured_list = featured_list[:length]
+ featured_list = featured_list[:(length - 2)]
json_data = []
for item in featured_list:
obj = Package.get_package(item['name'])
if obj is not None:
json_data.append(obj.get_json())
+ for item in ["docblockr", "git-log"]:
+ obj = Package.get_package(item)
+ json_data.append(obj.get_json())
+
return jsonify(results=json_data)
|
Add my packages to featured list
|
## Code Before:
import requests
from . import packages
from models import Package, Downloads
from flask import jsonify
from datetime import timedelta
from app import cache
from utils import cache_timeout
@packages.route('/stats', methods=['GET'])
@cache_timeout
@cache.cached()
def stats():
resp = dict()
resp["count"] = Package.get_count()
resp["day"] = Downloads.get_overall_downloads_count(timedelta(days=1))
resp["week"] = Downloads.get_overall_downloads_count(timedelta(days=7))
resp["month"] = Downloads.get_overall_downloads_count(timedelta(days=30))
return jsonify(resp)
@packages.route('/featured', methods=['GET'])
@cache_timeout
@cache.cached()
def featured():
package_list = requests.get("https://atom.io/api/packages/featured")
theme_list = requests.get("https://atom.io/api/themes/featured")
featured_list = package_list.json() + theme_list.json()
# limit data to multiples of three
length = (len(featured_list) / 3) * 3
featured_list = featured_list[:length]
json_data = []
for item in featured_list:
obj = Package.get_package(item['name'])
if obj is not None:
json_data.append(obj.get_json())
return jsonify(results=json_data)
## Instruction:
Add my packages to featured list
## Code After:
import requests
from . import packages
from models import Package, Downloads
from flask import jsonify
from datetime import timedelta
from app import cache
from utils import cache_timeout
@packages.route('/stats', methods=['GET'])
@cache_timeout
@cache.cached()
def stats():
resp = dict()
resp["count"] = Package.get_count()
resp["day"] = Downloads.get_overall_downloads_count(timedelta(days=1))
resp["week"] = Downloads.get_overall_downloads_count(timedelta(days=7))
resp["month"] = Downloads.get_overall_downloads_count(timedelta(days=30))
return jsonify(resp)
@packages.route('/featured', methods=['GET'])
@cache_timeout
@cache.cached()
def featured():
package_list = requests.get("https://atom.io/api/packages/featured")
theme_list = requests.get("https://atom.io/api/themes/featured")
featured_list = package_list.json() + theme_list.json()
# limit data to multiples of three
length = ((len(featured_list) + 2) / 3) * 3
featured_list = featured_list[:(length - 2)]
json_data = []
for item in featured_list:
obj = Package.get_package(item['name'])
if obj is not None:
json_data.append(obj.get_json())
for item in ["docblockr", "git-log"]:
obj = Package.get_package(item)
json_data.append(obj.get_json())
return jsonify(results=json_data)
|
// ... existing code ...
theme_list = requests.get("https://atom.io/api/themes/featured")
featured_list = package_list.json() + theme_list.json()
# limit data to multiples of three
length = ((len(featured_list) + 2) / 3) * 3
featured_list = featured_list[:(length - 2)]
json_data = []
for item in featured_list:
// ... modified code ...
if obj is not None:
json_data.append(obj.get_json())
for item in ["docblockr", "git-log"]:
obj = Package.get_package(item)
json_data.append(obj.get_json())
return jsonify(results=json_data)
// ... rest of the code ...
|
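Aside: the adjusted arithmetic above keeps the rendered total a multiple of three after the two hand-picked packages are appended. A Python 3 sketch of the same trim (the original runs under Python 2, where / on ints already floors; // makes that explicit):

def trim_for_grid(items, extras=2, cols=3):
    # keep enough fetched items that len(kept) + extras divides evenly by cols
    keep = ((len(items) + extras) // cols) * cols - extras
    return items[:keep]

fetched = list(range(10))
kept = trim_for_grid(fetched)
print(len(kept), len(kept) + 2)  # 10 12 -- twelve items fill four rows of three
|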
9ac9efbea5ad9e51d564ec563fe25349726ec1f7
|
inpassing/view_util.py
|
inpassing/view_util.py
|
from . import exceptions as ex
from . import models
from .models import db, User, Org
def user_is_participant(user_id, org_id):
q = db.session.query(models.org_participants).filter_by(
participant=user_id, org=org_id
)
(ret,) = db.session.query(q.exists()).first()
return ret
def user_is_mod(user_id, org_id):
q = db.session.query(models.org_mods).filter_by(mod=user_id, org=org_id)
(ret,) = db.session.query(q.exists()).first()
return ret
def get_field(request, field):
val = request.get_json().get(field, None)
if val is None:
raise ex.MissingFieldError(field)
return val
def get_org_by_id(org_id):
org = Org.query.filter_by(id=org_id).first()
if org is None:
raise ex.OrgNotFound(org_id)
return org
def get_user_by_id(user_id):
user = User.query.filter_by(id=user_id).first()
if user is None:
raise ex.UserNotFound(user_id)
return user
|
from . import exceptions as ex
from . import models
from .models import db, User, Org, Daystate
def user_is_participant(user_id, org_id):
q = db.session.query(models.org_participants).filter_by(
participant=user_id, org=org_id
)
(ret,) = db.session.query(q.exists()).first()
return ret
def user_is_mod(user_id, org_id):
q = db.session.query(models.org_mods).filter_by(mod=user_id, org=org_id)
(ret,) = db.session.query(q.exists()).first()
return ret
def get_field(request, field):
val = request.get_json().get(field, None)
if val is None:
raise ex.MissingFieldError(field)
return val
def get_org_by_id(org_id):
org = Org.query.filter_by(id=org_id).first()
if org is None:
raise ex.OrgNotFound(org_id)
return org
def get_user_by_id(user_id):
user = User.query.filter_by(id=user_id).first()
if user is None:
raise ex.UserNotFound(user_id)
return user
def daystate_exists(daystate_id, org_id):
query = Daystate.query.filter_by(id=daystate_id, org_id=org_id)
(ret,) = db.session.query(query.exists()).first()
return ret
|
Add function to figure out if a given daystate ID is valid for an org
|
Add function to figure out if a given daystate ID is valid for an org
|
Python
|
mit
|
lukesanantonio/inpassing-backend,lukesanantonio/inpassing-backend
|
from . import exceptions as ex
from . import models
- from .models import db, User, Org
+ from .models import db, User, Org, Daystate
def user_is_participant(user_id, org_id):
q = db.session.query(models.org_participants).filter_by(
participant=user_id, org=org_id
)
(ret,) = db.session.query(q.exists()).first()
return ret
def user_is_mod(user_id, org_id):
q = db.session.query(models.org_mods).filter_by(mod=user_id, org=org_id)
(ret,) = db.session.query(q.exists()).first()
return ret
def get_field(request, field):
val = request.get_json().get(field, None)
if val is None:
raise ex.MissingFieldError(field)
return val
def get_org_by_id(org_id):
org = Org.query.filter_by(id=org_id).first()
if org is None:
raise ex.OrgNotFound(org_id)
return org
def get_user_by_id(user_id):
user = User.query.filter_by(id=user_id).first()
if user is None:
raise ex.UserNotFound(user_id)
return user
+
+ def daystate_exists(daystate_id, org_id):
+ query = Daystate.query.filter_by(id=daystate_id, org_id=org_id)
+ (ret,) = db.session.query(query.exists()).first()
+ return ret
+
|
Add function to figure out if a given daystate ID is valid for an org
|
## Code Before:
from . import exceptions as ex
from . import models
from .models import db, User, Org
def user_is_participant(user_id, org_id):
q = db.session.query(models.org_participants).filter_by(
participant=user_id, org=org_id
)
(ret,) = db.session.query(q.exists()).first()
return ret
def user_is_mod(user_id, org_id):
q = db.session.query(models.org_mods).filter_by(mod=user_id, org=org_id)
(ret,) = db.session.query(q.exists()).first()
return ret
def get_field(request, field):
val = request.get_json().get(field, None)
if val is None:
raise ex.MissingFieldError(field)
return val
def get_org_by_id(org_id):
org = Org.query.filter_by(id=org_id).first()
if org is None:
raise ex.OrgNotFound(org_id)
return org
def get_user_by_id(user_id):
user = User.query.filter_by(id=user_id).first()
if user is None:
raise ex.UserNotFound(user_id)
return user
## Instruction:
Add function to figure out if a given daystate ID is valid for an org
## Code After:
from . import exceptions as ex
from . import models
from .models import db, User, Org, Daystate
def user_is_participant(user_id, org_id):
q = db.session.query(models.org_participants).filter_by(
participant=user_id, org=org_id
)
(ret,) = db.session.query(q.exists()).first()
return ret
def user_is_mod(user_id, org_id):
q = db.session.query(models.org_mods).filter_by(mod=user_id, org=org_id)
(ret,) = db.session.query(q.exists()).first()
return ret
def get_field(request, field):
val = request.get_json().get(field, None)
if val is None:
raise ex.MissingFieldError(field)
return val
def get_org_by_id(org_id):
org = Org.query.filter_by(id=org_id).first()
if org is None:
raise ex.OrgNotFound(org_id)
return org
def get_user_by_id(user_id):
user = User.query.filter_by(id=user_id).first()
if user is None:
raise ex.UserNotFound(user_id)
return user
def daystate_exists(daystate_id, org_id):
query = Daystate.query.filter_by(id=daystate_id, org_id=org_id)
(ret,) = db.session.query(query.exists()).first()
return ret
|
// ... existing code ...
from . import exceptions as ex
from . import models
from .models import db, User, Org, Daystate
def user_is_participant(user_id, org_id):
// ... modified code ...
if user is None:
raise ex.UserNotFound(user_id)
return user
def daystate_exists(daystate_id, org_id):
query = Daystate.query.filter_by(id=daystate_id, org_id=org_id)
(ret,) = db.session.query(query.exists()).first()
return ret
// ... rest of the code ...
|
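Aside: the query.exists() pattern in the record above issues a single SELECT EXISTS(...) instead of fetching a row. A standalone SQLite sketch of the same shape (model trimmed to the two columns used; SQLAlchemy 1.4+ assumed):

from sqlalchemy import Column, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Daystate(Base):
    __tablename__ = 'daystate'
    id = Column(Integer, primary_key=True)
    org_id = Column(Integer)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add(Daystate(id=1, org_id=7))
    session.commit()
    query = session.query(Daystate).filter_by(id=1, org_id=7)
    (ret,) = session.query(query.exists()).first()
    print(ret)  # True -- evaluated in SQL, no row transferred
|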
edd8ac2d77b747cffbcf702e71f2633a148d64c6
|
wagtail/wagtailcore/hooks.py
|
wagtail/wagtailcore/hooks.py
|
from django.conf import settings
try:
from importlib import import_module
except ImportError:
# for Python 2.6, fall back on django.utils.importlib (deprecated as of Django 1.7)
from django.utils.importlib import import_module
_hooks = {}
def register(hook_name, fn=None):
"""
Register hook for ``hook_name``. Can be used as a decorator::
@register('hook_name')
def my_hook(...):
pass
or as a function call::
def my_hook(...):
pass
register('hook_name', my_hook)
"""
# Pretend to be a decorator if fn is not supplied
if fn is None:
return lambda fn: register(hook_name, fn)
if hook_name not in _hooks:
_hooks[hook_name] = []
_hooks[hook_name].append(fn)
_searched_for_hooks = False
def search_for_hooks():
global _searched_for_hooks
if not _searched_for_hooks:
for app_module in settings.INSTALLED_APPS:
try:
import_module('%s.wagtail_hooks' % app_module)
except ImportError:
continue
_searched_for_hooks = True
def get_hooks(hook_name):
search_for_hooks()
return _hooks.get(hook_name, [])
|
from django.conf import settings
try:
from importlib import import_module
except ImportError:
# for Python 2.6, fall back on django.utils.importlib (deprecated as of Django 1.7)
from django.utils.importlib import import_module
_hooks = {}
def register(hook_name, fn=None):
"""
Register hook for ``hook_name``. Can be used as a decorator::
@register('hook_name')
def my_hook(...):
pass
or as a function call::
def my_hook(...):
pass
register('hook_name', my_hook)
"""
# Pretend to be a decorator if fn is not supplied
if fn is None:
def decorator(fn):
register(hook_name, fn)
return fn
return decorator
if hook_name not in _hooks:
_hooks[hook_name] = []
_hooks[hook_name].append(fn)
_searched_for_hooks = False
def search_for_hooks():
global _searched_for_hooks
if not _searched_for_hooks:
for app_module in settings.INSTALLED_APPS:
try:
import_module('%s.wagtail_hooks' % app_module)
except ImportError:
continue
_searched_for_hooks = True
def get_hooks(hook_name):
search_for_hooks()
return _hooks.get(hook_name, [])
|
Return the function again from the hook decorator
|
Return the function again from the hook decorator
The decorator variant of hook registration did not return anything,
meaning that the decorated function would end up being `None`. This was
not noticed, as the functions are rarely called manually, as opposed to
being invoked via the hook.
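A self-contained toy (not Wagtail code) that reproduces the bug: the old lambda form returns the result of register(), which is None, so the decorated name gets rebound to None. Returning fn preserves it.

_hooks = {}

def register_old(hook_name, fn=None):
    if fn is None:
        # The lambda's body is register_old(...), which returns None,
        # so None becomes the value bound to the decorated name.
        return lambda fn: register_old(hook_name, fn)
    _hooks.setdefault(hook_name, []).append(fn)

@register_old('after_save')
def broken_hook():
    pass

print(broken_hook)  # None

def register_new(hook_name, fn=None):
    if fn is None:
        def decorator(fn):
            register_new(hook_name, fn)
            return fn  # hand the function back so the name survives
        return decorator
    _hooks.setdefault(hook_name, []).append(fn)

@register_new('after_save')
def working_hook():
    pass

print(working_hook)  # <function working_hook at 0x...>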
|
Python
|
bsd-3-clause
|
kaedroho/wagtail,willcodefortea/wagtail,JoshBarr/wagtail,takeshineshiro/wagtail,torchbox/wagtail,dresiu/wagtail,m-sanders/wagtail,jnns/wagtail,bjesus/wagtail,jorge-marques/wagtail,nilnvoid/wagtail,timorieber/wagtail,rsalmaso/wagtail,Toshakins/wagtail,tangentlabs/wagtail,nimasmi/wagtail,WQuanfeng/wagtail,timorieber/wagtail,benjaoming/wagtail,mixxorz/wagtail,thenewguy/wagtail,benjaoming/wagtail,Toshakins/wagtail,inonit/wagtail,mixxorz/wagtail,stevenewey/wagtail,darith27/wagtail,nealtodd/wagtail,kurtrwall/wagtail,taedori81/wagtail,serzans/wagtail,nilnvoid/wagtail,taedori81/wagtail,Pennebaker/wagtail,JoshBarr/wagtail,kurtrwall/wagtail,FlipperPA/wagtail,m-sanders/wagtail,torchbox/wagtail,mixxorz/wagtail,100Shapes/wagtail,takeflight/wagtail,kaedroho/wagtail,rsalmaso/wagtail,taedori81/wagtail,wagtail/wagtail,Klaudit/wagtail,benemery/wagtail,jordij/wagtail,gogobook/wagtail,quru/wagtail,janusnic/wagtail,chrxr/wagtail,torchbox/wagtail,gasman/wagtail,gasman/wagtail,chimeno/wagtail,mephizzle/wagtail,janusnic/wagtail,kurtw/wagtail,marctc/wagtail,kaedroho/wagtail,takeflight/wagtail,gasman/wagtail,zerolab/wagtail,Klaudit/wagtail,kaedroho/wagtail,jorge-marques/wagtail,dresiu/wagtail,tangentlabs/wagtail,chrxr/wagtail,iansprice/wagtail,rjsproxy/wagtail,gogobook/wagtail,mikedingjan/wagtail,thenewguy/wagtail,hamsterbacke23/wagtail,takeshineshiro/wagtail,darith27/wagtail,rv816/wagtail,iho/wagtail,inonit/wagtail,mikedingjan/wagtail,nimasmi/wagtail,davecranwell/wagtail,jorge-marques/wagtail,hamsterbacke23/wagtail,stevenewey/wagtail,nealtodd/wagtail,mephizzle/wagtail,rsalmaso/wagtail,timorieber/wagtail,nutztherookie/wagtail,hanpama/wagtail,100Shapes/wagtail,rsalmaso/wagtail,timorieber/wagtail,marctc/wagtail,wagtail/wagtail,KimGlazebrook/wagtail-experiment,benemery/wagtail,zerolab/wagtail,nealtodd/wagtail,KimGlazebrook/wagtail-experiment,jordij/wagtail,hamsterbacke23/wagtail,nimasmi/wagtail,zerolab/wagtail,100Shapes/wagtail,mjec/wagtail,kurtrwall/wagtail,kurtw/wagtail,marctc/wagtail,iho/wagtail,Toshakins/wagtail,iansprice/wagtail,torchbox/wagtail,Pennebaker/wagtail,tangentlabs/wagtail,mayapurmedia/wagtail,benjaoming/wagtail,mephizzle/wagtail,m-sanders/wagtail,gogobook/wagtail,rv816/wagtail,dresiu/wagtail,quru/wagtail,willcodefortea/wagtail,KimGlazebrook/wagtail-experiment,nutztherookie/wagtail,mephizzle/wagtail,marctc/wagtail,Klaudit/wagtail,mjec/wagtail,jorge-marques/wagtail,zerolab/wagtail,dresiu/wagtail,chimeno/wagtail,jordij/wagtail,Pennebaker/wagtail,Toshakins/wagtail,jordij/wagtail,chrxr/wagtail,rjsproxy/wagtail,hanpama/wagtail,davecranwell/wagtail,gasman/wagtail,thenewguy/wagtail,jnns/wagtail,dresiu/wagtail,Tivix/wagtail,chimeno/wagtail,inonit/wagtail,rjsproxy/wagtail,JoshBarr/wagtail,hamsterbacke23/wagtail,bjesus/wagtail,nilnvoid/wagtail,iho/wagtail,WQuanfeng/wagtail,chimeno/wagtail,FlipperPA/wagtail,janusnic/wagtail,kurtw/wagtail,stevenewey/wagtail,mixxorz/wagtail,benjaoming/wagtail,FlipperPA/wagtail,nrsimha/wagtail,mikedingjan/wagtail,tangentlabs/wagtail,KimGlazebrook/wagtail-experiment,takeshineshiro/wagtail,gogobook/wagtail,takeshineshiro/wagtail,nimasmi/wagtail,mayapurmedia/wagtail,kaedroho/wagtail,bjesus/wagtail,FlipperPA/wagtail,willcodefortea/wagtail,taedori81/wagtail,mayapurmedia/wagtail,wagtail/wagtail,willcodefortea/wagtail,hanpama/wagtail,jnns/wagtail,kurtrwall/wagtail,bjesus/wagtail,serzans/wagtail,serzans/wagtail,nilnvoid/wagtail,takeflight/wagtail,thenewguy/wagtail,quru/wagtail,Klaudit/wagtail,darith27/wagtail,quru/wagtail,nutztherookie/wagtail,zerolab/wagtail,nrsimha/wagtail,iho/wagtail,WQua
nfeng/wagtail,wagtail/wagtail,janusnic/wagtail,rv816/wagtail,inonit/wagtail,WQuanfeng/wagtail,Tivix/wagtail,davecranwell/wagtail,mikedingjan/wagtail,benemery/wagtail,JoshBarr/wagtail,gasman/wagtail,davecranwell/wagtail,jorge-marques/wagtail,Tivix/wagtail,thenewguy/wagtail,darith27/wagtail,mayapurmedia/wagtail,iansprice/wagtail,kurtw/wagtail,m-sanders/wagtail,takeflight/wagtail,nrsimha/wagtail,nrsimha/wagtail,hanpama/wagtail,stevenewey/wagtail,chrxr/wagtail,jnns/wagtail,taedori81/wagtail,mjec/wagtail,mixxorz/wagtail,iansprice/wagtail,rsalmaso/wagtail,nealtodd/wagtail,mjec/wagtail,Pennebaker/wagtail,serzans/wagtail,Tivix/wagtail,nutztherookie/wagtail,rjsproxy/wagtail,wagtail/wagtail,benemery/wagtail,chimeno/wagtail,rv816/wagtail
|
from django.conf import settings
try:
from importlib import import_module
except ImportError:
# for Python 2.6, fall back on django.utils.importlib (deprecated as of Django 1.7)
from django.utils.importlib import import_module
_hooks = {}
def register(hook_name, fn=None):
"""
Register hook for ``hook_name``. Can be used as a decorator::
@register('hook_name')
def my_hook(...):
pass
or as a function call::
def my_hook(...):
pass
register('hook_name', my_hook)
"""
# Pretend to be a decorator if fn is not supplied
if fn is None:
+ def decorator(fn):
- return lambda fn: register(hook_name, fn)
+ register(hook_name, fn)
+ return fn
+ return decorator
if hook_name not in _hooks:
_hooks[hook_name] = []
_hooks[hook_name].append(fn)
_searched_for_hooks = False
def search_for_hooks():
global _searched_for_hooks
if not _searched_for_hooks:
for app_module in settings.INSTALLED_APPS:
try:
import_module('%s.wagtail_hooks' % app_module)
except ImportError:
continue
_searched_for_hooks = True
def get_hooks(hook_name):
search_for_hooks()
return _hooks.get(hook_name, [])
|
Return the function again from the hook decorator
|
## Code Before:
from django.conf import settings
try:
from importlib import import_module
except ImportError:
# for Python 2.6, fall back on django.utils.importlib (deprecated as of Django 1.7)
from django.utils.importlib import import_module
_hooks = {}
def register(hook_name, fn=None):
"""
Register hook for ``hook_name``. Can be used as a decorator::
@register('hook_name')
def my_hook(...):
pass
or as a function call::
def my_hook(...):
pass
register('hook_name', my_hook)
"""
# Pretend to be a decorator if fn is not supplied
if fn is None:
return lambda fn: register(hook_name, fn)
if hook_name not in _hooks:
_hooks[hook_name] = []
_hooks[hook_name].append(fn)
_searched_for_hooks = False
def search_for_hooks():
global _searched_for_hooks
if not _searched_for_hooks:
for app_module in settings.INSTALLED_APPS:
try:
import_module('%s.wagtail_hooks' % app_module)
except ImportError:
continue
_searched_for_hooks = True
def get_hooks(hook_name):
search_for_hooks()
return _hooks.get(hook_name, [])
## Instruction:
Return the function again from the hook decorator
## Code After:
from django.conf import settings
try:
from importlib import import_module
except ImportError:
# for Python 2.6, fall back on django.utils.importlib (deprecated as of Django 1.7)
from django.utils.importlib import import_module
_hooks = {}
def register(hook_name, fn=None):
"""
Register hook for ``hook_name``. Can be used as a decorator::
@register('hook_name')
def my_hook(...):
pass
or as a function call::
def my_hook(...):
pass
register('hook_name', my_hook)
"""
# Pretend to be a decorator if fn is not supplied
if fn is None:
def decorator(fn):
register(hook_name, fn)
return fn
return decorator
if hook_name not in _hooks:
_hooks[hook_name] = []
_hooks[hook_name].append(fn)
_searched_for_hooks = False
def search_for_hooks():
global _searched_for_hooks
if not _searched_for_hooks:
for app_module in settings.INSTALLED_APPS:
try:
import_module('%s.wagtail_hooks' % app_module)
except ImportError:
continue
_searched_for_hooks = True
def get_hooks(hook_name):
search_for_hooks()
return _hooks.get(hook_name, [])
|
// ... existing code ...
# Pretend to be a decorator if fn is not supplied
if fn is None:
def decorator(fn):
register(hook_name, fn)
return fn
return decorator
if hook_name not in _hooks:
_hooks[hook_name] = []
// ... rest of the code ...
|
a013cdbe690271c4ec9bc172c994ff5f6e5808c4
|
test/test_assetstore_model_override.py
|
test/test_assetstore_model_override.py
|
import pytest
from girder.models.file import File
from girder.models.model_base import Model
from girder.utility import assetstore_utilities
from girder.utility.model_importer import ModelImporter
from girder.utility.abstract_assetstore_adapter import AbstractAssetstoreAdapter
class Fake(Model):
def initialize(self):
self.name = 'fake_collection'
def validate(self, doc):
return doc
class FakeAdapter(AbstractAssetstoreAdapter):
def __init__(self, assetstore):
self.the_assetstore = assetstore
@pytest.fixture
def fakeModel(db):
ModelImporter.registerModel('fake', Fake(), plugin='fake_plugin')
yield Fake().save({
'foo': 'bar',
'type': 'fake'
})
ModelImporter.unregisterModel('fake', plugin='fake_plugin')
@pytest.fixture
def fakeAdapter(db):
assetstore_utilities.setAssetstoreAdapter('fake', FakeAdapter)
yield
assetstore_utilities.removeAssetstoreAdapter('fake')
def testAssetstoreModelOverride(fakeModel, fakeAdapter, admin):
file = File().createFile(
creator=admin, item=None, name='a.out', size=0, assetstore=fakeModel,
assetstoreModel='fake', assetstoreModelPlugin='fake_plugin')
adapter = File().getAssetstoreAdapter(file)
assert adapter.the_assetstore == fakeModel
|
import pytest
from girder.models.file import File
from girder.models.model_base import Model
from girder.utility import assetstore_utilities
from girder.utility.model_importer import ModelImporter
from girder.utility.abstract_assetstore_adapter import AbstractAssetstoreAdapter
class Fake(Model):
def initialize(self):
self.name = 'fake_collection'
def validate(self, doc):
return doc
class FakeAdapter(AbstractAssetstoreAdapter):
def __init__(self, assetstore):
self.the_assetstore = assetstore
@pytest.fixture
def fakeModel(db):
ModelImporter.registerModel('fake', Fake(), plugin='fake_plugin')
yield Fake
ModelImporter.unregisterModel('fake', plugin='fake_plugin')
@pytest.fixture
def fakeAdapter(db):
assetstore_utilities.setAssetstoreAdapter('fake', FakeAdapter)
yield
assetstore_utilities.removeAssetstoreAdapter('fake')
def testAssetstoreModelOverride(fakeModel, fakeAdapter, admin):
fakeAssetstore = fakeModel().save({
'foo': 'bar',
'type': 'fake'
})
file = File().createFile(
creator=admin, item=None, name='a.out', size=0, assetstore=fakeAssetstore,
assetstoreModel='fake', assetstoreModelPlugin='fake_plugin')
adapter = File().getAssetstoreAdapter(file)
assert isinstance(adapter, FakeAdapter)
assert adapter.the_assetstore == fakeAssetstore
|
Improve clarity of fake assetstore model fixture
|
Improve clarity of fake assetstore model fixture
|
Python
|
apache-2.0
|
data-exp-lab/girder,girder/girder,manthey/girder,kotfic/girder,Xarthisius/girder,jbeezley/girder,girder/girder,manthey/girder,kotfic/girder,RafaelPalomar/girder,girder/girder,data-exp-lab/girder,girder/girder,manthey/girder,Xarthisius/girder,RafaelPalomar/girder,RafaelPalomar/girder,RafaelPalomar/girder,Kitware/girder,jbeezley/girder,data-exp-lab/girder,data-exp-lab/girder,data-exp-lab/girder,RafaelPalomar/girder,kotfic/girder,Kitware/girder,Xarthisius/girder,Xarthisius/girder,jbeezley/girder,Kitware/girder,Kitware/girder,manthey/girder,kotfic/girder,jbeezley/girder,Xarthisius/girder,kotfic/girder
|
import pytest
from girder.models.file import File
from girder.models.model_base import Model
from girder.utility import assetstore_utilities
from girder.utility.model_importer import ModelImporter
from girder.utility.abstract_assetstore_adapter import AbstractAssetstoreAdapter
class Fake(Model):
def initialize(self):
self.name = 'fake_collection'
def validate(self, doc):
return doc
class FakeAdapter(AbstractAssetstoreAdapter):
def __init__(self, assetstore):
self.the_assetstore = assetstore
@pytest.fixture
def fakeModel(db):
ModelImporter.registerModel('fake', Fake(), plugin='fake_plugin')
- yield Fake().save({
+ yield Fake
- 'foo': 'bar',
- 'type': 'fake'
- })
ModelImporter.unregisterModel('fake', plugin='fake_plugin')
@pytest.fixture
def fakeAdapter(db):
assetstore_utilities.setAssetstoreAdapter('fake', FakeAdapter)
yield
assetstore_utilities.removeAssetstoreAdapter('fake')
def testAssetstoreModelOverride(fakeModel, fakeAdapter, admin):
+ fakeAssetstore = fakeModel().save({
+ 'foo': 'bar',
+ 'type': 'fake'
+ })
file = File().createFile(
- creator=admin, item=None, name='a.out', size=0, assetstore=fakeModel,
+ creator=admin, item=None, name='a.out', size=0, assetstore=fakeAssetstore,
assetstoreModel='fake', assetstoreModelPlugin='fake_plugin')
adapter = File().getAssetstoreAdapter(file)
+ assert isinstance(adapter, FakeAdapter)
- assert adapter.the_assetstore == fakeModel
+ assert adapter.the_assetstore == fakeAssetstore
|
Improve clarity of fake assetstore model fixture
|
## Code Before:
import pytest
from girder.models.file import File
from girder.models.model_base import Model
from girder.utility import assetstore_utilities
from girder.utility.model_importer import ModelImporter
from girder.utility.abstract_assetstore_adapter import AbstractAssetstoreAdapter
class Fake(Model):
def initialize(self):
self.name = 'fake_collection'
def validate(self, doc):
return doc
class FakeAdapter(AbstractAssetstoreAdapter):
def __init__(self, assetstore):
self.the_assetstore = assetstore
@pytest.fixture
def fakeModel(db):
ModelImporter.registerModel('fake', Fake(), plugin='fake_plugin')
yield Fake().save({
'foo': 'bar',
'type': 'fake'
})
ModelImporter.unregisterModel('fake', plugin='fake_plugin')
@pytest.fixture
def fakeAdapter(db):
assetstore_utilities.setAssetstoreAdapter('fake', FakeAdapter)
yield
assetstore_utilities.removeAssetstoreAdapter('fake')
def testAssetstoreModelOverride(fakeModel, fakeAdapter, admin):
file = File().createFile(
creator=admin, item=None, name='a.out', size=0, assetstore=fakeModel,
assetstoreModel='fake', assetstoreModelPlugin='fake_plugin')
adapter = File().getAssetstoreAdapter(file)
assert adapter.the_assetstore == fakeModel
## Instruction:
Improve clarity of fake assetstore model fixture
## Code After:
import pytest
from girder.models.file import File
from girder.models.model_base import Model
from girder.utility import assetstore_utilities
from girder.utility.model_importer import ModelImporter
from girder.utility.abstract_assetstore_adapter import AbstractAssetstoreAdapter
class Fake(Model):
def initialize(self):
self.name = 'fake_collection'
def validate(self, doc):
return doc
class FakeAdapter(AbstractAssetstoreAdapter):
def __init__(self, assetstore):
self.the_assetstore = assetstore
@pytest.fixture
def fakeModel(db):
ModelImporter.registerModel('fake', Fake(), plugin='fake_plugin')
yield Fake
ModelImporter.unregisterModel('fake', plugin='fake_plugin')
@pytest.fixture
def fakeAdapter(db):
assetstore_utilities.setAssetstoreAdapter('fake', FakeAdapter)
yield
assetstore_utilities.removeAssetstoreAdapter('fake')
def testAssetstoreModelOverride(fakeModel, fakeAdapter, admin):
fakeAssetstore = fakeModel().save({
'foo': 'bar',
'type': 'fake'
})
file = File().createFile(
creator=admin, item=None, name='a.out', size=0, assetstore=fakeAssetstore,
assetstoreModel='fake', assetstoreModelPlugin='fake_plugin')
adapter = File().getAssetstoreAdapter(file)
assert isinstance(adapter, FakeAdapter)
assert adapter.the_assetstore == fakeAssetstore
|
# ... existing code ...
def fakeModel(db):
ModelImporter.registerModel('fake', Fake(), plugin='fake_plugin')
yield Fake
ModelImporter.unregisterModel('fake', plugin='fake_plugin')
# ... modified code ...
def testAssetstoreModelOverride(fakeModel, fakeAdapter, admin):
fakeAssetstore = fakeModel().save({
'foo': 'bar',
'type': 'fake'
})
file = File().createFile(
creator=admin, item=None, name='a.out', size=0, assetstore=fakeAssetstore,
assetstoreModel='fake', assetstoreModelPlugin='fake_plugin')
adapter = File().getAssetstoreAdapter(file)
assert isinstance(adapter, FakeAdapter)
assert adapter.the_assetstore == fakeAssetstore
# ... rest of the code ...
|
7608d0e89781f70fcb49e7dc3ee5cd57a094f18c
|
rx/__init__.py
|
rx/__init__.py
|
from .observable import Observable
from .anonymousobservable import AnonymousObservable
from .observer import Observer
from . import checkedobserver
from . import linq
from . import backpressure
try:
from asyncio import Future
except ImportError:
Future = None
# Rx configuration dictionary
config = {
"Future" : Future
}
|
from threading import Lock
from .observable import Observable
from .anonymousobservable import AnonymousObservable
from .observer import Observer
from . import checkedobserver
from . import linq
from . import backpressure
try:
from asyncio import Future
except ImportError:
Future = None
# Rx configuration dictionary
config = {
"Future" : Future,
"Lock" : Lock
}
|
Make it possible to set custom Lock
|
Make it possible to set custom Lock
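A sketch of the override this enables, assuming downstream code instantiates rx.config['Lock'] wherever it previously used threading.Lock; the NoOpLock class is illustrative only:

import rx

class NoOpLock(object):
    """Do-nothing lock for strictly single-threaded pipelines."""
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc_value, traceback):
        return False

rx.config["Lock"] = NoOpLock  # consumers of config can now pick this up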
|
Python
|
mit
|
ReactiveX/RxPY,ReactiveX/RxPY,dbrattli/RxPY
|
+ from threading import Lock
+
from .observable import Observable
from .anonymousobservable import AnonymousObservable
from .observer import Observer
from . import checkedobserver
from . import linq
from . import backpressure
try:
from asyncio import Future
except ImportError:
Future = None
# Rx configuration dictionary
config = {
- "Future" : Future
+ "Future" : Future,
+ "Lock" : Lock
}
|
Make it possible to set custom Lock
|
## Code Before:
from .observable import Observable
from .anonymousobservable import AnonymousObservable
from .observer import Observer
from . import checkedobserver
from . import linq
from . import backpressure
try:
from asyncio import Future
except ImportError:
Future = None
# Rx configuration dictionary
config = {
"Future" : Future
}
## Instruction:
Make it possible to set custom Lock
## Code After:
from threading import Lock
from .observable import Observable
from .anonymousobservable import AnonymousObservable
from .observer import Observer
from . import checkedobserver
from . import linq
from . import backpressure
try:
from asyncio import Future
except ImportError:
Future = None
# Rx configuration dictionary
config = {
"Future" : Future,
"Lock" : Lock
}
|
...
from threading import Lock
from .observable import Observable
from .anonymousobservable import AnonymousObservable
from .observer import Observer
...
# Rx configuration dictionary
config = {
"Future" : Future,
"Lock" : Lock
}
...
|
3a0c7caadb46a69fb29fe34bd64de28c9b263fd6
|
restconverter.py
|
restconverter.py
|
from docutils import core
from docutils.writers.html4css1 import Writer, HTMLTranslator
class HTMLFragmentTranslator(HTMLTranslator):
def __init__(self, document):
HTMLTranslator.__init__(self, document)
self.head_prefix = ['','','','','']
self.body_prefix = []
self.body_suffix = []
self.stylesheet = []
def astext(self):
return ''.join(self.body)
html_fragment_writer = Writer()
html_fragment_writer.translator_class = HTMLFragmentTranslator
def rest_to_html(s):
"""Convert ReST input to HTML output"""
return core.publish_string(s, writer=html_fragment_writer)
|
from docutils import core
from docutils.writers.html4css1 import Writer, HTMLTranslator
class HTMLFragmentTranslator(HTMLTranslator):
def __init__(self, document):
HTMLTranslator.__init__(self, document)
self.head_prefix = ['','','','','']
self.body_prefix = []
self.body_suffix = []
self.stylesheet = []
def astext(self):
return ''.join(self.body)
html_fragment_writer = Writer()
html_fragment_writer.translator_class = HTMLFragmentTranslator
def rest_to_html(s):
"""Convert ReST input to HTML output"""
return core.publish_string(s, writer=html_fragment_writer)
def rest_to_html_fragment(s):
parts = core.publish_parts(
source=s,
writer_name='html')
return parts['body_pre_docinfo']+parts['fragment']
|
Add rest_to_html_fragment to be able to convert just the body part
|
Add rest_to_html_fragment to be able to convert just the body part
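Assuming docutils is installed, usage of the new helper might look like this (the exact markup docutils emits can vary by version):

from restconverter import rest_to_html_fragment

fragment = rest_to_html_fragment('Hello *world*')
print(fragment)  # roughly: <p>Hello <em>world</em></p>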
|
Python
|
bsd-2-clause
|
jkossen/flaskjk
|
from docutils import core
from docutils.writers.html4css1 import Writer, HTMLTranslator
class HTMLFragmentTranslator(HTMLTranslator):
def __init__(self, document):
HTMLTranslator.__init__(self, document)
self.head_prefix = ['','','','','']
self.body_prefix = []
self.body_suffix = []
self.stylesheet = []
def astext(self):
return ''.join(self.body)
html_fragment_writer = Writer()
html_fragment_writer.translator_class = HTMLFragmentTranslator
def rest_to_html(s):
"""Convert ReST input to HTML output"""
return core.publish_string(s, writer=html_fragment_writer)
+ def rest_to_html_fragment(s):
+ parts = core.publish_parts(
+ source=s,
+ writer_name='html')
+ return parts['body_pre_docinfo']+parts['fragment']
+
|
Add rest_to_html_fragment to be able to convert just the body part
|
## Code Before:
from docutils import core
from docutils.writers.html4css1 import Writer, HTMLTranslator
class HTMLFragmentTranslator(HTMLTranslator):
def __init__(self, document):
HTMLTranslator.__init__(self, document)
self.head_prefix = ['','','','','']
self.body_prefix = []
self.body_suffix = []
self.stylesheet = []
def astext(self):
return ''.join(self.body)
html_fragment_writer = Writer()
html_fragment_writer.translator_class = HTMLFragmentTranslator
def rest_to_html(s):
"""Convert ReST input to HTML output"""
return core.publish_string(s, writer=html_fragment_writer)
## Instruction:
Add rest_to_html_fragment to be able to convert just the body part
## Code After:
from docutils import core
from docutils.writers.html4css1 import Writer, HTMLTranslator
class HTMLFragmentTranslator(HTMLTranslator):
def __init__(self, document):
HTMLTranslator.__init__(self, document)
self.head_prefix = ['','','','','']
self.body_prefix = []
self.body_suffix = []
self.stylesheet = []
def astext(self):
return ''.join(self.body)
html_fragment_writer = Writer()
html_fragment_writer.translator_class = HTMLFragmentTranslator
def rest_to_html(s):
"""Convert ReST input to HTML output"""
return core.publish_string(s, writer=html_fragment_writer)
def rest_to_html_fragment(s):
parts = core.publish_parts(
source=s,
writer_name='html')
return parts['body_pre_docinfo']+parts['fragment']
|
// ... existing code ...
"""Convert ReST input to HTML output"""
return core.publish_string(s, writer=html_fragment_writer)
def rest_to_html_fragment(s):
parts = core.publish_parts(
source=s,
writer_name='html')
return parts['body_pre_docinfo']+parts['fragment']
// ... rest of the code ...
|
08b54819a56d9bfc65225045d97a4c331f9a3e11
|
manage.py
|
manage.py
|
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand
from service import app, db
# db.create_all() needs all models to be imported
from service.db_access import *
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
|
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand
from service import app, db
# db.create_all() needs all models to be imported explicitly (not *)
from service.db_access import User
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
|
Fix model import needed by create_all()
|
Fix model import needed by create_all()
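A minimal standalone illustration of the gotcha behind this fix, not the project's actual models: SQLAlchemy only registers a table when the model class body executes, i.e. when its module is imported, so create_all() silently skips models that were never imported.

from flask import Flask
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite://'
db = SQLAlchemy(app)

class User(db.Model):                             # executing this class
    id = db.Column(db.Integer, primary_key=True)  # statement registers 'user'

with app.app_context():
    db.create_all()                   # creates only tables in db.metadata
    print(db.metadata.tables.keys())  # dict_keys(['user'])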
|
Python
|
mit
|
LandRegistry/login-api,LandRegistry/login-api
|
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand
from service import app, db
- # db.create_all() needs all models to be imported
+ # db.create_all() needs all models to be imported explicitly (not *)
- from service.db_access import *
+ from service.db_access import User
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
|
Fix model import needed by create_all()
|
## Code Before:
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand
from service import app, db
# db.create_all() needs all models to be imported
from service.db_access import *
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
## Instruction:
Fix model import needed by create_all()
## Code After:
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand
from service import app, db
# db.create_all() needs all models to be imported explicitly (not *)
from service.db_access import User
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
|
...
from service import app, db
# db.create_all() needs all models to be imported explicitly (not *)
from service.db_access import User
migrate = Migrate(app, db)
...
|
f908501860858311536a3fef03fda7a632ce5412
|
djohno/tests/test_utils.py
|
djohno/tests/test_utils.py
|
from django.core.exceptions import ValidationError
from django.test import TestCase
import djohno
from djohno.utils import (
is_pretty_from_address,
get_app_versions
)
class DjohnoUtilTests(TestCase):
def test_is_pretty_from_address_fails_on_bare_address(self):
"""
Ensure normal email addresses aren't parsed as being "pretty".
"""
self.assertFalse(is_pretty_from_address('[email protected]'))
def test_is_pretty_from_succeeds_on_pretty_address(self):
"""
Ensure pretty addresses (e.g. Foo <[email protected]>) are parsed as
being "pretty".
"""
self.assertTrue(is_pretty_from_address('Foo <[email protected]>'))
def test_is_pretty_from_raises_validation_error_on_bad_input(self):
"""
Ensure invalid email addresses (e.g. "hello") raise
ValidationError if given invalid inputs.
"""
with self.assertRaises(ValidationError):
self.assertTrue(is_pretty_from_address('hello'))
def test_get_installed_app_versions(self):
versions = get_app_versions()
self.assertEqual(versions['Djohno']['installed'], djohno.__version__)
self.assertEqual(versions['Baz']['installed'], '0.4.2')
self.assertEqual(versions['Moo']['installed'], '0.42')
|
from django.core.exceptions import ValidationError
from django.test import TestCase
import djohno
from djohno.utils import (
is_pretty_from_address,
get_app_versions
)
class DjohnoUtilTests(TestCase):
def test_is_pretty_from_address_fails_on_bare_address(self):
"""
Ensure normal email addresses aren't parsed as being "pretty".
"""
self.assertFalse(is_pretty_from_address('[email protected]'))
def test_is_pretty_from_succeeds_on_pretty_address(self):
"""
Ensure pretty addresses (e.g. Foo <[email protected]>) are parsed as
being "pretty".
"""
self.assertTrue(is_pretty_from_address('Foo <[email protected]>'))
def test_is_pretty_from_raises_validation_error_on_bad_input(self):
"""
Ensure invalid email addresses (e.g. "hello") raise
ValidationError if given invalid inputs.
"""
with self.assertRaises(ValidationError):
self.assertTrue(is_pretty_from_address('hello'))
def test_get_installed_app_versions(self):
"""
Ensure we can correctly get the version of a few simple apps
(Baz and Moo are bundled in djohno.test, and set up in
test_settings.py).
"""
versions = get_app_versions()
self.assertEqual(versions['Djohno']['installed'], djohno.__version__)
self.assertEqual(versions['Baz']['installed'], '0.4.2')
self.assertEqual(versions['Moo']['installed'], '0.42')
|
Add a missing test description
|
Add a missing test description
|
Python
|
bsd-2-clause
|
dominicrodger/djohno,dominicrodger/djohno
|
from django.core.exceptions import ValidationError
from django.test import TestCase
import djohno
from djohno.utils import (
is_pretty_from_address,
get_app_versions
)
class DjohnoUtilTests(TestCase):
def test_is_pretty_from_address_fails_on_bare_address(self):
"""
Ensure normal email addresses aren't parsed as being "pretty".
"""
self.assertFalse(is_pretty_from_address('[email protected]'))
def test_is_pretty_from_succeeds_on_pretty_address(self):
"""
Ensure pretty addresses (e.g. Foo <[email protected]>) are parsed as
being "pretty".
"""
self.assertTrue(is_pretty_from_address('Foo <[email protected]>'))
def test_is_pretty_from_raises_validation_error_on_bad_input(self):
"""
Ensure invalid email addresses (e.g. "hello") raise
ValidationError if given invalid inputs.
"""
with self.assertRaises(ValidationError):
self.assertTrue(is_pretty_from_address('hello'))
def test_get_installed_app_versions(self):
+ """
+ Ensure we can correctly get the version of a few simple apps
+ (Baz and Moo are bundled in djohno.test, and set up in
+ test_settings.py).
+ """
versions = get_app_versions()
self.assertEqual(versions['Djohno']['installed'], djohno.__version__)
self.assertEqual(versions['Baz']['installed'], '0.4.2')
self.assertEqual(versions['Moo']['installed'], '0.42')
|
Add a missing test description
|
## Code Before:
from django.core.exceptions import ValidationError
from django.test import TestCase
import djohno
from djohno.utils import (
is_pretty_from_address,
get_app_versions
)
class DjohnoUtilTests(TestCase):
def test_is_pretty_from_address_fails_on_bare_address(self):
"""
Ensure normal email addresses aren't parsed as being "pretty".
"""
self.assertFalse(is_pretty_from_address('[email protected]'))
def test_is_pretty_from_succeeds_on_pretty_address(self):
"""
Ensure pretty addresses (e.g. Foo <[email protected]>) are parsed as
being "pretty".
"""
self.assertTrue(is_pretty_from_address('Foo <[email protected]>'))
def test_is_pretty_from_raises_validation_error_on_bad_input(self):
"""
Ensure invalid email addresses (e.g. "hello") raise
ValidationError if given invalid inputs.
"""
with self.assertRaises(ValidationError):
self.assertTrue(is_pretty_from_address('hello'))
def test_get_installed_app_versions(self):
versions = get_app_versions()
self.assertEqual(versions['Djohno']['installed'], djohno.__version__)
self.assertEqual(versions['Baz']['installed'], '0.4.2')
self.assertEqual(versions['Moo']['installed'], '0.42')
## Instruction:
Add a missing test description
## Code After:
from django.core.exceptions import ValidationError
from django.test import TestCase
import djohno
from djohno.utils import (
is_pretty_from_address,
get_app_versions
)
class DjohnoUtilTests(TestCase):
def test_is_pretty_from_address_fails_on_bare_address(self):
"""
Ensure normal email addresses aren't parsed as being "pretty".
"""
self.assertFalse(is_pretty_from_address('[email protected]'))
def test_is_pretty_from_succeeds_on_pretty_address(self):
"""
Ensure pretty addresses (e.g. Foo <[email protected]>) are parsed as
being "pretty".
"""
self.assertTrue(is_pretty_from_address('Foo <[email protected]>'))
def test_is_pretty_from_raises_validation_error_on_bad_input(self):
"""
Ensure invalid email addresses (e.g. "hello") raise
ValidationError if given invalid inputs.
"""
with self.assertRaises(ValidationError):
self.assertTrue(is_pretty_from_address('hello'))
def test_get_installed_app_versions(self):
"""
Ensure we can correctly get the version of a few simple apps
(Baz and Moo are bundled in djohno.test, and set up in
test_settings.py).
"""
versions = get_app_versions()
self.assertEqual(versions['Djohno']['installed'], djohno.__version__)
self.assertEqual(versions['Baz']['installed'], '0.4.2')
self.assertEqual(versions['Moo']['installed'], '0.42')
|
# ... existing code ...
self.assertTrue(is_pretty_from_address('hello'))
def test_get_installed_app_versions(self):
"""
Ensure we can correctly get the version of a few simple apps
(Baz and Moo are bundled in djohno.test, and set up in
test_settings.py).
"""
versions = get_app_versions()
self.assertEqual(versions['Djohno']['installed'], djohno.__version__)
self.assertEqual(versions['Baz']['installed'], '0.4.2')
# ... rest of the code ...
|
25b9818da4b1922d808812bb43a9c1b35c277b7e
|
integration-test/1687-fewer-places-at-low-zoom.py
|
integration-test/1687-fewer-places-at-low-zoom.py
|
from . import FixtureTest
class LowZoomPlacesTest(FixtureTest):
def test_zoom_1(self):
import dsl
z, x, y = (3, 7, 3)
self.generate_fixtures(
dsl.way(607976629, dsl.tile_centre_shape(z, x, y), {
"min_zoom": 1,
"__ne_max_zoom": 10,
"__ne_min_zoom": 3,
"area": 0,
"place": "country",
"name": "Guam",
"population": 185427,
"source": "openstreetmap.org",
}),
)
# should exist at zoom 3 (the min zoom from NE)
self.assert_has_feature(
z, x, y, 'places', {
'id': 607976629,
'kind': 'country',
'name': 'Guam',
})
# should not exist at zoom 2 (one less than the min zoom)
self.assert_no_matching_feature(
z-1, x//2, y//2, 'places', {
'id': 607976629,
})
# should not exist at zoom 1
self.assert_no_matching_feature(
z-2, x//4, y//4, 'places', {
'id': 607976629,
})
|
from . import FixtureTest
class LowZoomPlacesTest(FixtureTest):
def test_zoom_1(self):
import dsl
z, x, y = (3, 7, 3)
self.generate_fixtures(
dsl.way(607976629, dsl.tile_centre_shape(z, x, y), {
"min_zoom": 1,
"__ne_max_zoom": 10,
"__ne_min_zoom": 3,
"area": 0,
"place": "country",
"name": "Guam",
"population": 185427,
"source": "openstreetmap.org",
}),
)
# should exist at zoom 3 (the min zoom from NE)
self.assert_has_feature(
z, x, y, 'places', {
'id': 607976629,
'kind': 'country',
'name': 'Guam',
})
# should exist at zoom 2 (one past the min zoom)
self.assert_has_feature(
z-1, x//2, y//2, 'places', {
'id': 607976629,
'kind': 'country',
'name': 'Guam',
})
# should not exist at zoom 1
self.assert_no_matching_feature(
z-2, x//4, y//4, 'places', {
'id': 607976629,
})
|
Revert "update Guam to not show at zoom 2"
|
Revert "update Guam to not show at zoom 2"
This reverts commit b12f1560f6b6284c9d26dab96a6c09eac1942424.
|
Python
|
mit
|
mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource
|
from . import FixtureTest
class LowZoomPlacesTest(FixtureTest):
def test_zoom_1(self):
import dsl
z, x, y = (3, 7, 3)
self.generate_fixtures(
dsl.way(607976629, dsl.tile_centre_shape(z, x, y), {
"min_zoom": 1,
"__ne_max_zoom": 10,
"__ne_min_zoom": 3,
"area": 0,
"place": "country",
"name": "Guam",
"population": 185427,
"source": "openstreetmap.org",
}),
)
# should exist at zoom 3 (the min zoom from NE)
self.assert_has_feature(
z, x, y, 'places', {
'id': 607976629,
'kind': 'country',
'name': 'Guam',
})
- # should not exist at zoom 2 (one less than the min zoom)
+ # should exist at zoom 2 (one past the min zoom)
- self.assert_no_matching_feature(
+ self.assert_has_feature(
z-1, x//2, y//2, 'places', {
'id': 607976629,
+ 'kind': 'country',
+ 'name': 'Guam',
})
# should not exist at zoom 1
self.assert_no_matching_feature(
z-2, x//4, y//4, 'places', {
'id': 607976629,
})
|
Revert "update Guam to not show at zoom 2"
|
## Code Before:
from . import FixtureTest
class LowZoomPlacesTest(FixtureTest):
def test_zoom_1(self):
import dsl
z, x, y = (3, 7, 3)
self.generate_fixtures(
dsl.way(607976629, dsl.tile_centre_shape(z, x, y), {
"min_zoom": 1,
"__ne_max_zoom": 10,
"__ne_min_zoom": 3,
"area": 0,
"place": "country",
"name": "Guam",
"population": 185427,
"source": "openstreetmap.org",
}),
)
# should exist at zoom 3 (the min zoom from NE)
self.assert_has_feature(
z, x, y, 'places', {
'id': 607976629,
'kind': 'country',
'name': 'Guam',
})
# should not exist at zoom 2 (one less than the min zoom)
self.assert_no_matching_feature(
z-1, x//2, y//2, 'places', {
'id': 607976629,
})
# should not exist at zoom 1
self.assert_no_matching_feature(
z-2, x//4, y//4, 'places', {
'id': 607976629,
})
## Instruction:
Revert "update Guam to not show at zoom 2"
## Code After:
from . import FixtureTest
class LowZoomPlacesTest(FixtureTest):
def test_zoom_1(self):
import dsl
z, x, y = (3, 7, 3)
self.generate_fixtures(
dsl.way(607976629, dsl.tile_centre_shape(z, x, y), {
"min_zoom": 1,
"__ne_max_zoom": 10,
"__ne_min_zoom": 3,
"area": 0,
"place": "country",
"name": "Guam",
"population": 185427,
"source": "openstreetmap.org",
}),
)
# should exist at zoom 3 (the min zoom from NE)
self.assert_has_feature(
z, x, y, 'places', {
'id': 607976629,
'kind': 'country',
'name': 'Guam',
})
# should exist at zoom 2 (one past the min zoom)
self.assert_has_feature(
z-1, x//2, y//2, 'places', {
'id': 607976629,
'kind': 'country',
'name': 'Guam',
})
# should not exist at zoom 1
self.assert_no_matching_feature(
z-2, x//4, y//4, 'places', {
'id': 607976629,
})
|
...
'name': 'Guam',
})
# should exist at zoom 2 (one past the min zoom)
self.assert_has_feature(
z-1, x//2, y//2, 'places', {
'id': 607976629,
'kind': 'country',
'name': 'Guam',
})
# should not exist at zoom 1
...
|
9504529dd4b9140be0026d0b30a0e88e5dea5e25
|
rtrss/config.py
|
rtrss/config.py
|
import os
import logging
import importlib
# All configuration defaults are set in this module
TRACKER_HOST = 'rutracker.org'
# Timezone for the tracker times
TZNAME = 'Europe/Moscow'
LOGLEVEL = logging.INFO
LOG_FORMAT_LOGENTRIES = '%(levelname)s %(name)s %(message)s'
LOG_FORMAT_BRIEF = '%(asctime)s %(levelname)s %(name)s %(message)s'
ADMIN_LOGIN = os.environ.get('ADMIN_LOGIN', 'admin')
ADMIN_PASSWORD = os.environ.get('ADMIN_PASSWORD', 'admin')
ADMIN_EMAIL = os.environ.get('ADMIN_EMAIL', 'admin@localhost')
# path to save torrent files
TORRENT_PATH_PATTERN = 'torrents/{}.torrent'
APP_ENVIRONMENT = os.environ.get('RTRSS_ENVIRONMENT')
if not APP_ENVIRONMENT:
raise EnvironmentError('RTRSS_ENVIRONMENT must be set')
_mod = importlib.import_module('rtrss.config_{}'.format(APP_ENVIRONMENT))
_envconf = {k: v for k, v in _mod.__dict__.items() if k == k.upper()}
globals().update(_envconf)
|
import os
import logging
import importlib
# All configuration defaults are set in this module
TRACKER_HOST = 'rutracker.org'
# Timezone for the tracker times
TZNAME = 'Europe/Moscow'
LOGLEVEL = logging.INFO
LOG_FORMAT_LOGENTRIES = '%(levelname)s %(name)s %(message)s'
LOG_FORMAT_BRIEF = '%(asctime)s %(levelname)s %(name)s %(message)s'
ADMIN_LOGIN = os.environ.get('ADMIN_LOGIN', 'admin')
ADMIN_PASSWORD = os.environ.get('ADMIN_PASSWORD', 'admin')
ADMIN_EMAIL = os.environ.get('ADMIN_EMAIL', 'admin@localhost')
# path to save torrent files
TORRENT_PATH_PATTERN = 'torrents/{}.torrent'
APP_ENVIRONMENT = os.environ.get('RTRSS_ENVIRONMENT')
if not APP_ENVIRONMENT:
raise EnvironmentError('RTRSS_ENVIRONMENT must be set')
IP = '0.0.0.0'
PORT = 8080
_mod = importlib.import_module('rtrss.config_{}'.format(APP_ENVIRONMENT))
_envconf = {k: v for k, v in _mod.__dict__.items() if k == k.upper()}
globals().update(_envconf)
|
Add default IP and PORT
|
Add default IP and PORT
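For context, a hypothetical rtrss/config_production.py shows how the importlib override interacts with the new defaults: only uppercase names it defines shadow the base module, and anything it omits (here IP) falls through.

# rtrss/config_production.py -- hypothetical environment override
PORT = 80   # replaces the default 8080 via globals().update(_envconf)
# IP is not defined here, so the '0.0.0.0' default survives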
|
Python
|
apache-2.0
|
notapresent/rtrss,notapresent/rtrss,notapresent/rtrss,notapresent/rtrss
|
import os
import logging
import importlib
# All configuration defaults are set in this module
TRACKER_HOST = 'rutracker.org'
# Timezone for the tracker times
TZNAME = 'Europe/Moscow'
LOGLEVEL = logging.INFO
LOG_FORMAT_LOGENTRIES = '%(levelname)s %(name)s %(message)s'
LOG_FORMAT_BRIEF = '%(asctime)s %(levelname)s %(name)s %(message)s'
ADMIN_LOGIN = os.environ.get('ADMIN_LOGIN', 'admin')
ADMIN_PASSWORD = os.environ.get('ADMIN_PASSWORD', 'admin')
ADMIN_EMAIL = os.environ.get('ADMIN_EMAIL', 'admin@localhost')
# path to save torrent files
TORRENT_PATH_PATTERN = 'torrents/{}.torrent'
APP_ENVIRONMENT = os.environ.get('RTRSS_ENVIRONMENT')
if not APP_ENVIRONMENT:
raise EnvironmentError('RTRSS_ENVIRONMENT must be set')
+ IP = '0.0.0.0'
+ PORT = 8080
+
_mod = importlib.import_module('rtrss.config_{}'.format(APP_ENVIRONMENT))
_envconf = {k: v for k, v in _mod.__dict__.items() if k == k.upper()}
globals().update(_envconf)
|
Add default IP and PORT
|
## Code Before:
import os
import logging
import importlib
# All configuration defaults are set in this module
TRACKER_HOST = 'rutracker.org'
# Timezone for the tracker times
TZNAME = 'Europe/Moscow'
LOGLEVEL = logging.INFO
LOG_FORMAT_LOGENTRIES = '%(levelname)s %(name)s %(message)s'
LOG_FORMAT_BRIEF = '%(asctime)s %(levelname)s %(name)s %(message)s'
ADMIN_LOGIN = os.environ.get('ADMIN_LOGIN', 'admin')
ADMIN_PASSWORD = os.environ.get('ADMIN_PASSWORD', 'admin')
ADMIN_EMAIL = os.environ.get('ADMIN_EMAIL', 'admin@localhost')
# path to save torrent files
TORRENT_PATH_PATTERN = 'torrents/{}.torrent'
APP_ENVIRONMENT = os.environ.get('RTRSS_ENVIRONMENT')
if not APP_ENVIRONMENT:
raise EnvironmentError('RTRSS_ENVIRONMENT must be set')
_mod = importlib.import_module('rtrss.config_{}'.format(APP_ENVIRONMENT))
_envconf = {k: v for k, v in _mod.__dict__.items() if k == k.upper()}
globals().update(_envconf)
## Instruction:
Add default IP and PORT
## Code After:
import os
import logging
import importlib
# All configuration defaults are set in this module
TRACKER_HOST = 'rutracker.org'
# Timezone for the tracker times
TZNAME = 'Europe/Moscow'
LOGLEVEL = logging.INFO
LOG_FORMAT_LOGENTRIES = '%(levelname)s %(name)s %(message)s'
LOG_FORMAT_BRIEF = '%(asctime)s %(levelname)s %(name)s %(message)s'
ADMIN_LOGIN = os.environ.get('ADMIN_LOGIN', 'admin')
ADMIN_PASSWORD = os.environ.get('ADMIN_PASSWORD', 'admin')
ADMIN_EMAIL = os.environ.get('ADMIN_EMAIL', 'admin@localhost')
# path to save torrent files
TORRENT_PATH_PATTERN = 'torrents/{}.torrent'
APP_ENVIRONMENT = os.environ.get('RTRSS_ENVIRONMENT')
if not APP_ENVIRONMENT:
raise EnvironmentError('RTRSS_ENVIRONMENT must be set')
IP = '0.0.0.0'
PORT = 8080
_mod = importlib.import_module('rtrss.config_{}'.format(APP_ENVIRONMENT))
_envconf = {k: v for k, v in _mod.__dict__.items() if k == k.upper()}
globals().update(_envconf)
|
# ... existing code ...
if not APP_ENVIRONMENT:
raise EnvironmentError('RTRSS_ENVIRONMENT must be set')
IP = '0.0.0.0'
PORT = 8080
_mod = importlib.import_module('rtrss.config_{}'.format(APP_ENVIRONMENT))
_envconf = {k: v for k, v in _mod.__dict__.items() if k == k.upper()}
globals().update(_envconf)
# ... rest of the code ...
|
64671712fb465a9e940484a5f2f4b8d673aaee75
|
words.py
|
words.py
|
"""Function to fetch words."""
import random
WORDLIST = 'wordlist.txt'
def get_random_word(min_word_length):
"""Get a random word from the wordlist using no extra memory."""
num_words_processed = 0
curr_word = None
with open(WORDLIST, 'r') as f:
for word in f:
if len(word) < min_word_length:
continue
num_words_processed += 1
if random.randint(1, num_words_processed) == 1:
curr_word = word
return curr_word
|
"""Function to fetch words."""
import random
WORDLIST = 'wordlist.txt'
def get_random_word(min_word_length):
"""Get a random word from the wordlist using no extra memory."""
num_words_processed = 0
curr_word = None
with open(WORDLIST, 'r') as f:
for word in f:
word = word.strip().lower()
if len(word) < min_word_length:
continue
num_words_processed += 1
if random.randint(1, num_words_processed) == 1:
curr_word = word
return curr_word
|
Enforce lowercase on word selection
|
Enforce lowercase on word selection
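Two hedged observations, not part of the commit itself: stripping also fixes the length check, since the trailing newline previously inflated len(word) by one, and the randint(1, i) == 1 scheme is single-item reservoir sampling, which picks uniformly in one pass. A toy check:

import random
from collections import Counter

def reservoir_pick(items):
    # Keep the i-th item with probability 1/i -- the same scheme as
    # get_random_word -- which yields a uniform choice overall.
    pick = None
    for i, item in enumerate(items, start=1):
        if random.randint(1, i) == 1:
            pick = item
    return pick

counts = Counter(reservoir_pick('abcd') for _ in range(100000))
print(counts)  # each of a/b/c/d lands near 25000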
|
Python
|
mit
|
andrewyang96/HangmanGame
|
"""Function to fetch words."""
import random
WORDLIST = 'wordlist.txt'
def get_random_word(min_word_length):
"""Get a random word from the wordlist using no extra memory."""
num_words_processed = 0
curr_word = None
with open(WORDLIST, 'r') as f:
for word in f:
+ word = word.strip().lower()
if len(word) < min_word_length:
continue
num_words_processed += 1
if random.randint(1, num_words_processed) == 1:
curr_word = word
return curr_word
|
Enforce lowercase on word selection
|
## Code Before:
"""Function to fetch words."""
import random
WORDLIST = 'wordlist.txt'
def get_random_word(min_word_length):
"""Get a random word from the wordlist using no extra memory."""
num_words_processed = 0
curr_word = None
with open(WORDLIST, 'r') as f:
for word in f:
if len(word) < min_word_length:
continue
num_words_processed += 1
if random.randint(1, num_words_processed) == 1:
curr_word = word
return curr_word
## Instruction:
Enforce lowercase on word selection
## Code After:
"""Function to fetch words."""
import random
WORDLIST = 'wordlist.txt'
def get_random_word(min_word_length):
"""Get a random word from the wordlist using no extra memory."""
num_words_processed = 0
curr_word = None
with open(WORDLIST, 'r') as f:
for word in f:
word = word.strip().lower()
if len(word) < min_word_length:
continue
num_words_processed += 1
if random.randint(1, num_words_processed) == 1:
curr_word = word
return curr_word
|
# ... existing code ...
curr_word = None
with open(WORDLIST, 'r') as f:
for word in f:
word = word.strip().lower()
if len(word) < min_word_length:
continue
num_words_processed += 1
# ... rest of the code ...
|
51a126c0ada7c00a99416b241bb1c11888e82836
|
esmgrids/jra55_grid.py
|
esmgrids/jra55_grid.py
|
import numpy as np
import netCDF4 as nc
from .base_grid import BaseGrid
class Jra55Grid(BaseGrid):
def __init__(self, h_grid_def, description='JRA55 regular grid'):
self.type = 'Arakawa A'
self.full_name = 'JRA55'
with nc.Dataset(h_grid_def) as f:
x_t = f.variables['lon'][:]
y_t = f.variables['lat'][1:-1]
super(Jra55Grid, self).__init__(x_t=x_t, y_t=y_t, description=description)
def fix_pole_holes(self):
clat_copy = np.copy(self.clat_t)
self.clat_t[2, -1,:] = 90.0
self.clat_t[3, -1,:] = 90.0
# Do South pole as well
self.clat_t[0, 0,:] = -90.0
self.clat_t[1, 0,:] = -90.0
|
import numpy as np
import netCDF4 as nc
from .base_grid import BaseGrid
class Jra55Grid(BaseGrid):
def __init__(self, h_grid_def, description='JRA55 regular grid'):
self.type = 'Arakawa A'
self.full_name = 'JRA55'
with nc.Dataset(h_grid_def) as f:
lon_bnds = f.variables['lon_bnds'][:]
lat_bnds = f.variables['lat_bnds'][:]
dx_t = lon_bnds[:, 1] - lon_bnds[:, 0]
dy_t = lat_bnds[:, 1] - lat_bnds[:, 0]
x_t = lon_bnds[:, 0] + dx_t[:] / 2
y_t = lat_bnds[:, 0] + dy_t[:] / 2
super(Jra55Grid, self).__init__(x_t=x_t, y_t=y_t, description=description)
def fix_pole_holes(self):
clat_copy = np.copy(self.clat_t)
self.clat_t[2, -1,:] = 90.0
self.clat_t[3, -1,:] = 90.0
# Do South pole as well
self.clat_t[0, 0,:] = -90.0
self.clat_t[1, 0,:] = -90.0
|
Use bounds to determine jra55 grid cell locations.
|
Use bounds to determine jra55 grid cell locations.
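The centre-from-bounds arithmetic in the new code, shown on made-up numbers rather than real JRA55 values:

import numpy as np

lon_bnds = np.array([[0.0, 1.25], [1.25, 2.5], [2.5, 3.75]])  # fake bounds
dx_t = lon_bnds[:, 1] - lon_bnds[:, 0]   # cell widths: [1.25 1.25 1.25]
x_t = lon_bnds[:, 0] + dx_t / 2          # centres:     [0.625 1.875 3.125]
print(x_t)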
|
Python
|
apache-2.0
|
DoublePrecision/esmgrids
|
import numpy as np
import netCDF4 as nc
from .base_grid import BaseGrid
class Jra55Grid(BaseGrid):
def __init__(self, h_grid_def, description='JRA55 regular grid'):
self.type = 'Arakawa A'
self.full_name = 'JRA55'
with nc.Dataset(h_grid_def) as f:
- x_t = f.variables['lon'][:]
+ lon_bnds = f.variables['lon_bnds'][:]
- y_t = f.variables['lat'][1:-1]
+ lat_bnds = f.variables['lat_bnds'][:]
+ dx_t = lon_bnds[:, 1] - lon_bnds[:, 0]
+ dy_t = lat_bnds[:, 1] - lat_bnds[:, 0]
+ x_t = lon_bnds[:, 0] + dx_t[:] / 2
+ y_t = lat_bnds[:, 0] + dy_t[:] / 2
super(Jra55Grid, self).__init__(x_t=x_t, y_t=y_t, description=description)
def fix_pole_holes(self):
clat_copy = np.copy(self.clat_t)
self.clat_t[2, -1,:] = 90.0
self.clat_t[3, -1,:] = 90.0
# Do South pole as well
self.clat_t[0, 0,:] = -90.0
self.clat_t[1, 0,:] = -90.0
|
Use bounds to determine jra55 grid cell locations.
|
## Code Before:
import numpy as np
import netCDF4 as nc
from .base_grid import BaseGrid
class Jra55Grid(BaseGrid):
def __init__(self, h_grid_def, description='JRA55 regular grid'):
self.type = 'Arakawa A'
self.full_name = 'JRA55'
with nc.Dataset(h_grid_def) as f:
x_t = f.variables['lon'][:]
y_t = f.variables['lat'][1:-1]
super(Jra55Grid, self).__init__(x_t=x_t, y_t=y_t, description=description)
def fix_pole_holes(self):
clat_copy = np.copy(self.clat_t)
self.clat_t[2, -1,:] = 90.0
self.clat_t[3, -1,:] = 90.0
# Do South pole as well
self.clat_t[0, 0,:] = -90.0
self.clat_t[1, 0,:] = -90.0
## Instruction:
Use bounds to determine jra55 grid cell locations.
## Code After:
import numpy as np
import netCDF4 as nc
from .base_grid import BaseGrid
class Jra55Grid(BaseGrid):
def __init__(self, h_grid_def, description='JRA55 regular grid'):
self.type = 'Arakawa A'
self.full_name = 'JRA55'
with nc.Dataset(h_grid_def) as f:
lon_bnds = f.variables['lon_bnds'][:]
lat_bnds = f.variables['lat_bnds'][:]
dx_t = lon_bnds[:, 1] - lon_bnds[:, 0]
dy_t = lat_bnds[:, 1] - lat_bnds[:, 0]
x_t = lon_bnds[:, 0] + dx_t[:] / 2
y_t = lat_bnds[:, 0] + dy_t[:] / 2
super(Jra55Grid, self).__init__(x_t=x_t, y_t=y_t, description=description)
def fix_pole_holes(self):
clat_copy = np.copy(self.clat_t)
self.clat_t[2, -1,:] = 90.0
self.clat_t[3, -1,:] = 90.0
# Do South pole as well
self.clat_t[0, 0,:] = -90.0
self.clat_t[1, 0,:] = -90.0
|
...
self.full_name = 'JRA55'
with nc.Dataset(h_grid_def) as f:
lon_bnds = f.variables['lon_bnds'][:]
lat_bnds = f.variables['lat_bnds'][:]
dx_t = lon_bnds[:, 1] - lon_bnds[:, 0]
dy_t = lat_bnds[:, 1] - lat_bnds[:, 0]
x_t = lon_bnds[:, 0] + dx_t[:] / 2
y_t = lat_bnds[:, 0] + dy_t[:] / 2
super(Jra55Grid, self).__init__(x_t=x_t, y_t=y_t, description=description)
...
|
9eec48753b2643d25d3ce1e143125b29351e0804
|
features/environment.py
|
features/environment.py
|
import os
import tempfile
from flask import json
import tsserver
# If set to True, each time the test is run, new database is created as a
# temporary file. If the value is equal to False, tests will be using SQLite
# in-memory database.
USE_DB_TEMP_FILE = False
def before_scenario(context, scenario):
if USE_DB_TEMP_FILE:
context.db_fd, context.db_url = tempfile.mkstemp()
db_url = 'sqlite:///' + context.db_url
else:
db_url = 'sqlite://'
tsserver.app.config['SQLALCHEMY_DATABASE_URI'] = db_url
# Ensure the tests are actually run in temporary database
assert str(tsserver.db.engine.url) == db_url
tsserver.app.config['TESTING'] = True
tsserver.db.create_all()
context.app = tsserver.app.test_client()
def request(url, method='GET'):
"""
Wrapper over Flask.open function that parses returned data as JSON
:param method: HTTP method to be used. GET is used by default
:param url: URL to retrieve
:return: Response object
"""
rv = context.app.open(url, method=method)
rv.json_data = json.loads(rv.data)
return rv
context.request = request
def after_scenario(context, scenario):
if USE_DB_TEMP_FILE:
os.close(context.db_fd)
os.unlink(context.db_url)
|
import os
import tempfile
from flask import json
import tsserver
# If set to True, each time the test is run, new database is created as a
# temporary file. If the value is equal to False, tests will be using SQLite
# in-memory database.
USE_DB_TEMP_FILE = False
def before_scenario(context, scenario):
if USE_DB_TEMP_FILE:
context.db_fd, context.db_url = tempfile.mkstemp()
db_url = 'sqlite:///' + context.db_url
else:
db_url = 'sqlite://'
tsserver.app.config['SQLALCHEMY_DATABASE_URI'] = db_url
# Ensure the tests are actually run in temporary database
assert str(tsserver.db.engine.url) == db_url
tsserver.app.config['TESTING'] = True
tsserver.db.create_all()
context.app = tsserver.app.test_client()
def request(url, method='GET', *args, **kwargs):
"""
Wrapper over Flask.open function that parses returned data as JSON
:param method: HTTP method to be used. GET is used by default
:param url: URL to retrieve
:return: Response object
"""
rv = context.app.open(url, method=method, *args, **kwargs)
rv.json_data = json.loads(rv.data)
return rv
context.request = request
def after_scenario(context, scenario):
if USE_DB_TEMP_FILE:
os.close(context.db_fd)
os.unlink(context.db_url)
|
Add support for arguments in request() in tests
|
Add support for arguments in request() in tests
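With the arguments forwarded, a behave step can now pass a request body through the wrapper; the endpoint and payload below are illustrative only:

from flask import json

# inside a behave step, after before_scenario has set context.request:
rv = context.request('/probes',                 # hypothetical endpoint
                     method='POST',
                     data=json.dumps({'temperature': 21.5}),
                     content_type='application/json')
assert rv.status_code == 201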
|
Python
|
mit
|
m4tx/techswarm-server
|
import os
import tempfile
from flask import json
import tsserver
# If set to True, each time the test is run, new database is created as a
# temporary file. If the value is equal to False, tests will be using SQLite
# in-memory database.
USE_DB_TEMP_FILE = False
def before_scenario(context, scenario):
if USE_DB_TEMP_FILE:
context.db_fd, context.db_url = tempfile.mkstemp()
db_url = 'sqlite:///' + context.db_url
else:
db_url = 'sqlite://'
tsserver.app.config['SQLALCHEMY_DATABASE_URI'] = db_url
# Ensure the tests are actually run in temporary database
assert str(tsserver.db.engine.url) == db_url
tsserver.app.config['TESTING'] = True
tsserver.db.create_all()
context.app = tsserver.app.test_client()
- def request(url, method='GET'):
+ def request(url, method='GET', *args, **kwargs):
"""
Wrapper over Flask.open function that parses returned data as JSON
:param method: HTTP method to be used. GET is used by default
:param url: URL to retrieve
:return: Response object
"""
- rv = context.app.open(url, method=method)
+ rv = context.app.open(url, method=method, *args, **kwargs)
rv.json_data = json.loads(rv.data)
return rv
context.request = request
def after_scenario(context, scenario):
if USE_DB_TEMP_FILE:
os.close(context.db_fd)
os.unlink(context.db_url)
|
Add support for arguments in request() in tests
|
## Code Before:
import os
import tempfile
from flask import json
import tsserver
# If set to True, each time the test is run, new database is created as a
# temporary file. If the value is equal to False, tests will be using SQLite
# in-memory database.
USE_DB_TEMP_FILE = False
def before_scenario(context, scenario):
if USE_DB_TEMP_FILE:
context.db_fd, context.db_url = tempfile.mkstemp()
db_url = 'sqlite:///' + context.db_url
else:
db_url = 'sqlite://'
tsserver.app.config['SQLALCHEMY_DATABASE_URI'] = db_url
# Ensure the tests are actually run in temporary database
assert str(tsserver.db.engine.url) == db_url
tsserver.app.config['TESTING'] = True
tsserver.db.create_all()
context.app = tsserver.app.test_client()
def request(url, method='GET'):
"""
Wrapper over Flask.open function that parses returned data as JSON
:param method: HTTP method to be used. GET is used by default
:param url: URL to retrieve
:return: Response object
"""
rv = context.app.open(url, method=method)
rv.json_data = json.loads(rv.data)
return rv
context.request = request
def after_scenario(context, scenario):
if USE_DB_TEMP_FILE:
os.close(context.db_fd)
os.unlink(context.db_url)
## Instruction:
Add support for arguments in request() in tests
## Code After:
import os
import tempfile
from flask import json
import tsserver
# If set to True, each time the test is run, new database is created as a
# temporary file. If the value is equal to False, tests will be using SQLite
# in-memory database.
USE_DB_TEMP_FILE = False
def before_scenario(context, scenario):
if USE_DB_TEMP_FILE:
context.db_fd, context.db_url = tempfile.mkstemp()
db_url = 'sqlite:///' + context.db_url
else:
db_url = 'sqlite://'
tsserver.app.config['SQLALCHEMY_DATABASE_URI'] = db_url
# Ensure the tests are actually run in temporary database
assert str(tsserver.db.engine.url) == db_url
tsserver.app.config['TESTING'] = True
tsserver.db.create_all()
context.app = tsserver.app.test_client()
def request(url, method='GET', *args, **kwargs):
"""
Wrapper over Flask.open function that parses returned data as JSON
:param method: HTTP method to be used. GET is used by default
:param url: URL to retrieve
:return: Response object
"""
rv = context.app.open(url, method=method, *args, **kwargs)
rv.json_data = json.loads(rv.data)
return rv
context.request = request
def after_scenario(context, scenario):
if USE_DB_TEMP_FILE:
os.close(context.db_fd)
os.unlink(context.db_url)
|
...
tsserver.db.create_all()
context.app = tsserver.app.test_client()
def request(url, method='GET', *args, **kwargs):
"""
Wrapper over Flask.open function that parses returned data as JSON
...
:param url: URL to retrieve
:return: Response object
"""
rv = context.app.open(url, method=method, *args, **kwargs)
rv.json_data = json.loads(rv.data)
return rv
...
|
22e0e894bdfb457f00bad1016ae28884ef94256c
|
okupy/otp/__init__.py
|
okupy/otp/__init__.py
|
from django_otp import login as otp_login
from django_otp.middleware import OTPMiddleware
from .sotp.models import SOTPDevice
from .totp.models import TOTPDevice
def init_otp(request):
"""
Initialize OTP after login. This sets up OTP devices
for django_otp and calls the middleware to fill
request.user.is_verified().
"""
tdev, created = TOTPDevice.objects.get_or_create(
user=request.user,
defaults={
'name': 'TOTP device with LDAP secret',
})
if created:
tdev.save()
sdev, created = SOTPDevice.objects.get_or_create(
user=request.user,
defaults={
'name': 'SOTP device with LDAP passwords',
})
if created:
sdev.save()
# if OTP is disabled, it will match already
if tdev.verify_token():
otp_login(request, tdev)
# add .is_verified()
OTPMiddleware().process_request(request)
|
from django.db import IntegrityError
from django_otp import login as otp_login
from django_otp.middleware import OTPMiddleware
from .sotp.models import SOTPDevice
from .totp.models import TOTPDevice
def init_otp(request):
"""
Initialize OTP after login. This sets up OTP devices
for django_otp and calls the middleware to fill
request.user.is_verified().
"""
tdev = TOTPDevice(user=request.user,
name='TOTP device with LDAP secret')
try:
tdev.save()
except IntegrityError:
tdev = TOTPDevice.objects.get(user=request.user)
sdev = SOTPDevice(user=request.user,
name='SOTP device with LDAP secret')
try:
sdev.save()
except IntegrityError:
pass
# if OTP is disabled, it will match already
if tdev.verify_token():
otp_login(request, tdev)
# add .is_verified()
OTPMiddleware().process_request(request)
|
Make otp_init() race condition safe.
|
Make otp_init() race condition safe.
A race condition in get_or_create() may have resulted in two devices
created per user. Now we guarantee only one. Not that it matters much,
really...
|
Python
|
agpl-3.0
|
gentoo/identity.gentoo.org,gentoo/identity.gentoo.org,dastergon/identity.gentoo.org,dastergon/identity.gentoo.org
|
+ from django.db import IntegrityError
from django_otp import login as otp_login
from django_otp.middleware import OTPMiddleware
from .sotp.models import SOTPDevice
from .totp.models import TOTPDevice
def init_otp(request):
"""
Initialize OTP after login. This sets up OTP devices
for django_otp and calls the middleware to fill
request.user.is_verified().
"""
+ tdev = TOTPDevice(user=request.user,
- tdev, created = TOTPDevice.objects.get_or_create(
- user=request.user,
- defaults={
- 'name': 'TOTP device with LDAP secret',
+ name='TOTP device with LDAP secret')
+ try:
- })
- if created:
tdev.save()
+ except IntegrityError:
+ tdev = TOTPDevice.objects.get(user=request.user)
+ sdev = SOTPDevice(user=request.user,
- sdev, created = SOTPDevice.objects.get_or_create(
- user=request.user,
- defaults={
- 'name': 'SOTP device with LDAP passwords',
+ name='SOTP device with LDAP secret')
+ try:
- })
- if created:
sdev.save()
+ except IntegrityError:
+ pass
# if OTP is disabled, it will match already
if tdev.verify_token():
otp_login(request, tdev)
# add .is_verified()
OTPMiddleware().process_request(request)
|
Make otp_init() race condition safe.
|
## Code Before:
from django_otp import login as otp_login
from django_otp.middleware import OTPMiddleware
from .sotp.models import SOTPDevice
from .totp.models import TOTPDevice
def init_otp(request):
"""
Initialize OTP after login. This sets up OTP devices
for django_otp and calls the middleware to fill
request.user.is_verified().
"""
tdev, created = TOTPDevice.objects.get_or_create(
user=request.user,
defaults={
'name': 'TOTP device with LDAP secret',
})
if created:
tdev.save()
sdev, created = SOTPDevice.objects.get_or_create(
user=request.user,
defaults={
'name': 'SOTP device with LDAP passwords',
})
if created:
sdev.save()
# if OTP is disabled, it will match already
if tdev.verify_token():
otp_login(request, tdev)
# add .is_verified()
OTPMiddleware().process_request(request)
## Instruction:
Make otp_init() race condition safe.
## Code After:
from django.db import IntegrityError
from django_otp import login as otp_login
from django_otp.middleware import OTPMiddleware
from .sotp.models import SOTPDevice
from .totp.models import TOTPDevice
def init_otp(request):
"""
Initialize OTP after login. This sets up OTP devices
for django_otp and calls the middleware to fill
request.user.is_verified().
"""
tdev = TOTPDevice(user=request.user,
name='TOTP device with LDAP secret')
try:
tdev.save()
except IntegrityError:
tdev = TOTPDevice.objects.get(user=request.user)
sdev = SOTPDevice(user=request.user,
name='SOTP device with LDAP secret')
try:
sdev.save()
except IntegrityError:
pass
# if OTP is disabled, it will match already
if tdev.verify_token():
otp_login(request, tdev)
# add .is_verified()
OTPMiddleware().process_request(request)
|
# ... existing code ...
from django.db import IntegrityError
from django_otp import login as otp_login
from django_otp.middleware import OTPMiddleware
# ... modified code ...
request.user.is_verified().
"""
tdev = TOTPDevice(user=request.user,
name='TOTP device with LDAP secret')
try:
tdev.save()
except IntegrityError:
tdev = TOTPDevice.objects.get(user=request.user)
sdev = SOTPDevice(user=request.user,
name='SOTP device with LDAP secret')
try:
sdev.save()
except IntegrityError:
pass
# if OTP is disabled, it will match already
if tdev.verify_token():
# ... rest of the code ...
|
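The rewrite trades `get_or_create()` for "INSERT first, fetch on conflict", which is race-safe only because the database enforces a uniqueness constraint on the user column: whichever writer loses the race gets an `IntegrityError` and reads the winner's row. A minimal sketch of the pattern, using a hypothetical model with `unique=True` on `user`:

```python
from django.db import IntegrityError

def race_safe_get_or_create(model, user, **fields):
    # Attempt the INSERT unconditionally; on a unique-constraint
    # violation, fall back to fetching the row the other writer created.
    obj = model(user=user, **fields)
    try:
        obj.save()
        return obj
    except IntegrityError:
        return model.objects.get(user=user)
```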
efe1417ad049e4bb78bf1f111db6b2ea9c603461
|
rapt/util.py
|
rapt/util.py
|
import sys
import yaml
import click
def dump_yaml(obj):
return yaml.dump(obj, default_flow_style=False)
def edit_yaml(content='', footer=''):
MARKER = '# Everything below is ignored\n\n'
message = click.edit(content + '\n\n' + MARKER + footer,
extension='.yaml')
if message is not None:
yaml_content = message.split(MARKER, 1)[0].rstrip('\n')
return yaml.safe_load(yaml_content)
def stdin():
for line in sys.stdin:
yield line.strip()
|
import sys
import yaml
import click
def load_yaml(fh_or_string):
return yaml.safe_load(fh_or_string)
def dump_yaml(obj):
return yaml.dump(obj, default_flow_style=False)
def edit_yaml(content='', footer=''):
MARKER = '# Everything below is ignored\n\n'
message = click.edit(content + '\n\n' + MARKER + footer,
extension='.yaml')
if message is not None:
yaml_content = message.split(MARKER, 1)[0].rstrip('\n')
return yaml.safe_load(yaml_content)
def stdin():
for line in sys.stdin:
yield line.strip()
|
Add a load yaml helper
|
Add a load yaml helper
|
Python
|
bsd-3-clause
|
yougov/rapt,yougov/rapt
|
import sys
import yaml
import click
+
+
+ def load_yaml(fh_or_string):
+ return yaml.safe_load(fh_or_string)
def dump_yaml(obj):
return yaml.dump(obj, default_flow_style=False)
def edit_yaml(content='', footer=''):
MARKER = '# Everything below is ignored\n\n'
message = click.edit(content + '\n\n' + MARKER + footer,
extension='.yaml')
if message is not None:
yaml_content = message.split(MARKER, 1)[0].rstrip('\n')
return yaml.safe_load(yaml_content)
def stdin():
for line in sys.stdin:
yield line.strip()
|
Add a load yaml helper
|
## Code Before:
import sys
import yaml
import click
def dump_yaml(obj):
return yaml.dump(obj, default_flow_style=False)
def edit_yaml(content='', footer=''):
MARKER = '# Everything below is ignored\n\n'
message = click.edit(content + '\n\n' + MARKER + footer,
extension='.yaml')
if message is not None:
yaml_content = message.split(MARKER, 1)[0].rstrip('\n')
return yaml.safe_load(yaml_content)
def stdin():
for line in sys.stdin:
yield line.strip()
## Instruction:
Add a load yaml helper
## Code After:
import sys
import yaml
import click
def load_yaml(fh_or_string):
return yaml.safe_load(fh_or_string)
def dump_yaml(obj):
return yaml.dump(obj, default_flow_style=False)
def edit_yaml(content='', footer=''):
MARKER = '# Everything below is ignored\n\n'
message = click.edit(content + '\n\n' + MARKER + footer,
extension='.yaml')
if message is not None:
yaml_content = message.split(MARKER, 1)[0].rstrip('\n')
return yaml.safe_load(yaml_content)
def stdin():
for line in sys.stdin:
yield line.strip()
|
...
import sys
import yaml
import click
def load_yaml(fh_or_string):
return yaml.safe_load(fh_or_string)
def dump_yaml(obj):
...
|
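The helper covers both call styles because `yaml.safe_load` accepts either a string or an open stream. A quick illustration, assuming PyYAML is installed:

```python
import io
import yaml

def load_yaml(fh_or_string):
    return yaml.safe_load(fh_or_string)

# Works on a plain string...
assert load_yaml('name: rapt\nreplicas: 3') == {'name': 'rapt', 'replicas': 3}
# ...and on any file-like object.
assert load_yaml(io.StringIO('- web\n- worker')) == ['web', 'worker']
```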
03d8a4e20ee4b6fd49495b7b047ea78d0b9a5bb4
|
dmoj/graders/base.py
|
dmoj/graders/base.py
|
class BaseGrader(object):
def __init__(self, judge, problem, language, source):
self.source = source
self.language = language
self.problem = problem
self.judge = judge
self.binary = self._generate_binary()
self._terminate_grading = False
self._current_proc = None
def grade(self, case):
raise NotImplementedError
def _generate_binary(self):
raise NotImplementedError
def terminate_grading(self):
self._terminate_grading = True
if self._current_proc:
try:
self._current_proc.kill()
except OSError:
pass
pass
|
class BaseGrader(object):
def __init__(self, judge, problem, language, source):
if isinstance(source, unicode):
source = source.encode('utf-8')
self.source = source
self.language = language
self.problem = problem
self.judge = judge
self.binary = self._generate_binary()
self._terminate_grading = False
self._current_proc = None
def grade(self, case):
raise NotImplementedError
def _generate_binary(self):
raise NotImplementedError
def terminate_grading(self):
self._terminate_grading = True
if self._current_proc:
try:
self._current_proc.kill()
except OSError:
pass
pass
|
Make source utf-8 encoded bytes.
|
Make source utf-8 encoded bytes.
|
Python
|
agpl-3.0
|
DMOJ/judge,DMOJ/judge,DMOJ/judge
|
class BaseGrader(object):
def __init__(self, judge, problem, language, source):
+ if isinstance(source, unicode):
+ source = source.encode('utf-8')
self.source = source
self.language = language
self.problem = problem
self.judge = judge
self.binary = self._generate_binary()
self._terminate_grading = False
self._current_proc = None
def grade(self, case):
raise NotImplementedError
def _generate_binary(self):
raise NotImplementedError
def terminate_grading(self):
self._terminate_grading = True
if self._current_proc:
try:
self._current_proc.kill()
except OSError:
pass
pass
|
Make source utf-8 encoded bytes.
|
## Code Before:
class BaseGrader(object):
def __init__(self, judge, problem, language, source):
self.source = source
self.language = language
self.problem = problem
self.judge = judge
self.binary = self._generate_binary()
self._terminate_grading = False
self._current_proc = None
def grade(self, case):
raise NotImplementedError
def _generate_binary(self):
raise NotImplementedError
def terminate_grading(self):
self._terminate_grading = True
if self._current_proc:
try:
self._current_proc.kill()
except OSError:
pass
pass
## Instruction:
Make source utf-8 encoded bytes.
## Code After:
class BaseGrader(object):
def __init__(self, judge, problem, language, source):
if isinstance(source, unicode):
source = source.encode('utf-8')
self.source = source
self.language = language
self.problem = problem
self.judge = judge
self.binary = self._generate_binary()
self._terminate_grading = False
self._current_proc = None
def grade(self, case):
raise NotImplementedError
def _generate_binary(self):
raise NotImplementedError
def terminate_grading(self):
self._terminate_grading = True
if self._current_proc:
try:
self._current_proc.kill()
except OSError:
pass
pass
|
...
class BaseGrader(object):
def __init__(self, judge, problem, language, source):
if isinstance(source, unicode):
source = source.encode('utf-8')
self.source = source
self.language = language
self.problem = problem
...
|
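The `isinstance(source, unicode)` check marks this as Python 2 code: text (`unicode`) is normalized to UTF-8 bytes (`str`) before the grader compiles it. A rough Python 3 equivalent of the same normalization, shown for comparison only:

```python
def to_utf8_bytes(source):
    # In Python 3 the split is str (text) vs. bytes; encode text,
    # pass bytes through untouched.
    if isinstance(source, str):
        return source.encode('utf-8')
    return source

assert to_utf8_bytes('print(42)') == b'print(42)'
assert to_utf8_bytes(b'print(42)') == b'print(42)'
```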
5a1ad6a2fdd0586517899b3f2ec3d27a00a5d2b1
|
databroker/intake_xarray_core/__init__.py
|
databroker/intake_xarray_core/__init__.py
|
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
import intake # Import this first to avoid circular imports during discovery.
from .netcdf import NetCDFSource
from .opendap import OpenDapSource
from .raster import RasterIOSource
from .xzarr import ZarrSource
from .xarray_container import RemoteXarray
from .image import ImageSource
import intake.container
intake.registry['remote-xarray'] = RemoteXarray
intake.container.container_map['xarray'] = RemoteXarray
|
import intake # Import this first to avoid circular imports during discovery.
from .xarray_container import RemoteXarray
import intake.container
intake.registry['remote-xarray'] = RemoteXarray
intake.container.container_map['xarray'] = RemoteXarray
|
Remove imports of omitted modules.
|
Remove imports of omitted modules.
|
Python
|
bsd-3-clause
|
ericdill/databroker,ericdill/databroker
|
- from ._version import get_versions
- __version__ = get_versions()['version']
- del get_versions
-
import intake # Import this first to avoid circular imports during discovery.
- from .netcdf import NetCDFSource
- from .opendap import OpenDapSource
- from .raster import RasterIOSource
- from .xzarr import ZarrSource
from .xarray_container import RemoteXarray
- from .image import ImageSource
import intake.container
intake.registry['remote-xarray'] = RemoteXarray
intake.container.container_map['xarray'] = RemoteXarray
|
Remove imports of omitted modules.
|
## Code Before:
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
import intake # Import this first to avoid circular imports during discovery.
from .netcdf import NetCDFSource
from .opendap import OpenDapSource
from .raster import RasterIOSource
from .xzarr import ZarrSource
from .xarray_container import RemoteXarray
from .image import ImageSource
import intake.container
intake.registry['remote-xarray'] = RemoteXarray
intake.container.container_map['xarray'] = RemoteXarray
## Instruction:
Remove imports of omitted modules.
## Code After:
import intake # Import this first to avoid circular imports during discovery.
from .xarray_container import RemoteXarray
import intake.container
intake.registry['remote-xarray'] = RemoteXarray
intake.container.container_map['xarray'] = RemoteXarray
|
// ... existing code ...
import intake # Import this first to avoid circular imports during discovery.
from .xarray_container import RemoteXarray
import intake.container
// ... rest of the code ...
|
8026b5f309264d4e72c3bc503601468cf1cdfcdd
|
src/nodeconductor_assembly_waldur/packages/filters.py
|
src/nodeconductor_assembly_waldur/packages/filters.py
|
import django_filters
from nodeconductor.core.filters import UUIDFilter
from . import models
class PackageTemplateFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_type='icontains')
settings_uuid = UUIDFilter(name='service_settings__uuid')
class Meta(object):
model = models.PackageTemplate
fields = ('name', 'settings_uuid',)
class OpenStackPackageFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_type='icontains')
settings_uuid = UUIDFilter(name='service_settings__uuid')
class Meta(object):
model = models.OpenStackPackage
fields = ('name', 'settings_uuid',)
|
import django_filters
from nodeconductor.core.filters import UUIDFilter
from . import models
class PackageTemplateFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_type='icontains')
settings_uuid = UUIDFilter(name='service_settings__uuid')
class Meta(object):
model = models.PackageTemplate
fields = ('name', 'settings_uuid',)
class OpenStackPackageFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_type='icontains')
customer = UUIDFilter(name='tenant__service_project_link__project__customer')
project = UUIDFilter(name='tenant__service_project_link__project')
class Meta(object):
model = models.OpenStackPackage
fields = ('name', 'customer', 'project')
|
Enable filtering OpenStack package by customer and project (WAL-49)
|
Enable filtering OpenStack package by customer and project (WAL-49)
|
Python
|
mit
|
opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/nodeconductor-assembly-waldur
|
import django_filters
from nodeconductor.core.filters import UUIDFilter
from . import models
class PackageTemplateFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_type='icontains')
settings_uuid = UUIDFilter(name='service_settings__uuid')
class Meta(object):
model = models.PackageTemplate
fields = ('name', 'settings_uuid',)
class OpenStackPackageFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_type='icontains')
- settings_uuid = UUIDFilter(name='service_settings__uuid')
+ customer = UUIDFilter(name='tenant__service_project_link__project__customer')
+ project = UUIDFilter(name='tenant__service_project_link__project')
class Meta(object):
model = models.OpenStackPackage
- fields = ('name', 'settings_uuid',)
+ fields = ('name', 'customer', 'project')
|
Enable filtering OpenStack package by customer and project (WAL-49)
|
## Code Before:
import django_filters
from nodeconductor.core.filters import UUIDFilter
from . import models
class PackageTemplateFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_type='icontains')
settings_uuid = UUIDFilter(name='service_settings__uuid')
class Meta(object):
model = models.PackageTemplate
fields = ('name', 'settings_uuid',)
class OpenStackPackageFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_type='icontains')
settings_uuid = UUIDFilter(name='service_settings__uuid')
class Meta(object):
model = models.OpenStackPackage
fields = ('name', 'settings_uuid',)
## Instruction:
Enable filtering OpenStack package by customer and project (WAL-49)
## Code After:
import django_filters
from nodeconductor.core.filters import UUIDFilter
from . import models
class PackageTemplateFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_type='icontains')
settings_uuid = UUIDFilter(name='service_settings__uuid')
class Meta(object):
model = models.PackageTemplate
fields = ('name', 'settings_uuid',)
class OpenStackPackageFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_type='icontains')
customer = UUIDFilter(name='tenant__service_project_link__project__customer')
project = UUIDFilter(name='tenant__service_project_link__project')
class Meta(object):
model = models.OpenStackPackage
fields = ('name', 'customer', 'project')
|
# ... existing code ...
class OpenStackPackageFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_type='icontains')
customer = UUIDFilter(name='tenant__service_project_link__project__customer')
project = UUIDFilter(name='tenant__service_project_link__project')
class Meta(object):
model = models.OpenStackPackage
fields = ('name', 'customer', 'project')
# ... rest of the code ...
|
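The reworked filterset swaps the settings lookup for relation-spanning double-underscore lookups, so API consumers can narrow packages by project or customer UUID. A sketch of the intended usage, assuming `UUIDFilter` resolves the named relation by its `uuid` field (the endpoint path and UUIDs below are made up):

```python
# Hypothetical usage once the filterset is attached to a DRF viewset:
#
#   GET /api/openstack-packages/?project=32bb1b2e6a1e4b779d1e5e4b8e2f9f10
#   GET /api/openstack-packages/?customer=aa11bb22cc33dd44ee55ff6677889900
#
# which correspond roughly to ORM lookups along the same relation chain:
#
#   OpenStackPackage.objects.filter(
#       tenant__service_project_link__project__uuid=...)
#   OpenStackPackage.objects.filter(
#       tenant__service_project_link__project__customer__uuid=...)
```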
9334d20adb15f3a6be393c57c797311e31fcd8fc
|
ConectorDriverComando.py
|
ConectorDriverComando.py
|
from serial import SerialException
import importlib
import threading
import logging
class ConectorError(Exception):
pass
class ConectorDriverComando:
driver = None
def __init__(self, comando, driver, *args, **kwargs):
logging.getLogger().info("inicializando ConectorDriverComando driver de %s" % driver)
self._comando = comando
self.driver_name = driver
# instantiate the driver dynamically according to the driver passed as a parameter
libraryName = "Drivers." + driver + "Driver"
driverModule = importlib.import_module(libraryName)
driverClass = getattr(driverModule, driver + "Driver")
self.driver = driverClass(**kwargs)
def sendCommand(self, *args):
logging.getLogger().info("Enviando comando %s" % args)
return self.driver.sendCommand(*args)
def close(self):
# If the driver is Receipt, it is closed from the driver class itself; otherwise it throws a Bad File Descriptor error from trying to close it twice.
if self.driver_name == "ReceiptDirectJet":
if self.driver.connected is False:
return None
self.driver.close()
self.driver = None
|
from serial import SerialException
import importlib
import threading
import logging
class ConectorError(Exception):
pass
class ConectorDriverComando:
driver = None
def __init__(self, comando, driver, *args, **kwargs):
# logging.getLogger().info("inicializando ConectorDriverComando driver de %s" % driver)
logging.getLogger().info("inicializando ConectorDriverComando driver de '${0}'".format(driver))
self._comando = comando
self.driver_name = driver
# instantiate the driver dynamically according to the driver passed as a parameter
libraryName = "Drivers." + driver + "Driver"
driverModule = importlib.import_module(libraryName)
driverClass = getattr(driverModule, driver + "Driver")
self.driver = driverClass(**kwargs)
def sendCommand(self, *args):
# logging.getLogger().info("Enviando comando %s" % args)
logging.getLogger().info("Enviando comando '${0}'".format(args))
return self.driver.sendCommand(*args)
def close(self):
# If the driver is Receipt, it is closed from the driver class itself; otherwise it throws a Bad File Descriptor error from trying to close it twice.
if self.driver_name == "ReceiptDirectJet":
if self.driver.connected is False:
return None
self.driver.close()
self.driver = None
|
FIX Format String Error in Conector Driver Comando
|
FIX Format String Error in Conector Driver Comando
|
Python
|
mit
|
ristorantino/fiscalberry,ristorantino/fiscalberry,ristorantino/fiscalberry,ristorantino/fiscalberry
|
from serial import SerialException
import importlib
import threading
import logging
class ConectorError(Exception):
pass
class ConectorDriverComando:
driver = None
def __init__(self, comando, driver, *args, **kwargs):
- logging.getLogger().info("inicializando ConectorDriverComando driver de %s" % driver)
+ # logging.getLogger().info("inicializando ConectorDriverComando driver de %s" % driver)
+ logging.getLogger().info("inicializando ConectorDriverComando driver de '${0}'".format(driver))
self._comando = comando
self.driver_name = driver
# instantiate the driver dynamically according to the driver passed as a parameter
libraryName = "Drivers." + driver + "Driver"
driverModule = importlib.import_module(libraryName)
driverClass = getattr(driverModule, driver + "Driver")
self.driver = driverClass(**kwargs)
def sendCommand(self, *args):
- logging.getLogger().info("Enviando comando %s" % args)
+ # logging.getLogger().info("Enviando comando %s" % args)
+ logging.getLogger().info("Enviando comando '${0}'".format(args))
return self.driver.sendCommand(*args)
def close(self):
# If the driver is Receipt, it is closed from the driver class itself; otherwise it throws a Bad File Descriptor error from trying to close it twice.
if self.driver_name == "ReceiptDirectJet":
if self.driver.connected is False:
return None
self.driver.close()
self.driver = None
|
FIX Format String Error in Conector Driver Comando
|
## Code Before:
from serial import SerialException
import importlib
import threading
import logging
class ConectorError(Exception):
pass
class ConectorDriverComando:
driver = None
def __init__(self, comando, driver, *args, **kwargs):
logging.getLogger().info("inicializando ConectorDriverComando driver de %s" % driver)
self._comando = comando
self.driver_name = driver
# instantiate the driver dynamically according to the driver passed as a parameter
libraryName = "Drivers." + driver + "Driver"
driverModule = importlib.import_module(libraryName)
driverClass = getattr(driverModule, driver + "Driver")
self.driver = driverClass(**kwargs)
def sendCommand(self, *args):
logging.getLogger().info("Enviando comando %s" % args)
return self.driver.sendCommand(*args)
def close(self):
# If the driver is Receipt, it is closed from the driver class itself; otherwise it throws a Bad File Descriptor error from trying to close it twice.
if self.driver_name == "ReceiptDirectJet":
if self.driver.connected is False:
return None
self.driver.close()
self.driver = None
## Instruction:
FIX Format String Error in Conector Driver Comando
## Code After:
from serial import SerialException
import importlib
import threading
import logging
class ConectorError(Exception):
pass
class ConectorDriverComando:
driver = None
def __init__(self, comando, driver, *args, **kwargs):
# logging.getLogger().info("inicializando ConectorDriverComando driver de %s" % driver)
logging.getLogger().info("inicializando ConectorDriverComando driver de '${0}'".format(driver))
self._comando = comando
self.driver_name = driver
# instantiate the driver dynamically according to the driver passed as a parameter
libraryName = "Drivers." + driver + "Driver"
driverModule = importlib.import_module(libraryName)
driverClass = getattr(driverModule, driver + "Driver")
self.driver = driverClass(**kwargs)
def sendCommand(self, *args):
# logging.getLogger().info("Enviando comando %s" % args)
logging.getLogger().info("Enviando comando '${0}'".format(args))
return self.driver.sendCommand(*args)
def close(self):
# If the driver is Receipt, it is closed from the driver class itself; otherwise it throws a Bad File Descriptor error from trying to close it twice.
if self.driver_name == "ReceiptDirectJet":
if self.driver.connected is False:
return None
self.driver.close()
self.driver = None
|
// ... existing code ...
driver = None
def __init__(self, comando, driver, *args, **kwargs):
# logging.getLogger().info("inicializando ConectorDriverComando driver de %s" % driver)
logging.getLogger().info("inicializando ConectorDriverComando driver de '${0}'".format(driver))
self._comando = comando
self.driver_name = driver
// ... modified code ...
self.driver = driverClass(**kwargs)
def sendCommand(self, *args):
# logging.getLogger().info("Enviando comando %s" % args)
logging.getLogger().info("Enviando comando '${0}'".format(args))
return self.driver.sendCommand(*args)
def close(self):
// ... rest of the code ...
|
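One caveat worth noting about the committed fix: with `str.format`, only `{0}` is a placeholder, so `'${0}'` prints a literal dollar sign before the value (the `$` appears borrowed from shell/JS template syntax). A minimal check of that behavior, plus the lazy interpolation form the logging module supports natively:

```python
import logging

driver = 'ReceiptDirectJet'
# The dollar sign survives into the output verbatim.
assert "driver de '${0}'".format(driver) == "driver de '$ReceiptDirectJet'"

# logging can defer interpolation itself, which sidesteps the issue:
logging.getLogger().info("inicializando driver de '%s'", driver)
```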
9a30728493258d7dcf60b67a8c87489e1457df1a
|
kitchen/dashboard/templatetags/filters.py
|
kitchen/dashboard/templatetags/filters.py
|
"""Dashboard template filters"""
from django import template
import littlechef
from kitchen.settings import REPO
register = template.Library()
@register.filter(name='get_role_list')
def get_role_list(run_list):
"""Returns the role sublist from the given run_list"""
prev_role_list = littlechef.lib.get_roles_in_node({'run_list': run_list})
role_list = []
for role in prev_role_list:
if not role.startswith(REPO['EXCLUDE_ROLE_PREFIX']):
# Only add if it doesn't start with excluded role prefixes
role_list.append(role)
return role_list
@register.filter(name='get_recipe_list')
def get_recipe_list(run_list):
"""Returns the recipe sublist from the given run_list"""
return littlechef.lib.get_recipes_in_node({'run_list': run_list})
|
"""Dashboard template filters"""
from django import template
import littlechef
from kitchen.settings import REPO
register = template.Library()
@register.filter(name='get_role_list')
def get_role_list(run_list):
"""Returns the role sublist from the given run_list"""
if run_list:
all_roles = littlechef.lib.get_roles_in_node(
{'run_list': run_list})
role_list = []
for role in all_roles:
if not role.startswith(REPO['EXCLUDE_ROLE_PREFIX']):
# Only add if it doesn't start with excluded role prefixes
role_list.append(role)
return role_list
else:
return []
@register.filter(name='get_recipe_list')
def get_recipe_list(run_list):
"""Returns the recipe sublist from the given run_list"""
if run_list:
return littlechef.lib.get_recipes_in_node({'run_list': run_list})
else:
return []
|
Check 'role_list' before sending it to little_chef
|
Check 'role_list' before sending it to little_chef
|
Python
|
apache-2.0
|
edelight/kitchen,edelight/kitchen,edelight/kitchen,edelight/kitchen
|
"""Dashboard template filters"""
from django import template
import littlechef
from kitchen.settings import REPO
register = template.Library()
@register.filter(name='get_role_list')
def get_role_list(run_list):
"""Returns the role sublist from the given run_list"""
- prev_role_list = littlechef.lib.get_roles_in_node({'run_list': run_list})
+ if run_list:
+ all_roles = littlechef.lib.get_roles_in_node(
+ {'run_list': run_list})
- role_list = []
+ role_list = []
- for role in prev_role_list:
+ for role in all_roles:
- if not role.startswith(REPO['EXCLUDE_ROLE_PREFIX']):
+ if not role.startswith(REPO['EXCLUDE_ROLE_PREFIX']):
- # Only add if it doesn't start with excluded role prefixes
+ # Only add if it doesn't start with excluded role prefixes
- role_list.append(role)
+ role_list.append(role)
- return role_list
+ return role_list
+ else:
+ return []
@register.filter(name='get_recipe_list')
def get_recipe_list(run_list):
"""Returns the recipe sublist from the given run_list"""
+ if run_list:
- return littlechef.lib.get_recipes_in_node({'run_list': run_list})
+ return littlechef.lib.get_recipes_in_node({'run_list': run_list})
+ else:
+ return []
|
Check 'role_list' before sending it to little_chef
|
## Code Before:
"""Dashboard template filters"""
from django import template
import littlechef
from kitchen.settings import REPO
register = template.Library()
@register.filter(name='get_role_list')
def get_role_list(run_list):
"""Returns the role sublist from the given run_list"""
prev_role_list = littlechef.lib.get_roles_in_node({'run_list': run_list})
role_list = []
for role in prev_role_list:
if not role.startswith(REPO['EXCLUDE_ROLE_PREFIX']):
# Only add if it doesn't start with excluded role prefixes
role_list.append(role)
return role_list
@register.filter(name='get_recipe_list')
def get_recipe_list(run_list):
"""Returns the recipe sublist from the given run_list"""
return littlechef.lib.get_recipes_in_node({'run_list': run_list})
## Instruction:
Check 'role_list' before sending it to little_chef
## Code After:
"""Dashboard template filters"""
from django import template
import littlechef
from kitchen.settings import REPO
register = template.Library()
@register.filter(name='get_role_list')
def get_role_list(run_list):
"""Returns the role sublist from the given run_list"""
if run_list:
all_roles = littlechef.lib.get_roles_in_node(
{'run_list': run_list})
role_list = []
for role in all_roles:
if not role.startswith(REPO['EXCLUDE_ROLE_PREFIX']):
# Only add if it doesn't start with excluded role prefixes
role_list.append(role)
return role_list
else:
return []
@register.filter(name='get_recipe_list')
def get_recipe_list(run_list):
"""Returns the recipe sublist from the given run_list"""
if run_list:
return littlechef.lib.get_recipes_in_node({'run_list': run_list})
else:
return []
|
# ... existing code ...
@register.filter(name='get_role_list')
def get_role_list(run_list):
"""Returns the role sublist from the given run_list"""
if run_list:
all_roles = littlechef.lib.get_roles_in_node(
{'run_list': run_list})
role_list = []
for role in all_roles:
if not role.startswith(REPO['EXCLUDE_ROLE_PREFIX']):
# Only add if it doesn't start with excluded role prefixes
role_list.append(role)
return role_list
else:
return []
@register.filter(name='get_recipe_list')
def get_recipe_list(run_list):
"""Returns the recipe sublist from the given run_list"""
if run_list:
return littlechef.lib.get_recipes_in_node({'run_list': run_list})
else:
return []
# ... rest of the code ...
|
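The guard turns an empty or missing run_list into `[]` up front instead of handing a falsy value to littlechef. The same short-circuit shape, with the littlechef call stubbed out since the real one needs a kitchen repository on disk:

```python
def get_recipe_list(run_list, fetch=lambda rl: ['apache2']):
    # fetch stands in for littlechef.lib.get_recipes_in_node here.
    return fetch(run_list) if run_list else []

assert get_recipe_list(None) == []
assert get_recipe_list([]) == []
assert get_recipe_list(['recipe[apache2]']) == ['apache2']
```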
88393283ff5e7f7720a98eda5eec8fa53b30f700
|
grains/grains.py
|
grains/grains.py
|
import itertools
square = [x for x in range(1, 65)]
grains = [2 ** x for x in range(0, 65)]
board = dict(zip(square, grains))
def on_square(num):
for k, v in board.iteritems():
if k == num:
return v
def total_after(num):
for k, v in board.iteritems():
if k == num:
total_after = sum(map(board.get, itertools.takewhile(lambda key: key != v, board)))
return total_after
print (board)
print (total_after(1))
print(on_square(1))
|
import itertools
square = [x for x in range(1, 65)]
grains = [2 ** x for x in range(0, 65)]
board = dict(zip(square, grains))
def on_square(num):
for k, v in board.iteritems():
if k == num:
return v
def total_after(num):
if num == 1:
return 1
else:
for k, v in board.iteritems():
if k == num:
total_after = sum(map(board.get, itertools.takewhile(lambda key: key != v, board)))
return total_after
print (board)
print (total_after(1))
print(on_square(1))
|
Add condition to avoid index error
|
Add condition to avoid index error
|
Python
|
mit
|
amalshehu/exercism-python
|
import itertools
square = [x for x in range(1, 65)]
grains = [2 ** x for x in range(0, 65)]
board = dict(zip(square, grains))
def on_square(num):
for k, v in board.iteritems():
if k == num:
return v
def total_after(num):
+ if num == 1:
+ return 1
+ else:
- for k, v in board.iteritems():
+ for k, v in board.iteritems():
- if k == num:
+ if k == num:
- total_after = sum(map(board.get, itertools.takewhile(lambda key: key != v, board)))
+ total_after = sum(map(board.get, itertools.takewhile(lambda key: key != v, board)))
return total_after
print (board)
print (total_after(1))
print(on_square(1))
|
Add condition to avoid index error
|
## Code Before:
import itertools
square = [x for x in range(1, 65)]
grains = [2 ** x for x in range(0, 65)]
board = dict(zip(square, grains))
def on_square(num):
for k, v in board.iteritems():
if k == num:
return v
def total_after(num):
for k, v in board.iteritems():
if k == num:
total_after = sum(map(board.get, itertools.takewhile(lambda key: key != v, board)))
return total_after
print (board)
print (total_after(1))
print(on_square(1))
## Instruction:
Add condition to avoid index error
## Code After:
import itertools
square = [x for x in range(1, 65)]
grains = [2 ** x for x in range(0, 65)]
board = dict(zip(square, grains))
def on_square(num):
for k, v in board.iteritems():
if k == num:
return v
def total_after(num):
if num == 1:
return 1
else:
for k, v in board.iteritems():
if k == num:
total_after = sum(map(board.get, itertools.takewhile(lambda key: key != v, board)))
return total_after
print (board)
print (total_after(1))
print(on_square(1))
|
// ... existing code ...
def total_after(num):
if num == 1:
return 1
else:
for k, v in board.iteritems():
if k == num:
total_after = sum(map(board.get, itertools.takewhile(lambda key: key != v, board)))
return total_after
print (board)
// ... rest of the code ...
|
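The `takewhile` over dict iteration depends on key order, which is why the `num == 1` edge case had to be special-cased at all. The quantities also have a closed form that sidesteps ordering entirely: square n holds 2^(n-1) grains, and the running total through square n is the geometric sum 2^n - 1. A minimal sketch of that approach (not the exercise's required API):

```python
def on_square(n):
    return 2 ** (n - 1)

def total_after(n):
    # Sum of a geometric series: 1 + 2 + ... + 2**(n-1) == 2**n - 1.
    return 2 ** n - 1

assert on_square(1) == 1
assert total_after(1) == 1
assert total_after(64) == 2 ** 64 - 1
```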
d1614d3747f72c1f32e74afb6e4b98eb476c7266
|
utils/layers_test.py
|
utils/layers_test.py
|
"""Tests for spectral."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
import numpy as np
import os
import layers
class LayersTest(tf.test.TestCase):
def test_conv_transpose_shape(self):
inputs = np.random.normal(size=(10, 5, 2)).astype(np.float32)
conv_transpose = layers.Conv1DTranspose(
filters=2, kernel_size=1, strides=1
)
outputs = conv_transpose(inputs)
self.assertShapeEqual(inputs, outputs)
if __name__ == '__main__':
os.environ["CUDA_VISIBLE_DEVICES"] = ''
tf.test.main()
|
"""Tests for spectral."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
import numpy as np
import os
import layers
class LayersTest(tf.test.TestCase):
def test_conv_transpose_shape(self):
inputs = np.random.normal(size=(10, 5, 2)).astype(np.float32)
conv_transpose = layers.Conv1DTranspose(
filters=2, kernel_size=1, strides=1
)
outputs = conv_transpose(inputs)
self.assertShapeEqual(inputs, outputs)
def test_conv_transpose_shape_upscale(self):
inputs = np.random.normal(size=(10, 5, 2)).astype(np.float32)
conv_transpose = layers.Conv1DTranspose(
filters=2, kernel_size=1, strides=2
)
outputs = conv_transpose(inputs)
self.assertEqual((10, 10, 2), outputs.shape)
if __name__ == '__main__':
os.environ["CUDA_VISIBLE_DEVICES"] = ''
tf.test.main()
|
Add Second Shape Test for Layers Util
|
Add Second Shape Test for Layers Util
|
Python
|
apache-2.0
|
googleinterns/audio_synthesis
|
"""Tests for spectral."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
import numpy as np
import os
import layers
class LayersTest(tf.test.TestCase):
def test_conv_transpose_shape(self):
inputs = np.random.normal(size=(10, 5, 2)).astype(np.float32)
conv_transpose = layers.Conv1DTranspose(
filters=2, kernel_size=1, strides=1
)
outputs = conv_transpose(inputs)
self.assertShapeEqual(inputs, outputs)
+
+ def test_conv_transpose_shape_upscale(self):
+ inputs = np.random.normal(size=(10, 5, 2)).astype(np.float32)
+ conv_transpose = layers.Conv1DTranspose(
+ filters=2, kernel_size=1, strides=2
+ )
+
+ outputs = conv_transpose(inputs)
+ self.assertEqual((10, 10, 2), outputs.shape)
if __name__ == '__main__':
os.environ["CUDA_VISIBLE_DEVICES"] = ''
tf.test.main()
|
Add Second Shape Test for Layers Util
|
## Code Before:
"""Tests for spectral."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
import numpy as np
import os
import layers
class LayersTest(tf.test.TestCase):
def test_conv_transpose_shape(self):
inputs = np.random.normal(size=(10, 5, 2)).astype(np.float32)
conv_transpose = layers.Conv1DTranspose(
filters=2, kernel_size=1, strides=1
)
outputs = conv_transpose(inputs)
self.assertShapeEqual(inputs, outputs)
if __name__ == '__main__':
os.environ["CUDA_VISIBLE_DEVICES"] = ''
tf.test.main()
## Instruction:
Add Second Shape Test for Layers Util
## Code After:
"""Tests for spectral."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
import numpy as np
import os
import layers
class LayersTest(tf.test.TestCase):
def test_conv_transpose_shape(self):
inputs = np.random.normal(size=(10, 5, 2)).astype(np.float32)
conv_transpose = layers.Conv1DTranspose(
filters=2, kernel_size=1, strides=1
)
outputs = conv_transpose(inputs)
self.assertShapeEqual(inputs, outputs)
def test_conv_transpose_shape_upscale(self):
inputs = np.random.normal(size=(10, 5, 2)).astype(np.float32)
conv_transpose = layers.Conv1DTranspose(
filters=2, kernel_size=1, strides=2
)
outputs = conv_transpose(inputs)
self.assertEqual((10, 10, 2), outputs.shape)
if __name__ == '__main__':
os.environ["CUDA_VISIBLE_DEVICES"] = ''
tf.test.main()
|
# ... existing code ...
outputs = conv_transpose(inputs)
self.assertShapeEqual(inputs, outputs)
def test_conv_transpose_shape_upscale(self):
inputs = np.random.normal(size=(10, 5, 2)).astype(np.float32)
conv_transpose = layers.Conv1DTranspose(
filters=2, kernel_size=1, strides=2
)
outputs = conv_transpose(inputs)
self.assertEqual((10, 10, 2), outputs.shape)
if __name__ == '__main__':
# ... rest of the code ...
|
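With kernel size 1 and default padding, a transposed 1-D convolution scales the time axis by the stride, so (10, 5, 2) becomes (10, 10, 2) at stride 2; the new test pins that shape rule down for the repo's custom layer. A rough standalone check of the same rule using Keras' built-in layer (assuming a TensorFlow version where `tf.keras.layers.Conv1DTranspose` exists; the custom layer in this repo predates it):

```python
import numpy as np
import tensorflow as tf

inputs = np.random.normal(size=(10, 5, 2)).astype(np.float32)
layer = tf.keras.layers.Conv1DTranspose(filters=2, kernel_size=1, strides=2)
outputs = layer(inputs)
# output_length == input_length * stride when kernel_size == 1
assert outputs.shape == (10, 10, 2)
```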
9a2cc99b068b2aaa572f52b4516852b239577c34
|
dummyserver/server.py
|
dummyserver/server.py
|
import threading, socket
"""
Dummy server using for unit testing
"""
class Server(threading.Thread):
def __init__(self, handler, host='localhost', port=8021):
threading.Thread.__init__(self)
self.handler = handler
self.host = host
self.port = port
self.ready_event = threading.Event()
self.stop_event = threading.Event()
def run(self):
sock = socket.socket()
sock.bind((self.host, self.port))
sock.listen(0)
self.ready_event.set()
self.handler(sock)
self.stop_event.set()
sock.close()
def __enter__(self):
self.start()
self.ready_event.wait()
return self.host, self.port
def __exit__(self, exc_type, exc_value, traceback):
if exc_type is None:
self.stop_event.wait()
return False # allow exceptions to propagate
|
import threading, socket
class Server(threading.Thread):
""" Dummy server using for unit testing """
def __init__(self, handler, host='localhost', port=8021):
threading.Thread.__init__(self)
self.handler = handler
self.host = host
self.port = port
self.ready_event = threading.Event()
self.stop_event = threading.Event()
def run(self):
sock = socket.socket()
sock.bind((self.host, self.port))
sock.listen(0)
self.ready_event.set()
self.handler(sock)
self.stop_event.set()
sock.close()
def __enter__(self):
self.start()
self.ready_event.wait()
return self.host, self.port
def __exit__(self, exc_type, exc_value, traceback):
if exc_type is None:
self.stop_event.wait()
return False # allow exceptions to propagate
|
Put docstring inside Server class
|
Put docstring inside Server class
|
Python
|
apache-2.0
|
psf/requests
|
import threading, socket
- """
- Dummy server using for unit testing
- """
class Server(threading.Thread):
+ """ Dummy server using for unit testing """
+
def __init__(self, handler, host='localhost', port=8021):
threading.Thread.__init__(self)
self.handler = handler
self.host = host
self.port = port
self.ready_event = threading.Event()
self.stop_event = threading.Event()
def run(self):
sock = socket.socket()
sock.bind((self.host, self.port))
sock.listen(0)
self.ready_event.set()
self.handler(sock)
self.stop_event.set()
sock.close()
def __enter__(self):
self.start()
self.ready_event.wait()
return self.host, self.port
def __exit__(self, exc_type, exc_value, traceback):
if exc_type is None:
self.stop_event.wait()
return False # allow exceptions to propagate
|
Put docstring inside Server class
|
## Code Before:
import threading, socket
"""
Dummy server using for unit testing
"""
class Server(threading.Thread):
def __init__(self, handler, host='localhost', port=8021):
threading.Thread.__init__(self)
self.handler = handler
self.host = host
self.port = port
self.ready_event = threading.Event()
self.stop_event = threading.Event()
def run(self):
sock = socket.socket()
sock.bind((self.host, self.port))
sock.listen(0)
self.ready_event.set()
self.handler(sock)
self.stop_event.set()
sock.close()
def __enter__(self):
self.start()
self.ready_event.wait()
return self.host, self.port
def __exit__(self, exc_type, exc_value, traceback):
if exc_type is None:
self.stop_event.wait()
return False # allow exceptions to propagate
## Instruction:
Put docstring inside Server class
## Code After:
import threading, socket
class Server(threading.Thread):
""" Dummy server using for unit testing """
def __init__(self, handler, host='localhost', port=8021):
threading.Thread.__init__(self)
self.handler = handler
self.host = host
self.port = port
self.ready_event = threading.Event()
self.stop_event = threading.Event()
def run(self):
sock = socket.socket()
sock.bind((self.host, self.port))
sock.listen(0)
self.ready_event.set()
self.handler(sock)
self.stop_event.set()
sock.close()
def __enter__(self):
self.start()
self.ready_event.wait()
return self.host, self.port
def __exit__(self, exc_type, exc_value, traceback):
if exc_type is None:
self.stop_event.wait()
return False # allow exceptions to propagate
|
// ... existing code ...
class Server(threading.Thread):
""" Dummy server using for unit testing """
def __init__(self, handler, host='localhost', port=8021):
threading.Thread.__init__(self)
self.handler = handler
// ... rest of the code ...
|
172c0123d5ce59ce4f162d806fc706dc50eb4312
|
distarray/tests/test_client.py
|
distarray/tests/test_client.py
|
import unittest
import numpy as np
from IPython.parallel import Client
from distarray.client import DistArrayContext
class TestDistArrayContext(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
def test_create_DAC(self):
'''Can we create a plain vanilla context?'''
dac = DistArrayContext(self.dv)
self.assertIs(dac.view, self.dv)
def test_create_DAC_with_targets(self):
'''Can we create a context with a subset of engines?'''
dac = DistArrayContext(self.dv, targets=[0, 1])
self.assertIs(dac.view, self.dv)
class TestDistArrayProxy(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
self.dac = DistArrayContext(self.dv)
self.dap = self.dac.fromndarray(np.arange(100))
def test_getitem(self):
self.assertEqual(self.dap[55], 55)
def test_setitem(self):
self.dap[35] = 9999
print self.dap[35]
if __name__ == '__main__':
unittest.main(verbosity=2)
|
import unittest
from IPython.parallel import Client
from distarray.client import DistArrayContext
class TestDistArrayContext(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
def test_create_DAC(self):
'''Can we create a plain vanilla context?'''
dac = DistArrayContext(self.dv)
self.assertIs(dac.view, self.dv)
def test_create_DAC_with_targets(self):
'''Can we create a context with a subset of engines?'''
dac = DistArrayContext(self.dv, targets=[0, 1])
self.assertIs(dac.view, self.dv)
class TestDistArrayProxy(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
self.dac = DistArrayContext(self.dv)
def test_set_and_getitem_(self):
dap = self.dac.empty((100,))
for val in xrange(100):
dap[val] = val
for val in xrange(100):
self.assertEqual(dap[val], val)
if __name__ == '__main__':
unittest.main(verbosity=2)
|
Test DAP getitem and setitem together.
|
Test DAP getitem and setitem together.
|
Python
|
bsd-3-clause
|
RaoUmer/distarray,enthought/distarray,RaoUmer/distarray,enthought/distarray
|
import unittest
- import numpy as np
from IPython.parallel import Client
from distarray.client import DistArrayContext
class TestDistArrayContext(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
def test_create_DAC(self):
'''Can we create a plain vanilla context?'''
dac = DistArrayContext(self.dv)
self.assertIs(dac.view, self.dv)
def test_create_DAC_with_targets(self):
'''Can we create a context with a subset of engines?'''
dac = DistArrayContext(self.dv, targets=[0, 1])
self.assertIs(dac.view, self.dv)
class TestDistArrayProxy(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
self.dac = DistArrayContext(self.dv)
- self.dap = self.dac.fromndarray(np.arange(100))
+ def test_set_and_getitem_(self):
+ dap = self.dac.empty((100,))
- def test_getitem(self):
- self.assertEqual(self.dap[55], 55)
+ for val in xrange(100):
+ dap[val] = val
+ for val in xrange(100):
+ self.assertEqual(dap[val], val)
- def test_setitem(self):
- self.dap[35] = 9999
- print self.dap[35]
if __name__ == '__main__':
unittest.main(verbosity=2)
|
Test DAP getitem and setitem together.
|
## Code Before:
import unittest
import numpy as np
from IPython.parallel import Client
from distarray.client import DistArrayContext
class TestDistArrayContext(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
def test_create_DAC(self):
'''Can we create a plain vanilla context?'''
dac = DistArrayContext(self.dv)
self.assertIs(dac.view, self.dv)
def test_create_DAC_with_targets(self):
'''Can we create a context with a subset of engines?'''
dac = DistArrayContext(self.dv, targets=[0, 1])
self.assertIs(dac.view, self.dv)
class TestDistArrayProxy(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
self.dac = DistArrayContext(self.dv)
self.dap = self.dac.fromndarray(np.arange(100))
def test_getitem(self):
self.assertEqual(self.dap[55], 55)
def test_setitem(self):
self.dap[35] = 9999
print self.dap[35]
if __name__ == '__main__':
unittest.main(verbosity=2)
## Instruction:
Test DAP getitem and setitem together.
## Code After:
import unittest
from IPython.parallel import Client
from distarray.client import DistArrayContext
class TestDistArrayContext(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
def test_create_DAC(self):
'''Can we create a plain vanilla context?'''
dac = DistArrayContext(self.dv)
self.assertIs(dac.view, self.dv)
def test_create_DAC_with_targets(self):
'''Can we create a context with a subset of engines?'''
dac = DistArrayContext(self.dv, targets=[0, 1])
self.assertIs(dac.view, self.dv)
class TestDistArrayProxy(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
self.dac = DistArrayContext(self.dv)
def test_set_and_getitem_(self):
dap = self.dac.empty((100,))
for val in xrange(100):
dap[val] = val
for val in xrange(100):
self.assertEqual(dap[val], val)
if __name__ == '__main__':
unittest.main(verbosity=2)
|
// ... existing code ...
import unittest
from IPython.parallel import Client
from distarray.client import DistArrayContext
// ... modified code ...
self.dv = self.client[:]
self.dac = DistArrayContext(self.dv)
def test_set_and_getitem_(self):
dap = self.dac.empty((100,))
for val in xrange(100):
dap[val] = val
for val in xrange(100):
self.assertEqual(dap[val], val)
if __name__ == '__main__':
// ... rest of the code ...
|
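The merged test exercises the round trip: every `__setitem__` through the proxy must be observable via `__getitem__` on the same index (note `xrange` keeps this Python 2-only). The property being checked, sketched against a local stand-in since the real test needs running IPython engines:

```python
class FakeDistArray(object):
    """Stand-in for a DistArray proxy; stores values locally."""
    def __init__(self, shape):
        self._data = [0] * shape[0]
    def __setitem__(self, idx, value):
        self._data[idx] = value
    def __getitem__(self, idx):
        return self._data[idx]

dap = FakeDistArray((100,))
for val in range(100):
    dap[val] = val
for val in range(100):
    assert dap[val] == val
```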
74983020db5cfcd3e81e258837979522f2d1b639
|
flac_errors.py
|
flac_errors.py
|
import re
errorFile = open('samples/foobar2000-errors.txt', 'r')
errors = errorFile.readlines()
index = -1
for i in range(len(errors)):
line = errors[i]
if re.search(r'List of undecodable items:', line):
index = i
nbErrorsFoobar = 0
for i in range(index, len(errors)):
line = errors[i]
match = re.search(r'"(.*.flac)"', line)
if match:
nbErrorsFoobar += 1
print(match.group(1))
errorFile.close()
errorFile = open('samples/dbpoweramp-errors.txt', 'r')
errors = errorFile.readlines()
index = -1
nbErrorsDbPowerAmp = 0
for i in range(len(errors)):
line = errors[i]
match = re.search(r'/.+/[^/]+.flac', line)
if match:
print(match.group())
if (re.search(r'Encountered', line) or re.search(r'md5 did not match decoded data, file is corrupt.', line)):
nbErrorsDbPowerAmp += 1
print(nbErrorsFoobar, 'FLAC errors from foobar2000')
print(nbErrorsDbPowerAmp, 'FLAC errors from dbPowerAmp')
|
import re
errorFile = open('samples/foobar2000-errors.txt', 'r')
errors = errorFile.readlines()
errorsSet = set()
index = -1
for i in range(len(errors)):
line = errors[i]
if re.search(r'List of undecodable items:', line):
index = i
nbErrorsFoobar = 0
for i in range(index, len(errors)):
line = errors[i]
match = re.search(r'\\.+\\([^\\]+.flac)', line)
if match:
errorsSet.add(match.group(1))
nbErrorsFoobar += 1
# print(match.group(1))
errorFile.close()
errorFile = open('samples/dbpoweramp-errors.txt', 'r')
errors = errorFile.readlines()
index = -1
nbErrorsDbPowerAmp = 0
for i in range(len(errors)):
line = errors[i]
match = re.search(r'/.+/([^/]+.flac)', line)
if match:
errorsSet.add(match.group(1))
# print(match.group(1))
if (re.search(r'Encountered', line) or re.search(r'md5 did not match decoded data, file is corrupt.', line)):
nbErrorsDbPowerAmp += 1
print(nbErrorsFoobar, 'FLAC errors from foobar2000')
print(nbErrorsDbPowerAmp, 'FLAC errors from dbPowerAmp')
print(len(errorsSet))
for item in errorsSet:
print(item)
|
Use a Set to avoid duplicates
|
Use a Set to avoid duplicates
|
Python
|
mit
|
derekhendrickx/find-my-flac-errors
|
import re
errorFile = open('samples/foobar2000-errors.txt', 'r')
errors = errorFile.readlines()
+ errorsSet = set()
index = -1
for i in range(len(errors)):
line = errors[i]
if re.search(r'List of undecodable items:', line):
index = i
nbErrorsFoobar = 0
for i in range(index, len(errors)):
line = errors[i]
- match = re.search(r'"(.*.flac)"', line)
+ match = re.search(r'\\.+\\([^\\]+.flac)', line)
if match:
+ errorsSet.add(match.group(1))
nbErrorsFoobar += 1
- print(match.group(1))
+ # print(match.group(1))
errorFile.close()
errorFile = open('samples/dbpoweramp-errors.txt', 'r')
errors = errorFile.readlines()
index = -1
nbErrorsDbPowerAmp = 0
for i in range(len(errors)):
line = errors[i]
- match = re.search(r'/.+/[^/]+.flac', line)
+ match = re.search(r'/.+/([^/]+.flac)', line)
if match:
+ errorsSet.add(match.group(1))
- print(match.group())
+ # print(match.group(1))
if (re.search(r'Encountered', line) or re.search(r'md5 did not match decoded data, file is corrupt.', line)):
nbErrorsDbPowerAmp += 1
print(nbErrorsFoobar, 'FLAC errors from foobar2000')
print(nbErrorsDbPowerAmp, 'FLAC errors from dbPowerAmp')
+ print(len(errorsSet))
+
+ for item in errorsSet:
+ print(item)
|
Use a Set to avoid duplicates
|
## Code Before:
import re
errorFile = open('samples/foobar2000-errors.txt', 'r')
errors = errorFile.readlines()
index = -1
for i in range(len(errors)):
line = errors[i]
if re.search(r'List of undecodable items:', line):
index = i
nbErrorsFoobar = 0
for i in range(index, len(errors)):
line = errors[i]
match = re.search(r'"(.*.flac)"', line)
if match:
nbErrorsFoobar += 1
print(match.group(1))
errorFile.close()
errorFile = open('samples/dbpoweramp-errors.txt', 'r')
errors = errorFile.readlines()
index = -1
nbErrorsDbPowerAmp = 0
for i in range(len(errors)):
line = errors[i]
match = re.search(r'/.+/[^/]+.flac', line)
if match:
print(match.group())
if (re.search(r'Encountered', line) or re.search(r'md5 did not match decoded data, file is corrupt.', line)):
nbErrorsDbPowerAmp += 1
print(nbErrorsFoobar, 'FLAC errors from foobar2000')
print(nbErrorsDbPowerAmp, 'FLAC errors from dbPowerAmp')
## Instruction:
Use a Set to avoid duplicates
## Code After:
import re
errorFile = open('samples/foobar2000-errors.txt', 'r')
errors = errorFile.readlines()
errorsSet = set()
index = -1
for i in range(len(errors)):
line = errors[i]
if re.search(r'List of undecodable items:', line):
index = i
nbErrorsFoobar = 0
for i in range(index, len(errors)):
line = errors[i]
match = re.search(r'\\.+\\([^\\]+.flac)', line)
if match:
errorsSet.add(match.group(1))
nbErrorsFoobar += 1
# print(match.group(1))
errorFile.close()
errorFile = open('samples/dbpoweramp-errors.txt', 'r')
errors = errorFile.readlines()
index = -1
nbErrorsDbPowerAmp = 0
for i in range(len(errors)):
line = errors[i]
match = re.search(r'/.+/([^/]+.flac)', line)
if match:
errorsSet.add(match.group(1))
# print(match.group(1))
if (re.search(r'Encountered', line) or re.search(r'md5 did not match decoded data, file is corrupt.', line)):
nbErrorsDbPowerAmp += 1
print(nbErrorsFoobar, 'FLAC errors from foobar2000')
print(nbErrorsDbPowerAmp, 'FLAC errors from dbPowerAmp')
print(len(errorsSet))
for item in errorsSet:
print(item)
|
# ... existing code ...
errorFile = open('samples/foobar2000-errors.txt', 'r')
errors = errorFile.readlines()
errorsSet = set()
index = -1
for i in range(len(errors)):
# ... modified code ...
nbErrorsFoobar = 0
for i in range(index, len(errors)):
line = errors[i]
match = re.search(r'\\.+\\([^\\]+.flac)', line)
if match:
errorsSet.add(match.group(1))
nbErrorsFoobar += 1
# print(match.group(1))
errorFile.close()
...
nbErrorsDbPowerAmp = 0
for i in range(len(errors)):
line = errors[i]
match = re.search(r'/.+/([^/]+.flac)', line)
if match:
errorsSet.add(match.group(1))
# print(match.group(1))
if (re.search(r'Encountered', line) or re.search(r'md5 did not match decoded data, file is corrupt.', line)):
nbErrorsDbPowerAmp += 1
print(nbErrorsFoobar, 'FLAC errors from foobar2000')
print(nbErrorsDbPowerAmp, 'FLAC errors from dbPowerAmp')
print(len(errorsSet))
for item in errorsSet:
print(item)
# ... rest of the code ...
|
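Collecting filenames into a set makes the deduplication order-independent across both log files, at the cost of the final listing printing in arbitrary order. A compact sketch of the same regex-into-set pattern on inline data, assuming log lines shaped like the ones above (one Windows-style path, one POSIX-style):

```python
import re

lines = [
    r'C:\Music\Album\01 - Track.flac could not decode',
    '/music/album/01 - Track.flac md5 did not match decoded data',
]
errors = set()
for line in lines:
    # Capture the final path component ending in .flac, after / or \.
    m = re.search(r'[\\/]([^\\/]+\.flac)', line)
    if m:
        errors.add(m.group(1))

# Both log formats collapse to a single entry.
assert errors == {'01 - Track.flac'}
```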
2b8869bb508f4fb67867385f3058372bde664ca5
|
CheckProxy/CheckProxy.py
|
CheckProxy/CheckProxy.py
|
import discord
import requests
from discord.ext import commands
class checkproxy:
"""Cog for proxy checking"""
def __init__(self, bot):
self.bot = bot
@commands.command(pass_context=True)
async def checkproxy(self, ctx, proxy):
"""Checks the provided proxy."""
p = proxy
pr = {
'http': p,
'https': p
}
try:
r = requests.get('https://pgorelease.nianticlabs.com/plfe/version', proxies=pr)
if r.status_code == 200:
await self.bot.say(':white_check_mark: 200 OK, proxy is not banned.')
if r.status_code == 403:
await self.bot.say(':x: 403 Forbidden, proxy is banned.')
except requests.exceptions.RequestException as e:
await self.bot.say('Something is wrong with your proxy. Make sure to put the port as well as remove http or https from your input. Authentication is not supported right now.')
if not ctx.message.channel.is_private:
await self.bot.delete_message(ctx.message)
def setup(bot):
bot.add_cog(checkproxy(bot))
|
import discord
import requests
from discord.ext import commands
class checkproxy:
"""Cog for proxy checking"""
def __init__(self, bot):
self.bot = bot
@commands.command(pass_context=True)
async def checkproxy(self, ctx, proxy):
"""Checks the provided proxy."""
p = proxy
pr = {
'http': p,
'https': p
}
try:
r = requests.get('https://pgorelease.nianticlabs.com/plfe/version', proxies=pr, timeout=5)
if r.status_code == 200:
await self.bot.say(':white_check_mark: 200 OK, proxy is not banned.')
if r.status_code == 403:
await self.bot.say(':x: 403 Forbidden, proxy is banned.')
except requests.exceptions.timeout:
await self.bot.say(':x: Timed out checking proxy.')
except requests.exceptions.RequestException as e:
await self.bot.say('Something is wrong with your proxy. Make sure to put the port as well as remove http or https from your input. Authentication is not supported right now.')
if not ctx.message.channel.is_private:
await self.bot.delete_message(ctx.message)
def setup(bot):
bot.add_cog(checkproxy(bot))
|
Add 5s timeout to checkproxy (in an effort to prevent bot hanging)
|
Add 5s timeout to checkproxy (in an effort to prevent bot hanging)
|
Python
|
agpl-3.0
|
FrostTheFox/RocketMap-cogs
|
import discord
import requests
from discord.ext import commands
class checkproxy:
"""Cog for proxy checking"""
def __init__(self, bot):
self.bot = bot
@commands.command(pass_context=True)
async def checkproxy(self, ctx, proxy):
"""Checks the provided proxy."""
p = proxy
pr = {
'http': p,
'https': p
}
try:
- r = requests.get('https://pgorelease.nianticlabs.com/plfe/version', proxies=pr)
+ r = requests.get('https://pgorelease.nianticlabs.com/plfe/version', proxies=pr, timeout=5)
if r.status_code == 200:
await self.bot.say(':white_check_mark: 200 OK, proxy is not banned.')
if r.status_code == 403:
await self.bot.say(':x: 403 Forbidden, proxy is banned.')
+ except requests.exceptions.timeout:
+ await self.bot.say(':x: Timed out checking proxy.')
except requests.exceptions.RequestException as e:
await self.bot.say('Something is wrong with your proxy. Make sure to put the port as well as remove http or https from your input. Authentication is not supported right now.')
if not ctx.message.channel.is_private:
await self.bot.delete_message(ctx.message)
def setup(bot):
bot.add_cog(checkproxy(bot))
|
Add 5s timeout to checkproxy (in an effort to prevent the bot from hanging)
|
## Code Before:
import discord
import requests
from discord.ext import commands
class checkproxy:
"""Cog for proxy checking"""
def __init__(self, bot):
self.bot = bot
@commands.command(pass_context=True)
async def checkproxy(self, ctx, proxy):
"""Checks the provided proxy."""
p = proxy
pr = {
'http': p,
'https': p
}
try:
r = requests.get('https://pgorelease.nianticlabs.com/plfe/version', proxies=pr)
if r.status_code == 200:
await self.bot.say(':white_check_mark: 200 OK, proxy is not banned.')
if r.status_code == 403:
await self.bot.say(':x: 403 Forbidden, proxy is banned.')
except requests.exceptions.RequestException as e:
await self.bot.say('Something is wrong with your proxy. Make sure to put the port as well as remove http or https from your input. Authentication is not supported right now.')
if not ctx.message.channel.is_private:
await self.bot.delete_message(ctx.message)
def setup(bot):
bot.add_cog(checkproxy(bot))
## Instruction:
Add 5s timeout to checkproxy (in an effort to prevent the bot from hanging)
## Code After:
import discord
import requests
from discord.ext import commands
class checkproxy:
"""Cog for proxy checking"""
def __init__(self, bot):
self.bot = bot
@commands.command(pass_context=True)
async def checkproxy(self, ctx, proxy):
"""Checks the provided proxy."""
p = proxy
pr = {
'http': p,
'https': p
}
try:
r = requests.get('https://pgorelease.nianticlabs.com/plfe/version', proxies=pr, timeout=5)
if r.status_code == 200:
await self.bot.say(':white_check_mark: 200 OK, proxy is not banned.')
if r.status_code == 403:
await self.bot.say(':x: 403 Forbidden, proxy is banned.')
except requests.exceptions.Timeout:
await self.bot.say(':x: Timed out checking proxy.')
except requests.exceptions.RequestException as e:
await self.bot.say('Something is wrong with your proxy. Make sure to put the port as well as remove http or https from your input. Authentication is not supported right now.')
if not ctx.message.channel.is_private:
await self.bot.delete_message(ctx.message)
def setup(bot):
bot.add_cog(checkproxy(bot))
|
...
'https': p
}
try:
r = requests.get('https://pgorelease.nianticlabs.com/plfe/version', proxies=pr, timeout=5)
if r.status_code == 200:
await self.bot.say(':white_check_mark: 200 OK, proxy is not banned.')
if r.status_code == 403:
await self.bot.say(':x: 403 Forbidden, proxy is banned.')
except requests.exceptions.Timeout:
await self.bot.say(':x: Timed out checking proxy.')
except requests.exceptions.RequestException as e:
await self.bot.say('Something is wrong with your proxy. Make sure to put the port as well as remove http or https from your input. Authentication is not supported right now.')
...
|
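The change in this record is the standard cure for a hung bot: a requests call without a timeout can block indefinitely, so the commit caps the wait at five seconds. A standalone sketch of the pattern, stripped of the discord.py plumbing and probing the same endpoint through a placeholder proxy address:

import requests

def check_proxy(proxy, timeout=5):
    """Probe the endpoint through `proxy`, waiting at most `timeout` seconds."""
    proxies = {"http": proxy, "https": proxy}
    try:
        r = requests.get("https://pgorelease.nianticlabs.com/plfe/version",
                         proxies=proxies, timeout=timeout)
    except requests.exceptions.Timeout:
        return "timed out"
    except requests.exceptions.RequestException as exc:
        return "request failed: {}".format(exc)
    if r.status_code == 403:
        return "403 Forbidden, proxy is banned"
    return "{} received".format(r.status_code)

# Placeholder proxy address, for illustration only:
print(check_proxy("127.0.0.1:8080"))

Two details worth noting: the exception class is spelled requests.exceptions.Timeout, capitalized, and it must be caught before RequestException, of which it is a subclass; the generic handler would otherwise swallow it.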
1fc2e747f1c02d5b8559f03187464eecda008190
|
fernet_fields/test/testmigrate/migrations/0004_copy_values.py
|
fernet_fields/test/testmigrate/migrations/0004_copy_values.py
|
from __future__ import unicode_literals
from django.db import migrations
def forwards(apps, schema_editor):
DualText = apps.get_model('testmigrate', 'DualText')
for obj in DualText.objects.all():
obj.value_dual = obj.value
def backwards(apps, schema_editor):
DualText = apps.get_model('testmigrate', 'DualText')
for obj in DualText.objects.all():
obj.value = obj.value_dual
class Migration(migrations.Migration):
dependencies = [
('testmigrate', '0003_add_value_dual'),
]
operations = [
migrations.RunPython(forwards, backwards),
]
|
from __future__ import unicode_literals
from django.db import migrations
def forwards(apps, schema_editor):
DualText = apps.get_model('testmigrate', 'DualText')
for obj in DualText.objects.all():
obj.value_dual = obj.value
obj.save(force_update=True)
def backwards(apps, schema_editor):
DualText = apps.get_model('testmigrate', 'DualText')
for obj in DualText.objects.all():
obj.value = obj.value_dual
obj.save(force_update=True)
class Migration(migrations.Migration):
dependencies = [
('testmigrate', '0003_add_value_dual'),
]
operations = [
migrations.RunPython(forwards, backwards),
]
|
Fix test migration to actually save updates.
|
Fix test migration to actually save updates.
|
Python
|
bsd-3-clause
|
orcasgit/django-fernet-fields
|
from __future__ import unicode_literals
from django.db import migrations
def forwards(apps, schema_editor):
DualText = apps.get_model('testmigrate', 'DualText')
for obj in DualText.objects.all():
obj.value_dual = obj.value
+ obj.save(force_update=True)
def backwards(apps, schema_editor):
DualText = apps.get_model('testmigrate', 'DualText')
for obj in DualText.objects.all():
obj.value = obj.value_dual
+ obj.save(force_update=True)
class Migration(migrations.Migration):
dependencies = [
('testmigrate', '0003_add_value_dual'),
]
operations = [
migrations.RunPython(forwards, backwards),
]
|
Fix test migration to actually save updates.
|
## Code Before:
from __future__ import unicode_literals
from django.db import migrations
def forwards(apps, schema_editor):
DualText = apps.get_model('testmigrate', 'DualText')
for obj in DualText.objects.all():
obj.value_dual = obj.value
def backwards(apps, schema_editor):
DualText = apps.get_model('testmigrate', 'DualText')
for obj in DualText.objects.all():
obj.value = obj.value_dual
class Migration(migrations.Migration):
dependencies = [
('testmigrate', '0003_add_value_dual'),
]
operations = [
migrations.RunPython(forwards, backwards),
]
## Instruction:
Fix test migration to actually save updates.
## Code After:
from __future__ import unicode_literals
from django.db import migrations
def forwards(apps, schema_editor):
DualText = apps.get_model('testmigrate', 'DualText')
for obj in DualText.objects.all():
obj.value_dual = obj.value
obj.save(force_update=True)
def backwards(apps, schema_editor):
DualText = apps.get_model('testmigrate', 'DualText')
for obj in DualText.objects.all():
obj.value = obj.value_dual
obj.save(force_update=True)
class Migration(migrations.Migration):
dependencies = [
('testmigrate', '0003_add_value_dual'),
]
operations = [
migrations.RunPython(forwards, backwards),
]
|
// ... existing code ...
DualText = apps.get_model('testmigrate', 'DualText')
for obj in DualText.objects.all():
obj.value_dual = obj.value
obj.save(force_update=True)
def backwards(apps, schema_editor):
// ... modified code ...
DualText = apps.get_model('testmigrate', 'DualText')
for obj in DualText.objects.all():
obj.value = obj.value_dual
obj.save(force_update=True)
class Migration(migrations.Migration):
// ... rest of the code ...
|
ec6099421bad222595be15f4f0b2596952d8c9cc
|
username_to_uuid.py
|
username_to_uuid.py
|
import http.client
import json
class UsernameToUUID:
def __init__(self, username):
self.username = username
def get_uuid(self, timestamp=None):
"""
Get the UUID of the player.
Parameters
----------
timestamp : long integer
The time at which the player used this name, expressed as a Unix timestamp.
"""
get_args = "" if timestamp is None else "?at=" + str(timestamp)
http_conn = http.client.HTTPSConnection("api.mojang.com");
http_conn.request("GET", "/users/profiles/minecraft/" + self.username + get_args,
headers={'User-Agent':'Minecraft Username -> UUID', 'Content-Type':'application/json'});
response = http_conn.getresponse().read().decode("utf-8")
if (not response and timestamp is None): # No response & no timestamp
return self.get_uuid(0) # Let's retry with the Unix timestamp 0.
if (not response): # No response (player probably doesn't exist)
return ""
json_data = json.loads(response)
uuid = json_data['id']
return uuid
|
import http.client
import json
class UsernameToUUID:
def __init__(self, username):
self.username = username
def get_uuid(self, timestamp=None):
"""
Get the UUID of the player.
Parameters
----------
timestamp : long integer
The time at which the player used this name, expressed as a Unix timestamp.
"""
get_args = "" if timestamp is None else "?at=" + str(timestamp)
http_conn = http.client.HTTPSConnection("api.mojang.com");
http_conn.request("GET", "/users/profiles/minecraft/" + self.username + get_args,
headers={'User-Agent':'Minecraft Username -> UUID', 'Content-Type':'application/json'});
response = http_conn.getresponse().read().decode("utf-8")
if (not response and timestamp is None): # No response & no timestamp
return self.get_uuid(0) # Let's retry with the Unix timestamp 0.
if (not response): # No response (player probably doesn't exist)
return ""
json_data = json.loads(response)
try:
uuid = json_data['id']
except KeyError as e:
print("KeyError raised:", e)
return ""
return uuid
|
Improve robustness: surround the 'id' fetch from the result object with a try clause.
|
Improve robustness: surround the 'id' fetch from the result object with a try clause.
|
Python
|
mit
|
mrlolethan/MinecraftUsernameToUUID
|
import http.client
import json
class UsernameToUUID:
def __init__(self, username):
self.username = username
def get_uuid(self, timestamp=None):
"""
Get the UUID of the player.
Parameters
----------
timestamp : long integer
The time at which the player used this name, expressed as a Unix timestamp.
"""
get_args = "" if timestamp is None else "?at=" + str(timestamp)
http_conn = http.client.HTTPSConnection("api.mojang.com");
http_conn.request("GET", "/users/profiles/minecraft/" + self.username + get_args,
headers={'User-Agent':'Minecraft Username -> UUID', 'Content-Type':'application/json'});
response = http_conn.getresponse().read().decode("utf-8")
if (not response and timestamp is None): # No response & no timestamp
return self.get_uuid(0) # Let's retry with the Unix timestamp 0.
if (not response): # No response (player probably doesn't exist)
return ""
json_data = json.loads(response)
+ try:
- uuid = json_data['id']
+ uuid = json_data['id']
+ except KeyError as e:
+ print("KeyError raised:", e)
+ return ""
return uuid
|
Improve robustness: surround the 'id' fetch from the result object with a try clause.
|
## Code Before:
import http.client
import json
class UsernameToUUID:
def __init__(self, username):
self.username = username
def get_uuid(self, timestamp=None):
"""
Get the UUID of the player.
Parameters
----------
timestamp : long integer
The time at which the player used this name, expressed as a Unix timestamp.
"""
get_args = "" if timestamp is None else "?at=" + str(timestamp)
http_conn = http.client.HTTPSConnection("api.mojang.com");
http_conn.request("GET", "/users/profiles/minecraft/" + self.username + get_args,
headers={'User-Agent':'Minecraft Username -> UUID', 'Content-Type':'application/json'});
response = http_conn.getresponse().read().decode("utf-8")
if (not response and timestamp is None): # No response & no timestamp
return self.get_uuid(0) # Let's retry with the Unix timestamp 0.
if (not response): # No response (player probably doesn't exist)
return ""
json_data = json.loads(response)
uuid = json_data['id']
return uuid
## Instruction:
Improve robustness: surround the 'id' fetch from the result object with a try clause.
## Code After:
import http.client
import json
class UsernameToUUID:
def __init__(self, username):
self.username = username
def get_uuid(self, timestamp=None):
"""
Get the UUID of the player.
Parameters
----------
timestamp : long integer
The time at which the player used this name, expressed as a Unix timestamp.
"""
get_args = "" if timestamp is None else "?at=" + str(timestamp)
http_conn = http.client.HTTPSConnection("api.mojang.com");
http_conn.request("GET", "/users/profiles/minecraft/" + self.username + get_args,
headers={'User-Agent':'Minecraft Username -> UUID', 'Content-Type':'application/json'});
response = http_conn.getresponse().read().decode("utf-8")
if (not response and timestamp is None): # No response & no timestamp
return self.get_uuid(0) # Let's retry with the Unix timestamp 0.
if (not response): # No response (player probably doesn't exist)
return ""
json_data = json.loads(response)
try:
uuid = json_data['id']
except KeyError as e:
print("KeyError raised:", e)
return ""
return uuid
|
...
return ""
json_data = json.loads(response)
try:
uuid = json_data['id']
except KeyError as e:
print("KeyError raised:", e)
return ""
return uuid
...
|
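The guard added in this record protects against a body that parses as JSON but carries no 'id' key; returning early from the except branch (rather than falling through to return uuid) also avoids a NameError on an unbound variable. A condensed sketch of the hardened lookup, swapping http.client for the requests package purely for brevity and omitting the record's retry-with-timestamp-0 fallback:

import requests

def username_to_uuid(username, timestamp=None):
    """Resolve a Minecraft username to a UUID, returning "" on any miss."""
    url = "https://api.mojang.com/users/profiles/minecraft/" + username
    params = {} if timestamp is None else {"at": timestamp}
    response = requests.get(url, params=params, timeout=10)
    if not response.text:
        return ""  # empty body: the player probably does not exist
    try:
        return response.json()["id"]
    except (ValueError, KeyError):
        return ""  # malformed JSON, or a payload without an 'id' key

print(username_to_uuid("Notch"))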
270af43ffbe8974698d17ff6d5cae20fbf410f73
|
admin/urls.py
|
admin/urls.py
|
from .views import CubeHandler, ConnectionHandler
from .views import ElementHandler, DashboardHandler, APIElementCubeHandler
INCLUDE_URLS = [
(r"/admin/connection/?(?P<slug>[\w-]+)?", ConnectionHandler),
(r"/admin/cube/?(?P<slug>[\w-]+)?", CubeHandler),
(r"/admin/api/element/cube/?(?P<slug>[\w-]+)?", APIElementCubeHandler),
(r"/admin/element/?(?P<slug>[\w-]+)?", ElementHandler),
(r"/admin/dashboard/?(?P<slug>[\w-]+)?", DashboardHandler),
]
|
from .views import CubeHandler, ConnectionHandler, DeleteHandler
from .views import ElementHandler, DashboardHandler, APIElementCubeHandler
INCLUDE_URLS = [
(r"/admin/delete/(?P<bucket>[\w-]+)/(?P<slug>[\w-]+)", DeleteHandler),
(r"/admin/connection/?(?P<slug>[\w-]+)?", ConnectionHandler),
(r"/admin/cube/?(?P<slug>[\w-]+)?", CubeHandler),
(r"/admin/api/element/cube/?(?P<slug>[\w-]+)?", APIElementCubeHandler),
(r"/admin/element/?(?P<slug>[\w-]+)?", ElementHandler),
(r"/admin/dashboard/?(?P<slug>[\w-]+)?", DashboardHandler),
]
|
Add URL to delete an element on Riak
|
Add URL to delete an element on Riak
|
Python
|
mit
|
jgabriellima/mining,avelino/mining,chrisdamba/mining,seagoat/mining,avelino/mining,AndrzejR/mining,mlgruby/mining,mlgruby/mining,mining/mining,mlgruby/mining,mining/mining,chrisdamba/mining,AndrzejR/mining,seagoat/mining,jgabriellima/mining
|
- from .views import CubeHandler, ConnectionHandler
+ from .views import CubeHandler, ConnectionHandler, DeleteHandler
from .views import ElementHandler, DashboardHandler, APIElementCubeHandler
INCLUDE_URLS = [
+ (r"/admin/delete/(?P<bucket>[\w-]+)/(?P<slug>[\w-]+)", DeleteHandler),
(r"/admin/connection/?(?P<slug>[\w-]+)?", ConnectionHandler),
(r"/admin/cube/?(?P<slug>[\w-]+)?", CubeHandler),
(r"/admin/api/element/cube/?(?P<slug>[\w-]+)?", APIElementCubeHandler),
(r"/admin/element/?(?P<slug>[\w-]+)?", ElementHandler),
(r"/admin/dashboard/?(?P<slug>[\w-]+)?", DashboardHandler),
]
|
Add URL to delete an element on Riak
|
## Code Before:
from .views import CubeHandler, ConnectionHandler
from .views import ElementHandler, DashboardHandler, APIElementCubeHandler
INCLUDE_URLS = [
(r"/admin/connection/?(?P<slug>[\w-]+)?", ConnectionHandler),
(r"/admin/cube/?(?P<slug>[\w-]+)?", CubeHandler),
(r"/admin/api/element/cube/?(?P<slug>[\w-]+)?", APIElementCubeHandler),
(r"/admin/element/?(?P<slug>[\w-]+)?", ElementHandler),
(r"/admin/dashboard/?(?P<slug>[\w-]+)?", DashboardHandler),
]
## Instruction:
Add URL to delete an element on Riak
## Code After:
from .views import CubeHandler, ConnectionHandler, DeleteHandler
from .views import ElementHandler, DashboardHandler, APIElementCubeHandler
INCLUDE_URLS = [
(r"/admin/delete/(?P<bucket>[\w-]+)/(?P<slug>[\w-]+)", DeleteHandler),
(r"/admin/connection/?(?P<slug>[\w-]+)?", ConnectionHandler),
(r"/admin/cube/?(?P<slug>[\w-]+)?", CubeHandler),
(r"/admin/api/element/cube/?(?P<slug>[\w-]+)?", APIElementCubeHandler),
(r"/admin/element/?(?P<slug>[\w-]+)?", ElementHandler),
(r"/admin/dashboard/?(?P<slug>[\w-]+)?", DashboardHandler),
]
|
// ... existing code ...
from .views import CubeHandler, ConnectionHandler, DeleteHandler
from .views import ElementHandler, DashboardHandler, APIElementCubeHandler
INCLUDE_URLS = [
(r"/admin/delete/(?P<bucket>[\w-]+)/(?P<slug>[\w-]+)", DeleteHandler),
(r"/admin/connection/?(?P<slug>[\w-]+)?", ConnectionHandler),
(r"/admin/cube/?(?P<slug>[\w-]+)?", CubeHandler),
(r"/admin/api/element/cube/?(?P<slug>[\w-]+)?", APIElementCubeHandler),
// ... rest of the code ...
|
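The route added in this record relies on named regex groups, which Tornado-style frameworks hand to the handler as keyword arguments, so (?P<bucket>...) and (?P<slug>...) arrive as bucket and slug. A minimal runnable sketch with a hypothetical DeleteHandler body, since the record shows only the URL table and not the handler itself:

import tornado.ioloop
import tornado.web

class DeleteHandler(tornado.web.RequestHandler):
    """Hypothetical body; a real handler would remove the object from Riak."""
    def get(self, bucket, slug):
        # Tornado passes named regex groups as keyword arguments.
        self.write({"bucket": bucket, "slug": slug})

def make_app():
    return tornado.web.Application([
        (r"/admin/delete/(?P<bucket>[\w-]+)/(?P<slug>[\w-]+)", DeleteHandler),
    ])

if __name__ == "__main__":
    make_app().listen(8888)
    tornado.ioloop.IOLoop.current().start()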
8bf27ff0e112781724d0a8a28b1b3d44976f0155
|
lfs_order_numbers/models.py
|
lfs_order_numbers/models.py
|
from django.utils.translation import ugettext_lazy as _
from django.db import models
# lfs imports
from lfs.plugins import OrderNumberGenerator as Base
class OrderNumberGenerator(Base):
"""Generates order numbers and saves the last one.
**Attributes:**
id
The primary key of the order number.
last
The last stored/returned order number.
format
The format of the integer part of the order number.
"""
id = models.CharField(primary_key=True, max_length=20)
last = models.IntegerField(_(u"Last order number"), default=0)
format = models.CharField(blank=True, max_length=20)
def get_next(self, formatted=True):
"""Returns the next order number.
**Parameters:**
formatted
If True the number will be returned within the stored format.
"""
self.last += 1
self.save()
if formatted:
return self.format % self.last
else:
return self.last
|
from django.utils.translation import ugettext_lazy as _
from django.db import models
# lfs imports
from lfs.plugins import OrderNumberGenerator as Base
class OrderNumberGenerator(Base):
"""Generates order numbers and saves the last one.
**Attributes:**
id
The primary key of the order number.
last
The last stored/returned order number.
format
The format of the integer part of the order number.
"""
id = models.CharField(primary_key=True, max_length=20)
last = models.IntegerField(_(u"Last order number"), default=0)
format = models.CharField(blank=True, max_length=20)
def get_next(self, formatted=True):
"""Returns the next order number.
**Parameters:**
formatted
If True the number will be returned within the stored format.
"""
self.last += 1
self.save()
if formatted and self.format:
return self.format % self.last
else:
return self.last
|
Check whether there is a format given (this must be checked for validity though).
|
Check whether there is a format given (this must be checked for validity though).
|
Python
|
bsd-3-clause
|
diefenbach/lfs-order-numbers
|
from django.utils.translation import ugettext_lazy as _
from django.db import models
# lfs imports
from lfs.plugins import OrderNumberGenerator as Base
class OrderNumberGenerator(Base):
"""Generates order numbers and saves the last one.
**Attributes:**
id
The primary key of the order number.
last
The last stored/returned order number.
format
The format of the integer part of the order number.
"""
id = models.CharField(primary_key=True, max_length=20)
last = models.IntegerField(_(u"Last order number"), default=0)
format = models.CharField(blank=True, max_length=20)
def get_next(self, formatted=True):
"""Returns the next order number.
**Parameters:**
formatted
If True the number will be returned within the stored format.
"""
self.last += 1
self.save()
- if formatted:
+ if formatted and self.format:
return self.format % self.last
else:
return self.last
|
Check whether there is a format given (this must be checked for validity though).
|
## Code Before:
from django.utils.translation import ugettext_lazy as _
from django.db import models
# lfs imports
from lfs.plugins import OrderNumberGenerator as Base
class OrderNumberGenerator(Base):
"""Generates order numbers and saves the last one.
**Attributes:**
id
The primary key of the order number.
last
The last stored/returned order number.
format
The format of the integer part of the order number.
"""
id = models.CharField(primary_key=True, max_length=20)
last = models.IntegerField(_(u"Last order number"), default=0)
format = models.CharField(blank=True, max_length=20)
def get_next(self, formatted=True):
"""Returns the next order number.
**Parameters:**
formatted
If True the number will be returned within the stored format.
"""
self.last += 1
self.save()
if formatted:
return self.format % self.last
else:
return self.last
## Instruction:
Check whether there is a format given (this must be checked for validity though).
## Code After:
from django.utils.translation import ugettext_lazy as _
from django.db import models
# lfs imports
from lfs.plugins import OrderNumberGenerator as Base
class OrderNumberGenerator(Base):
"""Generates order numbers and saves the last one.
**Attributes:**
id
The primary key of the order number.
last
The last stored/returned order number.
format
The format of the integer part of the order number.
"""
id = models.CharField(primary_key=True, max_length=20)
last = models.IntegerField(_(u"Last order number"), default=0)
format = models.CharField(blank=True, max_length=20)
def get_next(self, formatted=True):
"""Returns the next order number.
**Parameters:**
formatted
If True the number will be returned within the stored format.
"""
self.last += 1
self.save()
if formatted and self.format:
return self.format % self.last
else:
return self.last
|
...
"""
self.last += 1
self.save()
if formatted and self.format:
return self.format % self.last
else:
return self.last
...
|