| column | type | stats |
|---|---|---|
| commit | stringlengths | 40 to 40 |
| old_file | stringlengths | 4 to 106 |
| new_file | stringlengths | 4 to 106 |
| old_contents | stringlengths | 10 to 2.94k |
| new_contents | stringlengths | 21 to 2.95k |
| subject | stringlengths | 16 to 444 |
| message | stringlengths | 17 to 2.63k |
| lang | stringclasses | 1 value |
| license | stringclasses | 13 values |
| repos | stringlengths | 7 to 43k |
| ndiff | stringlengths | 52 to 3.31k |
| instruction | stringlengths | 16 to 444 |
| content | stringlengths | 133 to 4.32k |
| diff | stringlengths | 49 to 3.61k |
54bce2a224843ec9c1c8b7eb35cdc6bf19d5726b
|
expensonator/api.py
|
expensonator/api.py
|
from tastypie.authorization import Authorization
from tastypie.fields import CharField
from tastypie.resources import ModelResource
from expensonator.models import Expense
class ExpenseResource(ModelResource):
tags = CharField()
def dehydrate_tags(self, bundle):
return bundle.obj.tags_as_string()
def save(self, bundle, skip_errors=False):
bundle = super(ExpenseResource, self).save(bundle, skip_errors)
bundle.obj.reset_tags_from_string(bundle.data["tags"])
return bundle
class Meta:
queryset = Expense.objects.all()
excludes = ["created", "updated"]
# WARNING: Tastypie docs say that this is VERY INSECURE!
# For development only!
authorization = Authorization()
|
from tastypie.authorization import Authorization
from tastypie.fields import CharField
from tastypie.resources import ModelResource
from expensonator.models import Expense
class ExpenseResource(ModelResource):
tags = CharField()
def dehydrate_tags(self, bundle):
return bundle.obj.tags_as_string()
def save(self, bundle, skip_errors=False):
bundle = super(ExpenseResource, self).save(bundle, skip_errors)
if "tags" in bundle.data:
bundle.obj.reset_tags_from_string(bundle.data["tags"])
return bundle
class Meta:
queryset = Expense.objects.all()
excludes = ["created", "updated"]
# WARNING: Tastypie docs say that this is VERY INSECURE!
# For development only!
authorization = Authorization()
|
Fix key error when no tags are specified
|
Fix key error when no tags are specified
|
Python
|
mit
|
matt-haigh/expensonator
|
from tastypie.authorization import Authorization
from tastypie.fields import CharField
from tastypie.resources import ModelResource
from expensonator.models import Expense
class ExpenseResource(ModelResource):
tags = CharField()
def dehydrate_tags(self, bundle):
return bundle.obj.tags_as_string()
def save(self, bundle, skip_errors=False):
bundle = super(ExpenseResource, self).save(bundle, skip_errors)
+ if "tags" in bundle.data:
- bundle.obj.reset_tags_from_string(bundle.data["tags"])
+ bundle.obj.reset_tags_from_string(bundle.data["tags"])
return bundle
class Meta:
queryset = Expense.objects.all()
excludes = ["created", "updated"]
# WARNING: Tastypie docs say that this is VERY INSECURE!
# For development only!
authorization = Authorization()
|
Fix key error when no tags are specified
|
## Code Before:
from tastypie.authorization import Authorization
from tastypie.fields import CharField
from tastypie.resources import ModelResource
from expensonator.models import Expense
class ExpenseResource(ModelResource):
tags = CharField()
def dehydrate_tags(self, bundle):
return bundle.obj.tags_as_string()
def save(self, bundle, skip_errors=False):
bundle = super(ExpenseResource, self).save(bundle, skip_errors)
bundle.obj.reset_tags_from_string(bundle.data["tags"])
return bundle
class Meta:
queryset = Expense.objects.all()
excludes = ["created", "updated"]
# WARNING: Tastypie docs say that this is VERY INSECURE!
# For development only!
authorization = Authorization()
## Instruction:
Fix key error when no tags are specified
## Code After:
from tastypie.authorization import Authorization
from tastypie.fields import CharField
from tastypie.resources import ModelResource
from expensonator.models import Expense
class ExpenseResource(ModelResource):
tags = CharField()
def dehydrate_tags(self, bundle):
return bundle.obj.tags_as_string()
def save(self, bundle, skip_errors=False):
bundle = super(ExpenseResource, self).save(bundle, skip_errors)
if "tags" in bundle.data:
bundle.obj.reset_tags_from_string(bundle.data["tags"])
return bundle
class Meta:
queryset = Expense.objects.all()
excludes = ["created", "updated"]
# WARNING: Tastypie docs say that this is VERY INSECURE!
# For development only!
authorization = Authorization()
|
from tastypie.authorization import Authorization
from tastypie.fields import CharField
from tastypie.resources import ModelResource
from expensonator.models import Expense
class ExpenseResource(ModelResource):
tags = CharField()
def dehydrate_tags(self, bundle):
return bundle.obj.tags_as_string()
def save(self, bundle, skip_errors=False):
bundle = super(ExpenseResource, self).save(bundle, skip_errors)
+ if "tags" in bundle.data:
- bundle.obj.reset_tags_from_string(bundle.data["tags"])
+ bundle.obj.reset_tags_from_string(bundle.data["tags"])
? ++++
return bundle
class Meta:
queryset = Expense.objects.all()
excludes = ["created", "updated"]
# WARNING: Tastypie docs say that this is VERY INSECURE!
# For development only!
authorization = Authorization()
|
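The guard this commit adds is plain dict membership testing. A minimal standalone sketch of the failure mode and the fix, using a plain dict as a stand-in for Tastypie's `bundle.data` (no Tastypie objects involved):

```python
# Stand-in for bundle.data; "tags" is optional in the request payload.
def reset_tags_unguarded(data):
    return data["tags"]  # raises KeyError when the client sends no tags

def reset_tags_guarded(data):
    if "tags" in data:   # the membership check the commit adds
        return data["tags"]
    return None          # no-op when tags are absent

assert reset_tags_guarded({"amount": 10}) is None
assert reset_tags_guarded({"tags": "food travel"}) == "food travel"
try:
    reset_tags_unguarded({"amount": 10})
except KeyError:
    print("unguarded lookup raises the KeyError the subject line describes")
```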
5da32c725200d9f3b319be40ae5c2d302dc72249
|
cloudbridge/cloud/providers/azure/test/test_azure_resource_group.py
|
cloudbridge/cloud/providers/azure/test/test_azure_resource_group.py
|
from cloudbridge.cloud.providers.azure.test.helpers import ProviderTestBase
class AzureResourceGroupTestCase(ProviderTestBase):
def test_resource_group_create(self):
resource_group_params = {'location': self.provider.region_name}
rg = self.provider.azure_client. \
create_resource_group(self.provider.resource_group,
resource_group_params)
print("Create Resource - " + str(rg))
self.assertTrue(
rg.name == "cloudbridge",
"Resource Group should be Cloudbridge")
def test_resource_group_get(self):
rg = self.provider.azure_client.get_resource_group('MyGroup')
print("Get Resource - " + str(rg))
self.assertTrue(
rg.name == "testResourceGroup",
"Resource Group should be Cloudbridge")
|
from cloudbridge.cloud.providers.azure.test.helpers import ProviderTestBase
class AzureResourceGroupTestCase(ProviderTestBase):
def test_resource_group_create(self):
resource_group_params = {'location': self.provider.region_name}
rg = self.provider.azure_client. \
create_resource_group(self.provider.resource_group,
resource_group_params)
print("Create Resource - " + str(rg))
self.assertTrue(
rg.name == self.provider.resource_group,
"Resource Group should be {0}".format(rg.name))
def test_resource_group_get(self):
rg = self.provider.azure_client.get_resource_group('MyGroup')
print("Get Resource - " + str(rg))
self.assertTrue(
rg.name == "testResourceGroup",
"Resource Group should be {0}".format(rg.name))
|
Update resource group unit test
|
Update resource group unit test
|
Python
|
mit
|
gvlproject/libcloudbridge,gvlproject/cloudbridge
|
from cloudbridge.cloud.providers.azure.test.helpers import ProviderTestBase
class AzureResourceGroupTestCase(ProviderTestBase):
def test_resource_group_create(self):
resource_group_params = {'location': self.provider.region_name}
rg = self.provider.azure_client. \
create_resource_group(self.provider.resource_group,
resource_group_params)
print("Create Resource - " + str(rg))
self.assertTrue(
- rg.name == "cloudbridge",
+ rg.name == self.provider.resource_group,
- "Resource Group should be Cloudbridge")
+ "Resource Group should be {0}".format(rg.name))
def test_resource_group_get(self):
rg = self.provider.azure_client.get_resource_group('MyGroup')
print("Get Resource - " + str(rg))
self.assertTrue(
rg.name == "testResourceGroup",
- "Resource Group should be Cloudbridge")
+ "Resource Group should be {0}".format(rg.name))
|
Update resource group unit test
|
## Code Before:
from cloudbridge.cloud.providers.azure.test.helpers import ProviderTestBase
class AzureResourceGroupTestCase(ProviderTestBase):
def test_resource_group_create(self):
resource_group_params = {'location': self.provider.region_name}
rg = self.provider.azure_client. \
create_resource_group(self.provider.resource_group,
resource_group_params)
print("Create Resource - " + str(rg))
self.assertTrue(
rg.name == "cloudbridge",
"Resource Group should be Cloudbridge")
def test_resource_group_get(self):
rg = self.provider.azure_client.get_resource_group('MyGroup')
print("Get Resource - " + str(rg))
self.assertTrue(
rg.name == "testResourceGroup",
"Resource Group should be Cloudbridge")
## Instruction:
Update resource group unit test
## Code After:
from cloudbridge.cloud.providers.azure.test.helpers import ProviderTestBase
class AzureResourceGroupTestCase(ProviderTestBase):
def test_resource_group_create(self):
resource_group_params = {'location': self.provider.region_name}
rg = self.provider.azure_client. \
create_resource_group(self.provider.resource_group,
resource_group_params)
print("Create Resource - " + str(rg))
self.assertTrue(
rg.name == self.provider.resource_group,
"Resource Group should be {0}".format(rg.name))
def test_resource_group_get(self):
rg = self.provider.azure_client.get_resource_group('MyGroup')
print("Get Resource - " + str(rg))
self.assertTrue(
rg.name == "testResourceGroup",
"Resource Group should be {0}".format(rg.name))
|
from cloudbridge.cloud.providers.azure.test.helpers import ProviderTestBase
class AzureResourceGroupTestCase(ProviderTestBase):
def test_resource_group_create(self):
resource_group_params = {'location': self.provider.region_name}
rg = self.provider.azure_client. \
create_resource_group(self.provider.resource_group,
resource_group_params)
print("Create Resource - " + str(rg))
self.assertTrue(
- rg.name == "cloudbridge",
+ rg.name == self.provider.resource_group,
- "Resource Group should be Cloudbridge")
? ^^ --- ^^ -
+ "Resource Group should be {0}".format(rg.name))
? ^^^^^^ ^^^^^ ++++ +
def test_resource_group_get(self):
rg = self.provider.azure_client.get_resource_group('MyGroup')
print("Get Resource - " + str(rg))
self.assertTrue(
rg.name == "testResourceGroup",
- "Resource Group should be Cloudbridge")
? ^^ --- ^^ -
+ "Resource Group should be {0}".format(rg.name))
? ^^^^^^ ^^^^^ ++++ +
|
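One detail worth noting: the updated messages interpolate `rg.name`, the value under test, so a failing assertion would print the actual name rather than the expected one. A hedged sketch of the distinction, with `expected` and `actual` as illustrative stand-ins for `self.provider.resource_group` and `rg.name`:

```python
import unittest

class MessageFormattingExample(unittest.TestCase):
    def test_message_uses_expected_value(self):
        expected, actual = "cloudbridge", "cloudbridge"
        # Interpolating the expected value keeps the failure message
        # informative; interpolating the actual value (as the updated test
        # does) yields a message that is true by construction.
        self.assertTrue(
            actual == expected,
            "Resource Group should be {0}, got {1}".format(expected, actual))

if __name__ == "__main__":
    unittest.main()
```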
8200beb4aa68a3e88a95394d2b8146ce264e1055
|
flask_authorization_panda/tests/test_basic_auth.py
|
flask_authorization_panda/tests/test_basic_auth.py
|
import json
from base64 import b64encode
import pytest
from flask import Flask
from flask_authorization_panda import basic_auth
@pytest.fixture
def flask_app():
app = Flask(__name__)
app.config['TESTING'] = True
@app.route('/')
@basic_auth
def hello_world():
return 'Hello World!'
return app
def test_no_credentials_in_application_config(flask_app):
response = flask_app.test_client().get('/', headers={
'Authorization': 'Basic {}'.format(b64encode('admin:secret'))})
assert '500' in response.data
def test_no_credentials_in_request(flask_app):
flask_app.config['basic_auth_credentials'] = dict(username='admin',
password='secret')
response = flask_app.test_client().get('/')
assert "HTTP Basic Auth required for this URL" in response.data
|
from base64 import b64encode
import pytest
from flask import Flask, jsonify
from flask_authorization_panda import basic_auth
@pytest.fixture
def flask_app():
app = Flask(__name__)
app.config['TESTING'] = True
@app.route('/')
@basic_auth
def hello_world():
return jsonify({"statusCode": 200, "message": "Ok"})
return app
def test_no_credentials_in_application_config(flask_app):
response = flask_app.test_client().get('/', headers={
'Authorization': 'Basic {}'.format(b64encode('admin:secret'))})
assert '500' in response.data
def test_no_credentials_in_request(flask_app):
flask_app.config['basic_auth_credentials'] = dict(username='admin',
password='secret')
response = flask_app.test_client().get('/')
assert "HTTP Basic Auth required for this URL" in response.data
def test_basic_auth(flask_app):
flask_app.config['basic_auth_credentials'] = dict(username='admin',
password='secret')
response = flask_app.test_client().get('/', headers={
'Authorization': 'Basic {}'.format(b64encode('admin:secret'))})
assert '200' in response.data
|
Add unit test for successful completion.
|
Add unit test for successful completion.
|
Python
|
mit
|
eikonomega/flask-authorization-panda
|
- import json
from base64 import b64encode
import pytest
- from flask import Flask
+ from flask import Flask, jsonify
from flask_authorization_panda import basic_auth
@pytest.fixture
def flask_app():
app = Flask(__name__)
app.config['TESTING'] = True
@app.route('/')
@basic_auth
def hello_world():
- return 'Hello World!'
+ return jsonify({"statusCode": 200, "message": "Ok"})
return app
def test_no_credentials_in_application_config(flask_app):
response = flask_app.test_client().get('/', headers={
'Authorization': 'Basic {}'.format(b64encode('admin:secret'))})
assert '500' in response.data
def test_no_credentials_in_request(flask_app):
flask_app.config['basic_auth_credentials'] = dict(username='admin',
password='secret')
response = flask_app.test_client().get('/')
assert "HTTP Basic Auth required for this URL" in response.data
+ def test_basic_auth(flask_app):
+ flask_app.config['basic_auth_credentials'] = dict(username='admin',
+ password='secret')
+ response = flask_app.test_client().get('/', headers={
+ 'Authorization': 'Basic {}'.format(b64encode('admin:secret'))})
+ assert '200' in response.data
+
+
|
Add unit test for successful completion.
|
## Code Before:
import json
from base64 import b64encode
import pytest
from flask import Flask
from flask_authorization_panda import basic_auth
@pytest.fixture
def flask_app():
app = Flask(__name__)
app.config['TESTING'] = True
@app.route('/')
@basic_auth
def hello_world():
return 'Hello World!'
return app
def test_no_credentials_in_application_config(flask_app):
response = flask_app.test_client().get('/', headers={
'Authorization': 'Basic {}'.format(b64encode('admin:secret'))})
assert '500' in response.data
def test_no_credentials_in_request(flask_app):
flask_app.config['basic_auth_credentials'] = dict(username='admin',
password='secret')
response = flask_app.test_client().get('/')
assert "HTTP Basic Auth required for this URL" in response.data
## Instruction:
Add unit test for successful completion.
## Code After:
from base64 import b64encode
import pytest
from flask import Flask, jsonify
from flask_authorization_panda import basic_auth
@pytest.fixture
def flask_app():
app = Flask(__name__)
app.config['TESTING'] = True
@app.route('/')
@basic_auth
def hello_world():
return jsonify({"statusCode": 200, "message": "Ok"})
return app
def test_no_credentials_in_application_config(flask_app):
response = flask_app.test_client().get('/', headers={
'Authorization': 'Basic {}'.format(b64encode('admin:secret'))})
assert '500' in response.data
def test_no_credentials_in_request(flask_app):
flask_app.config['basic_auth_credentials'] = dict(username='admin',
password='secret')
response = flask_app.test_client().get('/')
assert "HTTP Basic Auth required for this URL" in response.data
def test_basic_auth(flask_app):
flask_app.config['basic_auth_credentials'] = dict(username='admin',
password='secret')
response = flask_app.test_client().get('/', headers={
'Authorization': 'Basic {}'.format(b64encode('admin:secret'))})
assert '200' in response.data
|
- import json
from base64 import b64encode
import pytest
- from flask import Flask
+ from flask import Flask, jsonify
? +++++++++
from flask_authorization_panda import basic_auth
@pytest.fixture
def flask_app():
app = Flask(__name__)
app.config['TESTING'] = True
@app.route('/')
@basic_auth
def hello_world():
- return 'Hello World!'
+ return jsonify({"statusCode": 200, "message": "Ok"})
return app
def test_no_credentials_in_application_config(flask_app):
response = flask_app.test_client().get('/', headers={
'Authorization': 'Basic {}'.format(b64encode('admin:secret'))})
assert '500' in response.data
def test_no_credentials_in_request(flask_app):
flask_app.config['basic_auth_credentials'] = dict(username='admin',
password='secret')
response = flask_app.test_client().get('/')
assert "HTTP Basic Auth required for this URL" in response.data
+ def test_basic_auth(flask_app):
+ flask_app.config['basic_auth_credentials'] = dict(username='admin',
+ password='secret')
+ response = flask_app.test_client().get('/', headers={
+ 'Authorization': 'Basic {}'.format(b64encode('admin:secret'))})
+ assert '200' in response.data
+
+
|
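The header construction in these tests relies on `b64encode` accepting a `str`, which is Python 2 behaviour; under Python 3 the credentials must be bytes. A small sketch of a version-safe helper (the function name is illustrative, not part of the package under test):

```python
from base64 import b64encode

def basic_auth_header(username, password):
    # Encode to bytes for Python 3; decode the result back to str
    # so it can be placed in a header value.
    token = b64encode("{0}:{1}".format(username, password).encode("utf-8"))
    return {"Authorization": "Basic {0}".format(token.decode("ascii"))}

print(basic_auth_header("admin", "secret"))
# {'Authorization': 'Basic YWRtaW46c2VjcmV0'}
```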
86678fce3817388641db3d0f4002b3f8d409377d
|
pdcupdater/tests/handler_tests/test_kerberos_auth.py
|
pdcupdater/tests/handler_tests/test_kerberos_auth.py
|
import pytest
import requests_kerberos
from mock import patch, Mock
import pdcupdater.utils
from test.test_support import EnvironmentVarGuard
import os
class TestKerberosAuthentication(object):
@patch('os.path.exists', return_value=True)
@patch('requests_kerberos.HTTPKerberosAuth')
@patch('requests.get')
def test_get_token(self, requests_get, kerb_auth, os_path):
self.url = 'https://pdc.fedoraproject.org/rest_api/v1/'
set_env=patch.dict(os.environ,{'KRB5_CLIENT_KTNAME': '/etc/foo.keytab'})
requests_rv = Mock()
requests_rv.json.return_value = {"token": "12345"}
requests_get.return_value = requests_rv
set_env.start()
rv = pdcupdater.utils.get_token(self.url,
'/etc/foo.keytab')
set_env.stop()
assert rv == '12345'
|
import os
from mock import patch, Mock
import pdcupdater.utils
class TestKerberosAuthentication(object):
@patch('os.path.exists', return_value=True)
@patch('requests_kerberos.HTTPKerberosAuth')
@patch('requests.get')
def test_get_token(self, requests_get, kerb_auth, os_path):
self.url = 'https://pdc.fedoraproject.org/rest_api/v1/'
set_env = patch.dict(
os.environ, {'KRB5_CLIENT_KTNAME': '/etc/foo.keytab'})
requests_rv = Mock()
requests_rv.json.return_value = {"token": "12345"}
requests_get.return_value = requests_rv
set_env.start()
rv = pdcupdater.utils.get_token(self.url, '/etc/foo.keytab')
set_env.stop()
assert rv == '12345'
|
Remove invalid imports for TestKerberosAuthentication and fix its styling
|
Remove invalid imports for TestKerberosAuthentication and fix its styling
|
Python
|
lgpl-2.1
|
fedora-infra/pdc-updater
|
- import pytest
- import requests_kerberos
- from mock import patch, Mock
- import pdcupdater.utils
- from test.test_support import EnvironmentVarGuard
import os
+ from mock import patch, Mock
+
+ import pdcupdater.utils
+
+
class TestKerberosAuthentication(object):
+ @patch('os.path.exists', return_value=True)
+ @patch('requests_kerberos.HTTPKerberosAuth')
+ @patch('requests.get')
+ def test_get_token(self, requests_get, kerb_auth, os_path):
+ self.url = 'https://pdc.fedoraproject.org/rest_api/v1/'
+ set_env = patch.dict(
+ os.environ, {'KRB5_CLIENT_KTNAME': '/etc/foo.keytab'})
+ requests_rv = Mock()
+ requests_rv.json.return_value = {"token": "12345"}
+ requests_get.return_value = requests_rv
+ set_env.start()
+ rv = pdcupdater.utils.get_token(self.url, '/etc/foo.keytab')
+ set_env.stop()
+ assert rv == '12345'
- @patch('os.path.exists', return_value=True)
- @patch('requests_kerberos.HTTPKerberosAuth')
- @patch('requests.get')
- def test_get_token(self, requests_get, kerb_auth, os_path):
- self.url = 'https://pdc.fedoraproject.org/rest_api/v1/'
- set_env=patch.dict(os.environ,{'KRB5_CLIENT_KTNAME': '/etc/foo.keytab'})
- requests_rv = Mock()
- requests_rv.json.return_value = {"token": "12345"}
- requests_get.return_value = requests_rv
- set_env.start()
- rv = pdcupdater.utils.get_token(self.url,
- '/etc/foo.keytab')
- set_env.stop()
- assert rv == '12345'
-
|
Remove invalid imports for TestKerberosAuthentication and fix its styling
|
## Code Before:
import pytest
import requests_kerberos
from mock import patch, Mock
import pdcupdater.utils
from test.test_support import EnvironmentVarGuard
import os
class TestKerberosAuthentication(object):
@patch('os.path.exists', return_value=True)
@patch('requests_kerberos.HTTPKerberosAuth')
@patch('requests.get')
def test_get_token(self, requests_get, kerb_auth, os_path):
self.url = 'https://pdc.fedoraproject.org/rest_api/v1/'
set_env=patch.dict(os.environ,{'KRB5_CLIENT_KTNAME': '/etc/foo.keytab'})
requests_rv = Mock()
requests_rv.json.return_value = {"token": "12345"}
requests_get.return_value = requests_rv
set_env.start()
rv = pdcupdater.utils.get_token(self.url,
'/etc/foo.keytab')
set_env.stop()
assert rv == '12345'
## Instruction:
Remove invalid imports for TestKerberosAuthentication and fix its styling
## Code After:
import os
from mock import patch, Mock
import pdcupdater.utils
class TestKerberosAuthentication(object):
@patch('os.path.exists', return_value=True)
@patch('requests_kerberos.HTTPKerberosAuth')
@patch('requests.get')
def test_get_token(self, requests_get, kerb_auth, os_path):
self.url = 'https://pdc.fedoraproject.org/rest_api/v1/'
set_env = patch.dict(
os.environ, {'KRB5_CLIENT_KTNAME': '/etc/foo.keytab'})
requests_rv = Mock()
requests_rv.json.return_value = {"token": "12345"}
requests_get.return_value = requests_rv
set_env.start()
rv = pdcupdater.utils.get_token(self.url, '/etc/foo.keytab')
set_env.stop()
assert rv == '12345'
|
- import pytest
- import requests_kerberos
- from mock import patch, Mock
- import pdcupdater.utils
- from test.test_support import EnvironmentVarGuard
import os
+ from mock import patch, Mock
+
+ import pdcupdater.utils
+
+
class TestKerberosAuthentication(object):
-
- @patch('os.path.exists', return_value=True)
? ^
+ @patch('os.path.exists', return_value=True)
? ^^^^
- @patch('requests_kerberos.HTTPKerberosAuth')
? ^
+ @patch('requests_kerberos.HTTPKerberosAuth')
? ^^^^
- @patch('requests.get')
? ^
+ @patch('requests.get')
? ^^^^
- def test_get_token(self, requests_get, kerb_auth, os_path):
? ^
+ def test_get_token(self, requests_get, kerb_auth, os_path):
? ^^^^
- self.url = 'https://pdc.fedoraproject.org/rest_api/v1/'
? ^^
+ self.url = 'https://pdc.fedoraproject.org/rest_api/v1/'
? ^^^^^^^^
+ set_env = patch.dict(
- set_env=patch.dict(os.environ,{'KRB5_CLIENT_KTNAME': '/etc/foo.keytab'})
? ^^^^^^^^^^^^^^^^^^^^^
+ os.environ, {'KRB5_CLIENT_KTNAME': '/etc/foo.keytab'})
? ^^^^^^^^^^^^ +
- requests_rv = Mock()
? ^^
+ requests_rv = Mock()
? ^^^^^^^^
- requests_rv.json.return_value = {"token": "12345"}
? ^^
+ requests_rv.json.return_value = {"token": "12345"}
? ^^^^^^^^
- requests_get.return_value = requests_rv
? ^^
+ requests_get.return_value = requests_rv
? ^^^^^^^^
- set_env.start()
? ^^
+ set_env.start()
? ^^^^^^^^
+ rv = pdcupdater.utils.get_token(self.url, '/etc/foo.keytab')
+ set_env.stop()
- rv = pdcupdater.utils.get_token(self.url,
- '/etc/foo.keytab')
- set_env.stop()
- assert rv == '12345'
? ^^
+ assert rv == '12345'
? ^^^^^^^^
|
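The `patch.dict(os.environ, ...)` call used here swaps the variable in for the duration of the patch and restores the previous environment afterwards. A self-contained sketch of the same pattern using the stdlib `unittest.mock` (the test itself imports the external `mock` package), written with the context-manager form instead of explicit `start()`/`stop()`:

```python
import os
from unittest.mock import patch

def read_keytab_path():
    return os.environ.get("KRB5_CLIENT_KTNAME")

with patch.dict(os.environ, {"KRB5_CLIENT_KTNAME": "/etc/foo.keytab"}):
    # Inside the block the patched value is visible.
    assert read_keytab_path() == "/etc/foo.keytab"

# Outside the block the variable is gone again (unless it was already set).
print(read_keytab_path())
```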
c559c639f7c3deea4e166dd2f6fee1cb8a1297b7
|
tests/integration/test_metrics.py
|
tests/integration/test_metrics.py
|
from kaneda import Metrics
class TestMetrics(object):
def test_elasticsearch_gauge(self, elasticsearch_backend):
value = 42
metrics = Metrics(backend=elasticsearch_backend)
metrics.gauge('test_gauge', value)
result = elasticsearch_backend.client.search(index=elasticsearch_backend._get_index_name(), doc_type='gauge')
assert result
assert result['hits']['hits'][0]['_source']['value'] == value
assert result['hits']['hits'][0]['_source']['name'] == 'test_gauge'
def test_mongo_gauge(self, mongo_backend):
value = 42
metrics = Metrics(backend=mongo_backend)
metrics.gauge('test_gauge', value)
result = mongo_backend.collection.find_one({"metric": 'gauge'})
assert result
assert result['value'] == value
assert result['name'] == 'test_gauge'
def test_logger_gauge(self, logger_backend, logger_filename):
value = 42
metrics = Metrics(backend=logger_backend)
metrics.gauge('test_gauge', value)
with open(logger_filename) as f:
lines = f.readlines()
assert lines
result = lines[-1].split(' - ')[2]
assert result
assert str(value) in result
assert 'test_gauge' in result
|
from kaneda import Metrics
class TestMetrics(object):
def test_elasticsearch_metric(self, elasticsearch_backend):
metrics = Metrics(backend=elasticsearch_backend)
result = metrics.gauge('test_gauge', 42)
assert result
assert result['_id']
def test_mongo_metric(self, mongo_backend):
metrics = Metrics(backend=mongo_backend)
result = metrics.gauge('test_gauge', 42)
assert result
assert result.inserted_id
def test_logger_metric(self, logger_backend, logger_filename):
metrics = Metrics(backend=logger_backend)
metrics.gauge('test_gauge', 42)
with open(logger_filename) as f:
lines = f.readlines()
assert lines
result = lines[-1].split(' - ')[2]
assert result
assert 'test_gauge' in result
|
Change integration test of metrics
|
Change integration test of metrics
|
Python
|
mit
|
APSL/kaneda
|
from kaneda import Metrics
class TestMetrics(object):
- def test_elasticsearch_gauge(self, elasticsearch_backend):
+ def test_elasticsearch_metric(self, elasticsearch_backend):
- value = 42
metrics = Metrics(backend=elasticsearch_backend)
- metrics.gauge('test_gauge', value)
+ result = metrics.gauge('test_gauge', 42)
- result = elasticsearch_backend.client.search(index=elasticsearch_backend._get_index_name(), doc_type='gauge')
assert result
+ assert result['_id']
- assert result['hits']['hits'][0]['_source']['value'] == value
- assert result['hits']['hits'][0]['_source']['name'] == 'test_gauge'
- def test_mongo_gauge(self, mongo_backend):
+ def test_mongo_metric(self, mongo_backend):
- value = 42
metrics = Metrics(backend=mongo_backend)
- metrics.gauge('test_gauge', value)
+ result = metrics.gauge('test_gauge', 42)
- result = mongo_backend.collection.find_one({"metric": 'gauge'})
assert result
+ assert result.inserted_id
- assert result['value'] == value
- assert result['name'] == 'test_gauge'
- def test_logger_gauge(self, logger_backend, logger_filename):
+ def test_logger_metric(self, logger_backend, logger_filename):
- value = 42
metrics = Metrics(backend=logger_backend)
- metrics.gauge('test_gauge', value)
+ metrics.gauge('test_gauge', 42)
with open(logger_filename) as f:
lines = f.readlines()
assert lines
result = lines[-1].split(' - ')[2]
assert result
- assert str(value) in result
assert 'test_gauge' in result
|
Change integration test of metrics
|
## Code Before:
from kaneda import Metrics
class TestMetrics(object):
def test_elasticsearch_gauge(self, elasticsearch_backend):
value = 42
metrics = Metrics(backend=elasticsearch_backend)
metrics.gauge('test_gauge', value)
result = elasticsearch_backend.client.search(index=elasticsearch_backend._get_index_name(), doc_type='gauge')
assert result
assert result['hits']['hits'][0]['_source']['value'] == value
assert result['hits']['hits'][0]['_source']['name'] == 'test_gauge'
def test_mongo_gauge(self, mongo_backend):
value = 42
metrics = Metrics(backend=mongo_backend)
metrics.gauge('test_gauge', value)
result = mongo_backend.collection.find_one({"metric": 'gauge'})
assert result
assert result['value'] == value
assert result['name'] == 'test_gauge'
def test_logger_gauge(self, logger_backend, logger_filename):
value = 42
metrics = Metrics(backend=logger_backend)
metrics.gauge('test_gauge', value)
with open(logger_filename) as f:
lines = f.readlines()
assert lines
result = lines[-1].split(' - ')[2]
assert result
assert str(value) in result
assert 'test_gauge' in result
## Instruction:
Change integration test of metrics
## Code After:
from kaneda import Metrics
class TestMetrics(object):
def test_elasticsearch_metric(self, elasticsearch_backend):
metrics = Metrics(backend=elasticsearch_backend)
result = metrics.gauge('test_gauge', 42)
assert result
assert result['_id']
def test_mongo_metric(self, mongo_backend):
metrics = Metrics(backend=mongo_backend)
result = metrics.gauge('test_gauge', 42)
assert result
assert result.inserted_id
def test_logger_metric(self, logger_backend, logger_filename):
metrics = Metrics(backend=logger_backend)
metrics.gauge('test_gauge', 42)
with open(logger_filename) as f:
lines = f.readlines()
assert lines
result = lines[-1].split(' - ')[2]
assert result
assert 'test_gauge' in result
|
from kaneda import Metrics
class TestMetrics(object):
- def test_elasticsearch_gauge(self, elasticsearch_backend):
? ^^^^
+ def test_elasticsearch_metric(self, elasticsearch_backend):
? ^ ++++
- value = 42
metrics = Metrics(backend=elasticsearch_backend)
- metrics.gauge('test_gauge', value)
? ^^^^^
+ result = metrics.gauge('test_gauge', 42)
? +++++++++ ^^
- result = elasticsearch_backend.client.search(index=elasticsearch_backend._get_index_name(), doc_type='gauge')
assert result
+ assert result['_id']
- assert result['hits']['hits'][0]['_source']['value'] == value
- assert result['hits']['hits'][0]['_source']['name'] == 'test_gauge'
- def test_mongo_gauge(self, mongo_backend):
? ^^^^
+ def test_mongo_metric(self, mongo_backend):
? ^ ++++
- value = 42
metrics = Metrics(backend=mongo_backend)
- metrics.gauge('test_gauge', value)
? ^^^^^
+ result = metrics.gauge('test_gauge', 42)
? +++++++++ ^^
- result = mongo_backend.collection.find_one({"metric": 'gauge'})
assert result
+ assert result.inserted_id
- assert result['value'] == value
- assert result['name'] == 'test_gauge'
- def test_logger_gauge(self, logger_backend, logger_filename):
? ^^^^
+ def test_logger_metric(self, logger_backend, logger_filename):
? ^ ++++
- value = 42
metrics = Metrics(backend=logger_backend)
- metrics.gauge('test_gauge', value)
? ^^^^^
+ metrics.gauge('test_gauge', 42)
? ^^
with open(logger_filename) as f:
lines = f.readlines()
assert lines
result = lines[-1].split(' - ')[2]
assert result
- assert str(value) in result
assert 'test_gauge' in result
|
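The rewritten tests assert on whatever `metrics.gauge()` returns, which only works if the backend's write result is propagated back through `Metrics`. An illustrative sketch of that contract with a fake backend (none of these class internals are the real kaneda API):

```python
class FakeBackend(object):
    def report(self, name, metric, value):
        # A real backend would persist the metric and return the driver's
        # response (e.g. an Elasticsearch document or a Mongo insert result).
        return {"_id": "abc123", "name": name, "metric": metric, "value": value}

class Metrics(object):
    def __init__(self, backend):
        self.backend = backend

    def gauge(self, name, value):
        return self.backend.report(name, "gauge", value)  # propagate the result

result = Metrics(FakeBackend()).gauge("test_gauge", 42)
assert result and result["_id"]  # mirrors the new Elasticsearch assertion
```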
2b1cd9a58aa51ef53996dc1897a7a0e50f29d7ca
|
isitopenaccess/plugins/bmc.py
|
isitopenaccess/plugins/bmc.py
|
import requests
from copy import deepcopy
from datetime import datetime
from isitopenaccess.plugins import string_matcher
def page_license(record):
"""
To respond to the provider identifier: http://www.biomedcentral.com
This should determine the licence conditions of the BMC article and populate
the record['bibjson']['license'] (note the US spelling) field.
"""
# licensing statements to look for on this publisher's pages
# take the form of {statement: meaning}
# where meaning['type'] identifies the license (see licenses.py)
# and meaning['version'] identifies the license version (if available)
lic_statements = [
{"This is an Open Access article distributed under the terms of the Creative Commons Attribution License (<a href='http://creativecommons.org/licenses/by/2.0'>http://creativecommons.org/licenses/by/2.0</a>), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.":
{'type': 'cc-by', 'version':'2.0', 'open_access': True, 'BY': True, 'NC': False, 'SA': False, 'ND': False}
}
]
string_matcher.simple_extract(lic_statements, record)
|
import requests
from copy import deepcopy
from datetime import datetime
from isitopenaccess.plugins import string_matcher
def page_license(record):
"""
To respond to the provider identifier: http://www.biomedcentral.com
This should determine the licence conditions of the BMC article and populate
the record['bibjson']['license'] (note the US spelling) field.
"""
# licensing statements to look for on this publisher's pages
# take the form of {statement: meaning}
# where meaning['type'] identifies the license (see licenses.py)
# and meaning['version'] identifies the license version (if available)
lic_statements = [
{"This is an Open Access article distributed under the terms of the Creative Commons Attribution License (<a href='http://creativecommons.org/licenses/by/2.0'>http://creativecommons.org/licenses/by/2.0</a>), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.":
{'type': 'cc-by', 'version':'2.0', 'open_access': True, 'BY': True, 'NC': False, 'SA': False, 'ND': False,
# also declare some properties which override info about this license in the licenses list (see licenses module)
'url': 'http://creativecommons.org/licenses/by/2.0'}
}
]
string_matcher.simple_extract(lic_statements, record)
|
ADD MISSING FILE TO PREV COMMIT "modify BMC plugin: overwrite URL for CC-BY license. We have a MORE specific URL (from the license statement on the BMC pages) than the Open Definition one"
|
ADD MISSING FILE TO PREV COMMIT "modify BMC plugin: overwrite URL for CC-BY license. We have a MORE specific URL (from the license statement on the BMC pages) than the Open Definition one"
|
Python
|
bsd-3-clause
|
CottageLabs/OpenArticleGauge,CottageLabs/OpenArticleGauge,CottageLabs/OpenArticleGauge
|
import requests
from copy import deepcopy
from datetime import datetime
from isitopenaccess.plugins import string_matcher
def page_license(record):
"""
To respond to the provider identifier: http://www.biomedcentral.com
This should determine the licence conditions of the BMC article and populate
the record['bibjson']['license'] (note the US spelling) field.
"""
# licensing statements to look for on this publisher's pages
# take the form of {statement: meaning}
# where meaning['type'] identifies the license (see licenses.py)
# and meaning['version'] identifies the license version (if available)
lic_statements = [
{"This is an Open Access article distributed under the terms of the Creative Commons Attribution License (<a href='http://creativecommons.org/licenses/by/2.0'>http://creativecommons.org/licenses/by/2.0</a>), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.":
- {'type': 'cc-by', 'version':'2.0', 'open_access': True, 'BY': True, 'NC': False, 'SA': False, 'ND': False}
+ {'type': 'cc-by', 'version':'2.0', 'open_access': True, 'BY': True, 'NC': False, 'SA': False, 'ND': False,
+ # also declare some properties which override info about this license in the licenses list (see licenses module)
+ 'url': 'http://creativecommons.org/licenses/by/2.0'}
}
]
string_matcher.simple_extract(lic_statements, record)
|
ADD MISSING FILE TO PREV COMMIT "modify BMC plugin: overwrite URL for CC-BY license. We have a MORE specific URL (from the license statement on the BMC pages) than the Open Definition one"
|
## Code Before:
import requests
from copy import deepcopy
from datetime import datetime
from isitopenaccess.plugins import string_matcher
def page_license(record):
"""
To respond to the provider identifier: http://www.biomedcentral.com
This should determine the licence conditions of the BMC article and populate
the record['bibjson']['license'] (note the US spelling) field.
"""
# licensing statements to look for on this publisher's pages
# take the form of {statement: meaning}
# where meaning['type'] identifies the license (see licenses.py)
# and meaning['version'] identifies the license version (if available)
lic_statements = [
{"This is an Open Access article distributed under the terms of the Creative Commons Attribution License (<a href='http://creativecommons.org/licenses/by/2.0'>http://creativecommons.org/licenses/by/2.0</a>), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.":
{'type': 'cc-by', 'version':'2.0', 'open_access': True, 'BY': True, 'NC': False, 'SA': False, 'ND': False}
}
]
string_matcher.simple_extract(lic_statements, record)
## Instruction:
ADD MISSING FILE TO PREV COMMIT "modify BMC plugin: overwrite URL for CC-BY license. We have a MORE specific URL (from the license statement on the BMC pages) than the Open Definition one"
## Code After:
import requests
from copy import deepcopy
from datetime import datetime
from isitopenaccess.plugins import string_matcher
def page_license(record):
"""
To respond to the provider identifier: http://www.biomedcentral.com
This should determine the licence conditions of the BMC article and populate
the record['bibjson']['license'] (note the US spelling) field.
"""
# licensing statements to look for on this publisher's pages
# take the form of {statement: meaning}
# where meaning['type'] identifies the license (see licenses.py)
# and meaning['version'] identifies the license version (if available)
lic_statements = [
{"This is an Open Access article distributed under the terms of the Creative Commons Attribution License (<a href='http://creativecommons.org/licenses/by/2.0'>http://creativecommons.org/licenses/by/2.0</a>), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.":
{'type': 'cc-by', 'version':'2.0', 'open_access': True, 'BY': True, 'NC': False, 'SA': False, 'ND': False,
# also declare some properties which override info about this license in the licenses list (see licenses module)
'url': 'http://creativecommons.org/licenses/by/2.0'}
}
]
string_matcher.simple_extract(lic_statements, record)
|
import requests
from copy import deepcopy
from datetime import datetime
from isitopenaccess.plugins import string_matcher
def page_license(record):
"""
To respond to the provider identifier: http://www.biomedcentral.com
This should determine the licence conditions of the BMC article and populate
the record['bibjson']['license'] (note the US spelling) field.
"""
# licensing statements to look for on this publisher's pages
# take the form of {statement: meaning}
# where meaning['type'] identifies the license (see licenses.py)
# and meaning['version'] identifies the license version (if available)
lic_statements = [
{"This is an Open Access article distributed under the terms of the Creative Commons Attribution License (<a href='http://creativecommons.org/licenses/by/2.0'>http://creativecommons.org/licenses/by/2.0</a>), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.":
- {'type': 'cc-by', 'version':'2.0', 'open_access': True, 'BY': True, 'NC': False, 'SA': False, 'ND': False}
? ^
+ {'type': 'cc-by', 'version':'2.0', 'open_access': True, 'BY': True, 'NC': False, 'SA': False, 'ND': False,
? ^
+ # also declare some properties which override info about this license in the licenses list (see licenses module)
+ 'url': 'http://creativecommons.org/licenses/by/2.0'}
}
]
string_matcher.simple_extract(lic_statements, record)
|
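The added `url` key works because properties declared on a matched statement override the generic entry for that license elsewhere in the system. A hedged sketch of that merge order (the registry entry below is an illustrative stand-in, not the real licenses module):

```python
# Generic registry entry for cc-by (illustrative stand-in for licenses.py).
LICENSES = {"cc-by": {"url": "http://opendefinition.org/licenses/cc-by/"}}

statement_meaning = {
    "type": "cc-by",
    "version": "2.0",
    # More specific URL taken from the statement on the publisher's page.
    "url": "http://creativecommons.org/licenses/by/2.0",
}

license_record = dict(LICENSES[statement_meaning["type"]])
license_record.update(statement_meaning)  # statement-level keys win
assert license_record["url"] == "http://creativecommons.org/licenses/by/2.0"
```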
0d6706383b6414459cf158b213f4102fa3452b5a
|
pmxbot/slack.py
|
pmxbot/slack.py
|
import time
import importlib
import pmxbot
class Bot(pmxbot.core.Bot):
def __init__(self, server, port, nickname, channels, password=None):
token = pmxbot.config['slack token']
sc = importlib.import_module('slackclient')
self.client = sc.SlackClient(token)
def start(self):
res = self.client.rtm_connect()
assert res, "Error connecting"
while True:
for msg in self.client.rtm_read():
self.handle_message(msg)
self.handle_scheduled_tasks()
time.sleep(0.1)
def handle_message(self, msg):
if msg.get('type') != 'message':
return
channel = self.client.server.channels.find(msg['channel']).name
nick = self.client.server.users.find(msg['user']).name
self.handle_action(channel, nick, msg['text'])
def handle_scheduled_tasks(self):
"stubbed"
def transmit(self, channel, message):
channel = self.client.server.channels.find(channel)
channel.send_message(message)
|
import time
import importlib
from tempora import schedule
import pmxbot
class Bot(pmxbot.core.Bot):
def __init__(self, server, port, nickname, channels, password=None):
token = pmxbot.config['slack token']
sc = importlib.import_module('slackclient')
self.client = sc.SlackClient(token)
self.scheduler = schedule.CallbackScheduler(self.handle_scheduled)
def start(self):
res = self.client.rtm_connect()
assert res, "Error connecting"
self.init_schedule(self.scheduler)
while True:
for msg in self.client.rtm_read():
self.handle_message(msg)
self.scheduler.run_pending()
time.sleep(0.1)
def handle_message(self, msg):
if msg.get('type') != 'message':
return
channel = self.client.server.channels.find(msg['channel']).name
nick = self.client.server.users.find(msg['user']).name
self.handle_action(channel, nick, msg['text'])
def transmit(self, channel, message):
channel = self.client.server.channels.find(channel)
channel.send_message(message)
|
Implement scheduled task handling in Slack
|
Implement scheduled task handling in Slack
|
Python
|
mit
|
yougov/pmxbot,yougov/pmxbot,yougov/pmxbot
|
import time
import importlib
+
+ from tempora import schedule
import pmxbot
class Bot(pmxbot.core.Bot):
def __init__(self, server, port, nickname, channels, password=None):
token = pmxbot.config['slack token']
sc = importlib.import_module('slackclient')
self.client = sc.SlackClient(token)
+ self.scheduler = schedule.CallbackScheduler(self.handle_scheduled)
def start(self):
res = self.client.rtm_connect()
assert res, "Error connecting"
+ self.init_schedule(self.scheduler)
while True:
for msg in self.client.rtm_read():
self.handle_message(msg)
- self.handle_scheduled_tasks()
+ self.scheduler.run_pending()
time.sleep(0.1)
def handle_message(self, msg):
if msg.get('type') != 'message':
return
channel = self.client.server.channels.find(msg['channel']).name
nick = self.client.server.users.find(msg['user']).name
self.handle_action(channel, nick, msg['text'])
- def handle_scheduled_tasks(self):
- "stubbed"
-
def transmit(self, channel, message):
channel = self.client.server.channels.find(channel)
channel.send_message(message)
|
Implement scheduled task handling in Slack
|
## Code Before:
import time
import importlib
import pmxbot
class Bot(pmxbot.core.Bot):
def __init__(self, server, port, nickname, channels, password=None):
token = pmxbot.config['slack token']
sc = importlib.import_module('slackclient')
self.client = sc.SlackClient(token)
def start(self):
res = self.client.rtm_connect()
assert res, "Error connecting"
while True:
for msg in self.client.rtm_read():
self.handle_message(msg)
self.handle_scheduled_tasks()
time.sleep(0.1)
def handle_message(self, msg):
if msg.get('type') != 'message':
return
channel = self.client.server.channels.find(msg['channel']).name
nick = self.client.server.users.find(msg['user']).name
self.handle_action(channel, nick, msg['text'])
def handle_scheduled_tasks(self):
"stubbed"
def transmit(self, channel, message):
channel = self.client.server.channels.find(channel)
channel.send_message(message)
## Instruction:
Implement scheduled task handling in Slack
## Code After:
import time
import importlib
from tempora import schedule
import pmxbot
class Bot(pmxbot.core.Bot):
def __init__(self, server, port, nickname, channels, password=None):
token = pmxbot.config['slack token']
sc = importlib.import_module('slackclient')
self.client = sc.SlackClient(token)
self.scheduler = schedule.CallbackScheduler(self.handle_scheduled)
def start(self):
res = self.client.rtm_connect()
assert res, "Error connecting"
self.init_schedule(self.scheduler)
while True:
for msg in self.client.rtm_read():
self.handle_message(msg)
self.scheduler.run_pending()
time.sleep(0.1)
def handle_message(self, msg):
if msg.get('type') != 'message':
return
channel = self.client.server.channels.find(msg['channel']).name
nick = self.client.server.users.find(msg['user']).name
self.handle_action(channel, nick, msg['text'])
def transmit(self, channel, message):
channel = self.client.server.channels.find(channel)
channel.send_message(message)
|
import time
import importlib
+
+ from tempora import schedule
import pmxbot
class Bot(pmxbot.core.Bot):
def __init__(self, server, port, nickname, channels, password=None):
token = pmxbot.config['slack token']
sc = importlib.import_module('slackclient')
self.client = sc.SlackClient(token)
+ self.scheduler = schedule.CallbackScheduler(self.handle_scheduled)
def start(self):
res = self.client.rtm_connect()
assert res, "Error connecting"
+ self.init_schedule(self.scheduler)
while True:
for msg in self.client.rtm_read():
self.handle_message(msg)
- self.handle_scheduled_tasks()
+ self.scheduler.run_pending()
time.sleep(0.1)
def handle_message(self, msg):
if msg.get('type') != 'message':
return
channel = self.client.server.channels.find(msg['channel']).name
nick = self.client.server.users.find(msg['user']).name
self.handle_action(channel, nick, msg['text'])
- def handle_scheduled_tasks(self):
- "stubbed"
-
def transmit(self, channel, message):
channel = self.client.server.channels.find(channel)
channel.send_message(message)
|
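The loop now polls `scheduler.run_pending()` on every tick, so scheduled commands fire from the same thread that reads Slack messages. A minimal sketch of that polling shape; this mimics the role of tempora's `CallbackScheduler` rather than reproducing its API:

```python
import time

class PollingScheduler(object):
    """Toy scheduler: holds (due_time, command) pairs, dispatches when due."""

    def __init__(self, dispatch):
        self.dispatch = dispatch
        self.pending = []

    def add(self, delay_seconds, command):
        self.pending.append((time.time() + delay_seconds, command))

    def run_pending(self):
        now = time.time()
        due = [cmd for when, cmd in self.pending if when <= now]
        self.pending = [(when, cmd) for when, cmd in self.pending if when > now]
        for cmd in due:
            self.dispatch(cmd)

scheduler = PollingScheduler(print)
scheduler.add(0.0, "announce the build in #dev")
scheduler.run_pending()  # dispatches immediately since the delay was zero
```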
b99a8e2fe4a4d26b8b9dfbc4b3a9effad9c89f90
|
calexicon/dates/tests/test_bce.py
|
calexicon/dates/tests/test_bce.py
|
import unittest
from calexicon.dates import BCEDate
class TestBCEDate(unittest.TestCase):
def test_make_bce_date(self):
bd = BCEDate(-4713, 1, 1)
self.assertEqual(bd.julian_representation(), (-4713, 1, 1))
def test_equality(self):
self.assertEqual(BCEDate(-44, 3, 15), BCEDate(-44, 3, 15))
|
import unittest
from datetime import timedelta
from calexicon.dates import BCEDate
class TestBCEDate(unittest.TestCase):
def test_make_bce_date(self):
bd = BCEDate(-4713, 1, 1)
self.assertEqual(bd.julian_representation(), (-4713, 1, 1))
def test_equality(self):
self.assertEqual(BCEDate(-44, 3, 15), BCEDate(-44, 3, 15))
def test_subtraction(self):
self.assertEqual(timedelta(days=4), BCEDate(-44, 3, 15) - BCEDate(-44, 3, 11))
self.assertEqual(timedelta(days=33), BCEDate(-44, 3, 15) - BCEDate(-44, 2, 11))
|
Add tests for the subtraction operator for BCEDate.
|
Add tests for the subtraction operator for BCEDate.
|
Python
|
apache-2.0
|
jwg4/calexicon,jwg4/qual
|
import unittest
+
+ from datetime import timedelta
from calexicon.dates import BCEDate
class TestBCEDate(unittest.TestCase):
def test_make_bce_date(self):
bd = BCEDate(-4713, 1, 1)
self.assertEqual(bd.julian_representation(), (-4713, 1, 1))
def test_equality(self):
self.assertEqual(BCEDate(-44, 3, 15), BCEDate(-44, 3, 15))
+ def test_subtraction(self):
+ self.assertEqual(timedelta(days=4), BCEDate(-44, 3, 15) - BCEDate(-44, 3, 11))
+ self.assertEqual(timedelta(days=33), BCEDate(-44, 3, 15) - BCEDate(-44, 2, 11))
+
|
Add tests for the subtraction operator for BCEDate.
|
## Code Before:
import unittest
from calexicon.dates import BCEDate
class TestBCEDate(unittest.TestCase):
def test_make_bce_date(self):
bd = BCEDate(-4713, 1, 1)
self.assertEqual(bd.julian_representation(), (-4713, 1, 1))
def test_equality(self):
self.assertEqual(BCEDate(-44, 3, 15), BCEDate(-44, 3, 15))
## Instruction:
Add tests for the subtraction operator for BCEDate.
## Code After:
import unittest
from datetime import timedelta
from calexicon.dates import BCEDate
class TestBCEDate(unittest.TestCase):
def test_make_bce_date(self):
bd = BCEDate(-4713, 1, 1)
self.assertEqual(bd.julian_representation(), (-4713, 1, 1))
def test_equality(self):
self.assertEqual(BCEDate(-44, 3, 15), BCEDate(-44, 3, 15))
def test_subtraction(self):
self.assertEqual(timedelta(days=4), BCEDate(-44, 3, 15) - BCEDate(-44, 3, 11))
self.assertEqual(timedelta(days=33), BCEDate(-44, 3, 15) - BCEDate(-44, 2, 11))
|
import unittest
+
+ from datetime import timedelta
from calexicon.dates import BCEDate
class TestBCEDate(unittest.TestCase):
def test_make_bce_date(self):
bd = BCEDate(-4713, 1, 1)
self.assertEqual(bd.julian_representation(), (-4713, 1, 1))
def test_equality(self):
self.assertEqual(BCEDate(-44, 3, 15), BCEDate(-44, 3, 15))
+
+ def test_subtraction(self):
+ self.assertEqual(timedelta(days=4), BCEDate(-44, 3, 15) - BCEDate(-44, 3, 11))
+ self.assertEqual(timedelta(days=33), BCEDate(-44, 3, 15) - BCEDate(-44, 2, 11))
|
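The new tests pin down the contract that subtracting two `BCEDate`s yields a `datetime.timedelta`, including across a month boundary in the Julian leap year -44. The same arithmetic with stdlib dates in another leap year shows why 33 days is the expected span (a sketch of the contract, not of calexicon's internals):

```python
from datetime import date, timedelta

# Four days within the same month, as in the first assertion.
assert date(2016, 3, 15) - date(2016, 3, 11) == timedelta(days=4)

# 2016 is a leap year, so Feb 11 -> Mar 15 spans 18 + 15 = 33 days; the BCE
# test expects the same span because February of -44 has 29 days in the
# Julian calendar as calexicon reckons it.
assert date(2016, 3, 15) - date(2016, 2, 11) == timedelta(days=33)
```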
557f4129dc50acddd6c80d0a0679d8c82d5d9215
|
linter.py
|
linter.py
|
"""This module exports the Polylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Polylint(NodeLinter):
"""Provides an interface to polylint."""
syntax = ('html')
cmd = 'polylint --only-inputs -i'
executable = None
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 1.0'
regex = r'^[^:]*:(?P<line>\d+):(?P<col>\d+)\r?\n\s*(?P<message>.+)$'
multiline = True
line_col_base = (1, 1)
tempfile_suffix = '-'
error_stream = util.STREAM_BOTH
selectors = {}
word_re = None
defaults = {}
inline_settings = None
inline_overrides = None
comment_re = r'\s*/[/*]'
|
"""This module exports the Polylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Polylint(NodeLinter):
"""Provides an interface to polylint."""
syntax = ('html')
cmd = 'polylint --no-recursion -i'
executable = None
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 1.0'
regex = r'^[^:]*:(?P<line>\d+):(?P<col>\d+)\r?\n\s*(?P<message>.+)$'
multiline = True
line_col_base = (1, 1)
tempfile_suffix = '-'
error_stream = util.STREAM_BOTH
selectors = {}
word_re = None
defaults = {}
inline_settings = None
inline_overrides = None
comment_re = r'\s*/[/*]'
|
Use proper flag for single file error reporting
|
Use proper flag for single file error reporting
"inputs-only" was changed to "no-recursion" before getting merged
|
Python
|
mit
|
nomego/SublimeLinter-contrib-polylint
|
"""This module exports the Polylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Polylint(NodeLinter):
"""Provides an interface to polylint."""
syntax = ('html')
- cmd = 'polylint --only-inputs -i'
+ cmd = 'polylint --no-recursion -i'
executable = None
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 1.0'
regex = r'^[^:]*:(?P<line>\d+):(?P<col>\d+)\r?\n\s*(?P<message>.+)$'
multiline = True
line_col_base = (1, 1)
tempfile_suffix = '-'
error_stream = util.STREAM_BOTH
selectors = {}
word_re = None
defaults = {}
inline_settings = None
inline_overrides = None
comment_re = r'\s*/[/*]'
|
Use proper flag for single file error reporting
|
## Code Before:
"""This module exports the Polylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Polylint(NodeLinter):
"""Provides an interface to polylint."""
syntax = ('html')
cmd = 'polylint --only-inputs -i'
executable = None
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 1.0'
regex = r'^[^:]*:(?P<line>\d+):(?P<col>\d+)\r?\n\s*(?P<message>.+)$'
multiline = True
line_col_base = (1, 1)
tempfile_suffix = '-'
error_stream = util.STREAM_BOTH
selectors = {}
word_re = None
defaults = {}
inline_settings = None
inline_overrides = None
comment_re = r'\s*/[/*]'
## Instruction:
Use proper flag for single file error reporting
## Code After:
"""This module exports the Polylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Polylint(NodeLinter):
"""Provides an interface to polylint."""
syntax = ('html')
cmd = 'polylint --no-recursion -i'
executable = None
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 1.0'
regex = r'^[^:]*:(?P<line>\d+):(?P<col>\d+)\r?\n\s*(?P<message>.+)$'
multiline = True
line_col_base = (1, 1)
tempfile_suffix = '-'
error_stream = util.STREAM_BOTH
selectors = {}
word_re = None
defaults = {}
inline_settings = None
inline_overrides = None
comment_re = r'\s*/[/*]'
|
"""This module exports the Polylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Polylint(NodeLinter):
"""Provides an interface to polylint."""
syntax = ('html')
- cmd = 'polylint --only-inputs -i'
+ cmd = 'polylint --no-recursion -i'
executable = None
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 1.0'
regex = r'^[^:]*:(?P<line>\d+):(?P<col>\d+)\r?\n\s*(?P<message>.+)$'
multiline = True
line_col_base = (1, 1)
tempfile_suffix = '-'
error_stream = util.STREAM_BOTH
selectors = {}
word_re = None
defaults = {}
inline_settings = None
inline_overrides = None
comment_re = r'\s*/[/*]'
|
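Independent of the flag change, the plugin's two-line `regex` (applied with `multiline = True`, i.e. `re.MULTILINE`) can be sanity-checked against a plausible polylint report; the sample output below is illustrative, not captured from polylint:

```python
import re

regex = re.compile(
    r'^[^:]*:(?P<line>\d+):(?P<col>\d+)\r?\n\s*(?P<message>.+)$',
    re.MULTILINE)

# First line: file, line, column; second line: the indented message.
sample = "index.html:12:5\n  Unknown element <my-element>"
match = regex.search(sample)
assert match is not None
assert match.group('line') == '12' and match.group('col') == '5'
assert match.group('message') == 'Unknown element <my-element>'
```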
34b57742801f888af7597378bd00f9d06c2d3b66
|
packages/Python/lldbsuite/test/repl/quicklookobject/TestREPLQuickLookObject.py
|
packages/Python/lldbsuite/test/repl/quicklookobject/TestREPLQuickLookObject.py
|
"""Test that QuickLookObject works correctly in the REPL"""
import os, time
import unittest2
import lldb
from lldbsuite.test.lldbrepl import REPLTest, load_tests
import lldbsuite.test.lldbtest as lldbtest
class REPLQuickLookTestCase (REPLTest):
mydir = REPLTest.compute_mydir(__file__)
def doTest(self):
self.command('true.customPlaygroundQuickLook()', patterns=['Logical = true'])
self.command('1.25.customPlaygroundQuickLook()', patterns=['Double = 1.25'])
self.command('Float(1.25).customPlaygroundQuickLook()', patterns=['Float = 1.25'])
self.command('"Hello".customPlaygroundQuickLook()', patterns=['Text = \"Hello\"'])
|
"""Test that QuickLookObject works correctly in the REPL"""
import os, time
import unittest2
import lldb
from lldbsuite.test.lldbrepl import REPLTest, load_tests
import lldbsuite.test.lldbtest as lldbtest
class REPLQuickLookTestCase (REPLTest):
mydir = REPLTest.compute_mydir(__file__)
def doTest(self):
self.command('PlaygroundQuickLook(reflecting: true)', patterns=['Logical = true'])
self.command('PlaygroundQuickLook(reflecting: 1.25)', patterns=['Double = 1.25'])
self.command('PlaygroundQuickLook(reflecting: Float(1.25))', patterns=['Float = 1.25'])
self.command('PlaygroundQuickLook(reflecting: "Hello")', patterns=['Text = \"Hello\"'])
|
Use the PlaygroundQuickLook(reflecting:) constructor in this test case
|
Use the PlaygroundQuickLook(reflecting:) constructor in this test case
|
Python
|
apache-2.0
|
apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb
|
"""Test that QuickLookObject works correctly in the REPL"""
import os, time
import unittest2
import lldb
from lldbsuite.test.lldbrepl import REPLTest, load_tests
import lldbsuite.test.lldbtest as lldbtest
class REPLQuickLookTestCase (REPLTest):
mydir = REPLTest.compute_mydir(__file__)
def doTest(self):
- self.command('true.customPlaygroundQuickLook()', patterns=['Logical = true'])
+ self.command('PlaygroundQuickLook(reflecting: true)', patterns=['Logical = true'])
- self.command('1.25.customPlaygroundQuickLook()', patterns=['Double = 1.25'])
+ self.command('PlaygroundQuickLook(reflecting: 1.25)', patterns=['Double = 1.25'])
- self.command('Float(1.25).customPlaygroundQuickLook()', patterns=['Float = 1.25'])
+ self.command('PlaygroundQuickLook(reflecting: Float(1.25))', patterns=['Float = 1.25'])
- self.command('"Hello".customPlaygroundQuickLook()', patterns=['Text = \"Hello\"'])
+ self.command('PlaygroundQuickLook(reflecting: "Hello")', patterns=['Text = \"Hello\"'])
-
-
|
Use the PlaygroundQuickLook(reflecting:) constructor in this test case
|
## Code Before:
"""Test that QuickLookObject works correctly in the REPL"""
import os, time
import unittest2
import lldb
from lldbsuite.test.lldbrepl import REPLTest, load_tests
import lldbsuite.test.lldbtest as lldbtest
class REPLQuickLookTestCase (REPLTest):
mydir = REPLTest.compute_mydir(__file__)
def doTest(self):
self.command('true.customPlaygroundQuickLook()', patterns=['Logical = true'])
self.command('1.25.customPlaygroundQuickLook()', patterns=['Double = 1.25'])
self.command('Float(1.25).customPlaygroundQuickLook()', patterns=['Float = 1.25'])
self.command('"Hello".customPlaygroundQuickLook()', patterns=['Text = \"Hello\"'])
## Instruction:
Use the PlaygroundQuickLook(reflecting:) constructor in this test case
## Code After:
"""Test that QuickLookObject works correctly in the REPL"""
import os, time
import unittest2
import lldb
from lldbsuite.test.lldbrepl import REPLTest, load_tests
import lldbsuite.test.lldbtest as lldbtest
class REPLQuickLookTestCase (REPLTest):
mydir = REPLTest.compute_mydir(__file__)
def doTest(self):
self.command('PlaygroundQuickLook(reflecting: true)', patterns=['Logical = true'])
self.command('PlaygroundQuickLook(reflecting: 1.25)', patterns=['Double = 1.25'])
self.command('PlaygroundQuickLook(reflecting: Float(1.25))', patterns=['Float = 1.25'])
self.command('PlaygroundQuickLook(reflecting: "Hello")', patterns=['Text = \"Hello\"'])
|
"""Test that QuickLookObject works correctly in the REPL"""
import os, time
import unittest2
import lldb
from lldbsuite.test.lldbrepl import REPLTest, load_tests
import lldbsuite.test.lldbtest as lldbtest
class REPLQuickLookTestCase (REPLTest):
mydir = REPLTest.compute_mydir(__file__)
def doTest(self):
- self.command('true.customPlaygroundQuickLook()', patterns=['Logical = true'])
? -----------
+ self.command('PlaygroundQuickLook(reflecting: true)', patterns=['Logical = true'])
? ++++++++++++++++
- self.command('1.25.customPlaygroundQuickLook()', patterns=['Double = 1.25'])
? -----------
+ self.command('PlaygroundQuickLook(reflecting: 1.25)', patterns=['Double = 1.25'])
? ++++++++++++++++
- self.command('Float(1.25).customPlaygroundQuickLook()', patterns=['Float = 1.25'])
? ------------------
+ self.command('PlaygroundQuickLook(reflecting: Float(1.25))', patterns=['Float = 1.25'])
? +++++++++++++++++++++++
- self.command('"Hello".customPlaygroundQuickLook()', patterns=['Text = \"Hello\"'])
? --------------
+ self.command('PlaygroundQuickLook(reflecting: "Hello")', patterns=['Text = \"Hello\"'])
? +++++++++++++++++++
-
-
|
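Each `self.command(expr, patterns=[...])` call runs the Swift expression in the REPL and checks the captured output against the given regex patterns. A hedged sketch of that checking step (the output string is illustrative, and `REPLTest` itself is not reproduced here):

```python
import re

def check_repl_output(output, patterns):
    # REPLTest.command presumably does something along these lines with
    # the output captured from the Swift REPL.
    for pattern in patterns:
        assert re.search(pattern, output), "pattern %r not found" % pattern

check_repl_output(
    '$R0: PlaygroundQuickLook = Logical = true',
    ['Logical = true'])
```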
6bdec176a7a43b3e6eb65a2c7e639c09a89d43bc
|
data_models/data_refinery_models/models.py
|
data_models/data_refinery_models/models.py
|
from django.db import models
from django.utils import timezone
class TimeTrackedModel(models.Model):
created_at = models.DateTimeField(editable=False)
updated_at = models.DateTimeField()
def save(self, *args, **kwargs):
''' On save, update timestamps '''
if not self.id:
self.created_at = timezone.now()
self.updated_at = timezone.now()
return super(TimeTrackedModel, self).save(*args, **kwargs)
class Meta:
abstract = True
# This model still has a prototypical status, but I needed something to
# test with and it's at least in the right ballpark
class Batch(TimeTrackedModel):
source_type = models.CharField(max_length=256)
size_in_bytes = models.IntegerField()
download_url = models.CharField(max_length=2048)
raw_format = models.CharField(max_length=256)
processed_format = models.CharField(max_length=256)
processor_required = models.IntegerField()
accession_code = models.CharField(max_length=256)
# This field will denote where in our system the file can be found
internal_location = models.CharField(max_length=256)
# This will at some be a meaningful integer->organism lookup thing
organism = models.IntegerField()
STATUSES = (
("NEW", "New"),
("DOWNLOADED", "Downloaded"),
("PROCESSED", "Proccessed"),
)
status = models.CharField(max_length=10, choices=STATUSES)
|
from django.db import models
from django.utils import timezone
class TimeTrackedModel(models.Model):
created_at = models.DateTimeField(editable=False)
updated_at = models.DateTimeField()
def save(self, *args, **kwargs):
''' On save, update timestamps '''
if not self.id:
self.created_at = timezone.now()
self.updated_at = timezone.now()
return super(TimeTrackedModel, self).save(*args, **kwargs)
class Meta:
abstract = True
# This model still has a prototypical status, but I needed something to
# test with and it's at least in the right ballpark
class Batch(TimeTrackedModel):
source_type = models.CharField(max_length=256)
size_in_bytes = models.IntegerField()
download_url = models.CharField(max_length=2048)
raw_format = models.CharField(max_length=256)
processed_format = models.CharField(max_length=256)
processor_required = models.IntegerField()
accession_code = models.CharField(max_length=256)
# This field will denote where in our system the file can be found
internal_location = models.CharField(max_length=256)
# This will utilize the organism taxonomy ID from NCBI
organism = models.IntegerField()
STATUSES = (
("NEW", "New"),
("DOWNLOADED", "Downloaded"),
("PROCESSED", "Proccessed"),
)
status = models.CharField(max_length=10, choices=STATUSES)
|
Change a comment to mention the organism taxonomy ID from NCBI.
|
Change a comment to mention the organism taxonomy ID from NCBI.
|
Python
|
bsd-3-clause
|
data-refinery/data_refinery,data-refinery/data_refinery,data-refinery/data_refinery
|
from django.db import models
from django.utils import timezone
class TimeTrackedModel(models.Model):
created_at = models.DateTimeField(editable=False)
updated_at = models.DateTimeField()
def save(self, *args, **kwargs):
''' On save, update timestamps '''
if not self.id:
self.created_at = timezone.now()
self.updated_at = timezone.now()
return super(TimeTrackedModel, self).save(*args, **kwargs)
class Meta:
abstract = True
# This model still has a prototypical status, but I needed something to
# test with and it's at least in the right ballpark
class Batch(TimeTrackedModel):
source_type = models.CharField(max_length=256)
size_in_bytes = models.IntegerField()
download_url = models.CharField(max_length=2048)
raw_format = models.CharField(max_length=256)
processed_format = models.CharField(max_length=256)
processor_required = models.IntegerField()
accession_code = models.CharField(max_length=256)
# This field will denote where in our system the file can be found
internal_location = models.CharField(max_length=256)
- # This will at some be a meaningful integer->organism lookup thing
+ # This will utilize the organism taxonomy ID from NCBI
organism = models.IntegerField()
STATUSES = (
("NEW", "New"),
("DOWNLOADED", "Downloaded"),
("PROCESSED", "Proccessed"),
)
status = models.CharField(max_length=10, choices=STATUSES)
|
Change a comment to mention the organism taxonomy ID from NCBI.
|
## Code Before:
from django.db import models
from django.utils import timezone
class TimeTrackedModel(models.Model):
created_at = models.DateTimeField(editable=False)
updated_at = models.DateTimeField()
def save(self, *args, **kwargs):
''' On save, update timestamps '''
if not self.id:
self.created_at = timezone.now()
self.updated_at = timezone.now()
return super(TimeTrackedModel, self).save(*args, **kwargs)
class Meta:
abstract = True
# This model still has a prototypical status, but I needed something to
# test with and it's at least in the right ballpark
class Batch(TimeTrackedModel):
source_type = models.CharField(max_length=256)
size_in_bytes = models.IntegerField()
download_url = models.CharField(max_length=2048)
raw_format = models.CharField(max_length=256)
processed_format = models.CharField(max_length=256)
processor_required = models.IntegerField()
accession_code = models.CharField(max_length=256)
# This field will denote where in our system the file can be found
internal_location = models.CharField(max_length=256)
# This will at some be a meaningful integer->organism lookup thing
organism = models.IntegerField()
STATUSES = (
("NEW", "New"),
("DOWNLOADED", "Downloaded"),
("PROCESSED", "Proccessed"),
)
status = models.CharField(max_length=10, choices=STATUSES)
## Instruction:
Change a comment to mention the organism taxonomy ID from NCBI.
## Code After:
from django.db import models
from django.utils import timezone
class TimeTrackedModel(models.Model):
created_at = models.DateTimeField(editable=False)
updated_at = models.DateTimeField()
def save(self, *args, **kwargs):
''' On save, update timestamps '''
if not self.id:
self.created_at = timezone.now()
self.updated_at = timezone.now()
return super(TimeTrackedModel, self).save(*args, **kwargs)
class Meta:
abstract = True
# This model still has a prototypical status, but I needed something to
# test with and it's at least in the right ballpark
class Batch(TimeTrackedModel):
source_type = models.CharField(max_length=256)
size_in_bytes = models.IntegerField()
download_url = models.CharField(max_length=2048)
raw_format = models.CharField(max_length=256)
processed_format = models.CharField(max_length=256)
processor_required = models.IntegerField()
accession_code = models.CharField(max_length=256)
# This field will denote where in our system the file can be found
internal_location = models.CharField(max_length=256)
# This will utilize the organism taxonomy ID from NCBI
organism = models.IntegerField()
STATUSES = (
("NEW", "New"),
("DOWNLOADED", "Downloaded"),
("PROCESSED", "Proccessed"),
)
status = models.CharField(max_length=10, choices=STATUSES)
|
from django.db import models
from django.utils import timezone
class TimeTrackedModel(models.Model):
created_at = models.DateTimeField(editable=False)
updated_at = models.DateTimeField()
def save(self, *args, **kwargs):
''' On save, update timestamps '''
if not self.id:
self.created_at = timezone.now()
self.updated_at = timezone.now()
return super(TimeTrackedModel, self).save(*args, **kwargs)
class Meta:
abstract = True
# This model still has a prototypical status, but I needed something to
# test with and it's at least in the right ballpark
class Batch(TimeTrackedModel):
source_type = models.CharField(max_length=256)
size_in_bytes = models.IntegerField()
download_url = models.CharField(max_length=2048)
raw_format = models.CharField(max_length=256)
processed_format = models.CharField(max_length=256)
processor_required = models.IntegerField()
accession_code = models.CharField(max_length=256)
# This field will denote where in our system the file can be found
internal_location = models.CharField(max_length=256)
- # This will at some be a meaningful integer->organism lookup thing
+ # This will utilize the organism taxonomy ID from NCBI
organism = models.IntegerField()
STATUSES = (
("NEW", "New"),
("DOWNLOADED", "Downloaded"),
("PROCESSED", "Proccessed"),
)
status = models.CharField(max_length=10, choices=STATUSES)
|
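The updated comment pins Batch.organism to NCBI taxonomy IDs instead of an ad hoc integer scheme. A sketch of what that convention enables (the lookup table is illustrative; 9606 and 10090 are the real NCBI IDs for human and house mouse):

# Illustrative slice of the NCBI taxonomy; the real database is far larger.
NCBI_TAXONOMY = {
    9606: 'Homo sapiens',
    10090: 'Mus musculus',
}

def organism_name(taxonomy_id):
    # Resolve the integer stored in Batch.organism to a readable name.
    return NCBI_TAXONOMY.get(taxonomy_id, 'unknown taxon')

assert organism_name(9606) == 'Homo sapiens'
assert organism_name(0) == 'unknown taxon'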
8351b73693019360c3f0ea3c60531ac13bef1c24
|
structure/models.py
|
structure/models.py
|
from django.db import models
from django.contrib.auth.models import AbstractUser
from django.utils.translation import ugettext_lazy as _
class Organization(models.Model):
name = models.CharField(_('Name'), max_length=80)
slug = models.SlugField()
def __unicode__(self):
return self.name
class Team(models.Model):
name = models.CharField(_('Name'), max_length=80)
organization = models.ForeignKey('Organization')
def __unicode__(self):
return self.name
class User(AbstractUser):
teams = models.ManyToManyField(Team, blank=True, related_name='users')
def __unicode__(self):
return self.username
class Contract(models.Model):
name = models.CharField(_('Name'), max_length=150)
def __unicode__(self):
return self.name
class ContractOrganization(models.Model):
contract = models.ForeignKey('Contract')
organization = models.ForeignKey('Organization')
default_team = models.ForeignKey('Team')
def __unicode__(self):
return self.contract.name + ' - ' + self.organization.name
class Meta:
verbose_name = _('Contracted organization')
verbose_name_plural = _('Contracted organizations')
class ContractTeam(models.Model):
contract = models.ForeignKey('Contract')
team = models.ForeignKey('Team')
def __unicode__(self):
return self.contract.name + ' - ' + self.team.name
class Meta:
verbose_name = _('Contracted team')
verbose_name_plural = _('Contracted teams')
|
from django.db import models
from django.contrib.auth.models import AbstractUser
from django.utils.translation import ugettext_lazy as _
class Organization(models.Model):
name = models.CharField(_('Name'), max_length=80)
slug = models.SlugField()
def __unicode__(self):
return self.name
class Team(models.Model):
name = models.CharField(_('Name'), max_length=80)
organization = models.ForeignKey('Organization')
def __unicode__(self):
return self.name
class User(AbstractUser):
name = models.CharField(_('Name'), max_length=40)
teams = models.ManyToManyField(Team, blank=True, related_name='users')
def __unicode__(self):
return self.name
class Contract(models.Model):
name = models.CharField(_('Name'), max_length=150)
def __unicode__(self):
return self.name
class ContractOrganization(models.Model):
contract = models.ForeignKey('Contract')
organization = models.ForeignKey('Organization')
default_team = models.ForeignKey('Team')
def __unicode__(self):
return self.contract.name + ' - ' + self.organization.name
class Meta:
verbose_name = _('Contracted organization')
verbose_name_plural = _('Contracted organizations')
class ContractTeam(models.Model):
contract = models.ForeignKey('Contract')
team = models.ForeignKey('Team')
def __unicode__(self):
return self.contract.name + ' - ' + self.team.name
class Meta:
verbose_name = _('Contracted team')
verbose_name_plural = _('Contracted teams')
|
Add name to User model.
|
Add name to User model.
|
Python
|
bsd-3-clause
|
RocknRoot/LIIT
|
from django.db import models
from django.contrib.auth.models import AbstractUser
from django.utils.translation import ugettext_lazy as _
class Organization(models.Model):
name = models.CharField(_('Name'), max_length=80)
slug = models.SlugField()
def __unicode__(self):
return self.name
class Team(models.Model):
name = models.CharField(_('Name'), max_length=80)
organization = models.ForeignKey('Organization')
def __unicode__(self):
return self.name
class User(AbstractUser):
+ name = models.CharField(_('Name'), max_length=40)
teams = models.ManyToManyField(Team, blank=True, related_name='users')
def __unicode__(self):
- return self.username
+ return self.name
class Contract(models.Model):
name = models.CharField(_('Name'), max_length=150)
def __unicode__(self):
return self.name
class ContractOrganization(models.Model):
contract = models.ForeignKey('Contract')
organization = models.ForeignKey('Organization')
default_team = models.ForeignKey('Team')
def __unicode__(self):
return self.contract.name + ' - ' + self.organization.name
class Meta:
verbose_name = _('Contracted organization')
verbose_name_plural = _('Contracted organizations')
class ContractTeam(models.Model):
contract = models.ForeignKey('Contract')
team = models.ForeignKey('Team')
def __unicode__(self):
return self.contract.name + ' - ' + self.team.name
class Meta:
verbose_name = _('Contracted team')
verbose_name_plural = _('Contracted teams')
|
Add name to User model.
|
## Code Before:
from django.db import models
from django.contrib.auth.models import AbstractUser
from django.utils.translation import ugettext_lazy as _
class Organization(models.Model):
name = models.CharField(_('Name'), max_length=80)
slug = models.SlugField()
def __unicode__(self):
return self.name
class Team(models.Model):
name = models.CharField(_('Name'), max_length=80)
organization = models.ForeignKey('Organization')
def __unicode__(self):
return self.name
class User(AbstractUser):
teams = models.ManyToManyField(Team, blank=True, related_name='users')
def __unicode__(self):
return self.username
class Contract(models.Model):
name = models.CharField(_('Name'), max_length=150)
def __unicode__(self):
return self.name
class ContractOrganization(models.Model):
contract = models.ForeignKey('Contract')
organization = models.ForeignKey('Organization')
default_team = models.ForeignKey('Team')
def __unicode__(self):
return self.contract.name + ' - ' + self.organization.name
class Meta:
verbose_name = _('Contracted organization')
verbose_name_plural = _('Contracted organizations')
class ContractTeam(models.Model):
contract = models.ForeignKey('Contract')
team = models.ForeignKey('Team')
def __unicode__(self):
return self.contract.name + ' - ' + self.team.name
class Meta:
verbose_name = _('Contracted team')
verbose_name_plural = _('Contracted teams')
## Instruction:
Add name to User model.
## Code After:
from django.db import models
from django.contrib.auth.models import AbstractUser
from django.utils.translation import ugettext_lazy as _
class Organization(models.Model):
name = models.CharField(_('Name'), max_length=80)
slug = models.SlugField()
def __unicode__(self):
return self.name
class Team(models.Model):
name = models.CharField(_('Name'), max_length=80)
organization = models.ForeignKey('Organization')
def __unicode__(self):
return self.name
class User(AbstractUser):
name = models.CharField(_('Name'), max_length=40)
teams = models.ManyToManyField(Team, blank=True, related_name='users')
def __unicode__(self):
return self.name
class Contract(models.Model):
name = models.CharField(_('Name'), max_length=150)
def __unicode__(self):
return self.name
class ContractOrganization(models.Model):
contract = models.ForeignKey('Contract')
organization = models.ForeignKey('Organization')
default_team = models.ForeignKey('Team')
def __unicode__(self):
return self.contract.name + ' - ' + self.organization.name
class Meta:
verbose_name = _('Contracted organization')
verbose_name_plural = _('Contracted organizations')
class ContractTeam(models.Model):
contract = models.ForeignKey('Contract')
team = models.ForeignKey('Team')
def __unicode__(self):
return self.contract.name + ' - ' + self.team.name
class Meta:
verbose_name = _('Contracted team')
verbose_name_plural = _('Contracted teams')
|
from django.db import models
from django.contrib.auth.models import AbstractUser
from django.utils.translation import ugettext_lazy as _
class Organization(models.Model):
name = models.CharField(_('Name'), max_length=80)
slug = models.SlugField()
def __unicode__(self):
return self.name
class Team(models.Model):
name = models.CharField(_('Name'), max_length=80)
organization = models.ForeignKey('Organization')
def __unicode__(self):
return self.name
class User(AbstractUser):
+ name = models.CharField(_('Name'), max_length=40)
teams = models.ManyToManyField(Team, blank=True, related_name='users')
def __unicode__(self):
- return self.username
? ----
+ return self.name
class Contract(models.Model):
name = models.CharField(_('Name'), max_length=150)
def __unicode__(self):
return self.name
class ContractOrganization(models.Model):
contract = models.ForeignKey('Contract')
organization = models.ForeignKey('Organization')
default_team = models.ForeignKey('Team')
def __unicode__(self):
return self.contract.name + ' - ' + self.organization.name
class Meta:
verbose_name = _('Contracted organization')
verbose_name_plural = _('Contracted organizations')
class ContractTeam(models.Model):
contract = models.ForeignKey('Contract')
team = models.ForeignKey('Team')
def __unicode__(self):
return self.contract.name + ' - ' + self.team.name
class Meta:
verbose_name = _('Contracted team')
verbose_name_plural = _('Contracted teams')
|
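Because User is a concrete model, the new column also needs a schema migration before it can be used. A sketch of what makemigrations would emit for this change (the dependency name and the empty-string default are assumptions, not taken from the repo):

from django.db import migrations, models

class Migration(migrations.Migration):

    dependencies = [
        ('structure', '0001_initial'),  # hypothetical predecessor migration
    ]

    operations = [
        migrations.AddField(
            model_name='user',
            name='name',
            # A default lets existing rows be backfilled without prompting.
            field=models.CharField(default='', max_length=40, verbose_name='Name'),
        ),
    ]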
fdf05b0fa93c350d2cd030e451b0e26ed7393209
|
tests/clientlib/validate_manifest_test.py
|
tests/clientlib/validate_manifest_test.py
|
import pytest
from pre_commit.clientlib.validate_manifest import additional_manifest_check
from pre_commit.clientlib.validate_manifest import InvalidManifestError
from pre_commit.clientlib.validate_manifest import run
def test_returns_0_for_valid_manifest():
assert run(['example_manifest.yaml']) == 0
def test_returns_0_for_our_manifest():
assert run([]) == 0
def test_returns_1_for_failing():
assert run(['tests/data/valid_yaml_but_invalid_manifest.yaml']) == 1
def test_additional_manifest_check_raises_for_bad_language():
with pytest.raises(InvalidManifestError):
additional_manifest_check([{'id': 'foo', 'language': 'not valid'}])
@pytest.mark.parametrize(('obj'), (
[{}],
[{'language': 'python'}],
[{'language': 'python>2.6'}],
))
def test_additional_manifest_check_is_ok_with_missing_language(obj):
additional_manifest_check(obj)
|
import jsonschema
import jsonschema.exceptions
import pytest
from pre_commit.clientlib.validate_manifest import additional_manifest_check
from pre_commit.clientlib.validate_manifest import InvalidManifestError
from pre_commit.clientlib.validate_manifest import MANIFEST_JSON_SCHEMA
from pre_commit.clientlib.validate_manifest import run
def test_returns_0_for_valid_manifest():
assert run(['example_manifest.yaml']) == 0
def test_returns_0_for_our_manifest():
assert run([]) == 0
def test_returns_1_for_failing():
assert run(['tests/data/valid_yaml_but_invalid_manifest.yaml']) == 1
def test_additional_manifest_check_raises_for_bad_language():
with pytest.raises(InvalidManifestError):
additional_manifest_check([{'id': 'foo', 'language': 'not valid'}])
@pytest.mark.parametrize(('obj'), (
[{}],
[{'language': 'python'}],
[{'language': 'python>2.6'}],
))
def test_additional_manifest_check_is_ok_with_missing_language(obj):
additional_manifest_check(obj)
def is_valid_according_to_schema(obj, schema):
try:
jsonschema.validate(obj, schema)
return True
except jsonschema.exceptions.ValidationError:
return False
@pytest.mark.parametrize(('manifest_obj', 'expected'), (
([], False),
([{'id': 'a', 'name': 'b', 'entry': 'c'}], True),
(
[{
'id': 'a',
'name': 'b',
'entry': 'c',
'language': 'python',
'expected_return_value': 0,
}],
True,
),
))
def test_is_valid_according_to_schema(manifest_obj, expected):
ret = is_valid_according_to_schema(manifest_obj, MANIFEST_JSON_SCHEMA)
assert ret is expected
|
Add better tests for manifest json schema
|
Add better tests for manifest json schema
|
Python
|
mit
|
chriskuehl/pre-commit,pre-commit/pre-commit,philipgian/pre-commit,beni55/pre-commit,Lucas-C/pre-commit,barrysteyn/pre-commit,Lucas-C/pre-commit,Lucas-C/pre-commit,dnephin/pre-commit,philipgian/pre-commit,dnephin/pre-commit,Teino1978-Corp/pre-commit,philipgian/pre-commit,chriskuehl/pre-commit,chriskuehl/pre-commit-1,dnephin/pre-commit,Teino1978-Corp/pre-commit,Lucas-C/pre-commit,barrysteyn/pre-commit,Teino1978-Corp/pre-commit,pre-commit/pre-commit,philipgian/pre-commit,pre-commit/pre-commit,beni55/pre-commit,pre-commit/pre-commit,philipgian/pre-commit,Lucas-C/pre-commit,Teino1978-Corp/pre-commit,pre-commit/pre-commit,chriskuehl/pre-commit-1,beni55/pre-commit,pre-commit/pre-commit,chriskuehl/pre-commit-1,Lucas-C/pre-commit,beni55/pre-commit,barrysteyn/pre-commit,pre-commit/pre-commit,chriskuehl/pre-commit,chriskuehl/pre-commit-1,pre-commit/pre-commit,pre-commit/pre-commit,barrysteyn/pre-commit,chriskuehl/pre-commit,pre-commit/pre-commit,philipgian/pre-commit,dnephin/pre-commit,pre-commit/pre-commit
|
+ import jsonschema
+ import jsonschema.exceptions
import pytest
from pre_commit.clientlib.validate_manifest import additional_manifest_check
from pre_commit.clientlib.validate_manifest import InvalidManifestError
+ from pre_commit.clientlib.validate_manifest import MANIFEST_JSON_SCHEMA
from pre_commit.clientlib.validate_manifest import run
def test_returns_0_for_valid_manifest():
assert run(['example_manifest.yaml']) == 0
def test_returns_0_for_our_manifest():
assert run([]) == 0
def test_returns_1_for_failing():
assert run(['tests/data/valid_yaml_but_invalid_manifest.yaml']) == 1
def test_additional_manifest_check_raises_for_bad_language():
with pytest.raises(InvalidManifestError):
additional_manifest_check([{'id': 'foo', 'language': 'not valid'}])
@pytest.mark.parametrize(('obj'), (
[{}],
[{'language': 'python'}],
[{'language': 'python>2.6'}],
))
def test_additional_manifest_check_is_ok_with_missing_language(obj):
additional_manifest_check(obj)
+
+
+ def is_valid_according_to_schema(obj, schema):
+ try:
+ jsonschema.validate(obj, schema)
+ return True
+ except jsonschema.exceptions.ValidationError:
+ return False
+
+
+ @pytest.mark.parametrize(('manifest_obj', 'expected'), (
+ ([], False),
+ ([{'id': 'a', 'name': 'b', 'entry': 'c'}], True),
+ (
+ [{
+ 'id': 'a',
+ 'name': 'b',
+ 'entry': 'c',
+ 'language': 'python',
+ 'expected_return_value': 0,
+ }],
+ True,
+ ),
+ ))
+ def test_is_valid_according_to_schema(manifest_obj, expected):
+ ret = is_valid_according_to_schema(manifest_obj, MANIFEST_JSON_SCHEMA)
+ assert ret is expected
|
Add better tests for manifest json schema
|
## Code Before:
import pytest
from pre_commit.clientlib.validate_manifest import additional_manifest_check
from pre_commit.clientlib.validate_manifest import InvalidManifestError
from pre_commit.clientlib.validate_manifest import run
def test_returns_0_for_valid_manifest():
assert run(['example_manifest.yaml']) == 0
def test_returns_0_for_our_manifest():
assert run([]) == 0
def test_returns_1_for_failing():
assert run(['tests/data/valid_yaml_but_invalid_manifest.yaml']) == 1
def test_additional_manifest_check_raises_for_bad_language():
with pytest.raises(InvalidManifestError):
additional_manifest_check([{'id': 'foo', 'language': 'not valid'}])
@pytest.mark.parametrize(('obj'), (
[{}],
[{'language': 'python'}],
[{'language': 'python>2.6'}],
))
def test_additional_manifest_check_is_ok_with_missing_language(obj):
additional_manifest_check(obj)
## Instruction:
Add better tests for manifest json schema
## Code After:
import jsonschema
import jsonschema.exceptions
import pytest
from pre_commit.clientlib.validate_manifest import additional_manifest_check
from pre_commit.clientlib.validate_manifest import InvalidManifestError
from pre_commit.clientlib.validate_manifest import MANIFEST_JSON_SCHEMA
from pre_commit.clientlib.validate_manifest import run
def test_returns_0_for_valid_manifest():
assert run(['example_manifest.yaml']) == 0
def test_returns_0_for_our_manifest():
assert run([]) == 0
def test_returns_1_for_failing():
assert run(['tests/data/valid_yaml_but_invalid_manifest.yaml']) == 1
def test_additional_manifest_check_raises_for_bad_language():
with pytest.raises(InvalidManifestError):
additional_manifest_check([{'id': 'foo', 'language': 'not valid'}])
@pytest.mark.parametrize(('obj'), (
[{}],
[{'language': 'python'}],
[{'language': 'python>2.6'}],
))
def test_additional_manifest_check_is_ok_with_missing_language(obj):
additional_manifest_check(obj)
def is_valid_according_to_schema(obj, schema):
try:
jsonschema.validate(obj, schema)
return True
except jsonschema.exceptions.ValidationError:
return False
@pytest.mark.parametrize(('manifest_obj', 'expected'), (
([], False),
([{'id': 'a', 'name': 'b', 'entry': 'c'}], True),
(
[{
'id': 'a',
'name': 'b',
'entry': 'c',
'language': 'python',
'expected_return_value': 0,
}],
True,
),
))
def test_is_valid_according_to_schema(manifest_obj, expected):
ret = is_valid_according_to_schema(manifest_obj, MANIFEST_JSON_SCHEMA)
assert ret is expected
|
+ import jsonschema
+ import jsonschema.exceptions
import pytest
from pre_commit.clientlib.validate_manifest import additional_manifest_check
from pre_commit.clientlib.validate_manifest import InvalidManifestError
+ from pre_commit.clientlib.validate_manifest import MANIFEST_JSON_SCHEMA
from pre_commit.clientlib.validate_manifest import run
def test_returns_0_for_valid_manifest():
assert run(['example_manifest.yaml']) == 0
def test_returns_0_for_our_manifest():
assert run([]) == 0
def test_returns_1_for_failing():
assert run(['tests/data/valid_yaml_but_invalid_manifest.yaml']) == 1
def test_additional_manifest_check_raises_for_bad_language():
with pytest.raises(InvalidManifestError):
additional_manifest_check([{'id': 'foo', 'language': 'not valid'}])
@pytest.mark.parametrize(('obj'), (
[{}],
[{'language': 'python'}],
[{'language': 'python>2.6'}],
))
def test_additional_manifest_check_is_ok_with_missing_language(obj):
additional_manifest_check(obj)
+
+
+ def is_valid_according_to_schema(obj, schema):
+ try:
+ jsonschema.validate(obj, schema)
+ return True
+ except jsonschema.exceptions.ValidationError:
+ return False
+
+
+ @pytest.mark.parametrize(('manifest_obj', 'expected'), (
+ ([], False),
+ ([{'id': 'a', 'name': 'b', 'entry': 'c'}], True),
+ (
+ [{
+ 'id': 'a',
+ 'name': 'b',
+ 'entry': 'c',
+ 'language': 'python',
+ 'expected_return_value': 0,
+ }],
+ True,
+ ),
+ ))
+ def test_is_valid_according_to_schema(manifest_obj, expected):
+ ret = is_valid_according_to_schema(manifest_obj, MANIFEST_JSON_SCHEMA)
+ assert ret is expected
|
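The new is_valid_according_to_schema helper is a reusable shape: jsonschema.validate raises on failure, and the wrapper folds that into a boolean. A self-contained sketch of the same shape with a stand-in schema (HOOK_SCHEMA is illustrative, not the project's MANIFEST_JSON_SCHEMA):

import jsonschema
import jsonschema.exceptions

HOOK_SCHEMA = {
    'type': 'array',
    'minItems': 1,
    'items': {
        'type': 'object',
        'required': ['id', 'name', 'entry'],
    },
}

def is_valid(obj, schema=HOOK_SCHEMA):
    # Fold jsonschema's exception-based API into a simple boolean.
    try:
        jsonschema.validate(obj, schema)
        return True
    except jsonschema.exceptions.ValidationError:
        return False

assert is_valid([{'id': 'a', 'name': 'b', 'entry': 'c'}])
assert not is_valid([])             # violates minItems
assert not is_valid([{'id': 'a'}])  # missing required keys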
fe05b5f694671a46dd3391b9cb6561923345c4b7
|
rpi_gpio_http/app.py
|
rpi_gpio_http/app.py
|
from flask import Flask
import logging
import logging.config
import RPi.GPIO as GPIO
from .config import config, config_loader
from .channel import ChannelFactory
app = Flask('rpi_gpio_http')
logging.config.dictConfig(config['logger'])
logger = logging.getLogger(__name__)
logger.info("Config loaded from %s" % config_loader.filename)
channels = {}
GPIO.setmode(GPIO.BOARD)
for ch in config['channels']:
if ch['enabled'] != True:
continue
channel = ChannelFactory.create(ch)
if channel:
channels[channel.pin] = channel
import controllers
|
from flask import Flask
import logging
import logging.config
import RPi.GPIO as GPIO
from .config import config, config_loader
from .channel import ChannelFactory
app = Flask('rpi_gpio_http')
logging.config.dictConfig(config['logger'])
logger = logging.getLogger(__name__)
logger.info("Config loaded from %s" % config_loader.filename)
channels = {}
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BOARD)
for ch in config['channels']:
if ch['enabled'] != True:
continue
channel = ChannelFactory.create(ch)
if channel:
channels[channel.pin] = channel
import controllers
|
Disable warnings in GPIO lib
|
Disable warnings in GPIO lib
|
Python
|
mit
|
voidpp/rpi-gpio-http
|
from flask import Flask
import logging
import logging.config
import RPi.GPIO as GPIO
from .config import config, config_loader
from .channel import ChannelFactory
app = Flask('rpi_gpio_http')
logging.config.dictConfig(config['logger'])
logger = logging.getLogger(__name__)
logger.info("Config loaded from %s" % config_loader.filename)
channels = {}
+ GPIO.setwarnings(False)
GPIO.setmode(GPIO.BOARD)
for ch in config['channels']:
if ch['enabled'] != True:
continue
channel = ChannelFactory.create(ch)
if channel:
channels[channel.pin] = channel
import controllers
|
Disable warnings in GPIO lib
|
## Code Before:
from flask import Flask
import logging
import logging.config
import RPi.GPIO as GPIO
from .config import config, config_loader
from .channel import ChannelFactory
app = Flask('rpi_gpio_http')
logging.config.dictConfig(config['logger'])
logger = logging.getLogger(__name__)
logger.info("Config loaded from %s" % config_loader.filename)
channels = {}
GPIO.setmode(GPIO.BOARD)
for ch in config['channels']:
if ch['enabled'] != True:
continue
channel = ChannelFactory.create(ch)
if channel:
channels[channel.pin] = channel
import controllers
## Instruction:
Disable warnings in GPIO lib
## Code After:
from flask import Flask
import logging
import logging.config
import RPi.GPIO as GPIO
from .config import config, config_loader
from .channel import ChannelFactory
app = Flask('rpi_gpio_http')
logging.config.dictConfig(config['logger'])
logger = logging.getLogger(__name__)
logger.info("Config loaded from %s" % config_loader.filename)
channels = {}
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BOARD)
for ch in config['channels']:
if ch['enabled'] != True:
continue
channel = ChannelFactory.create(ch)
if channel:
channels[channel.pin] = channel
import controllers
|
from flask import Flask
import logging
import logging.config
import RPi.GPIO as GPIO
from .config import config, config_loader
from .channel import ChannelFactory
app = Flask('rpi_gpio_http')
logging.config.dictConfig(config['logger'])
logger = logging.getLogger(__name__)
logger.info("Config loaded from %s" % config_loader.filename)
channels = {}
+ GPIO.setwarnings(False)
GPIO.setmode(GPIO.BOARD)
for ch in config['channels']:
if ch['enabled'] != True:
continue
channel = ChannelFactory.create(ch)
if channel:
channels[channel.pin] = channel
import controllers
|
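GPIO.setwarnings(False) suppresses the library's "channel is already in use" warning, which typically fires when an earlier run exited without calling GPIO.cleanup(). A sketch of the usual call ordering (pin 11 is a placeholder, and this only runs on a Raspberry Pi):

import RPi.GPIO as GPIO  # hardware-specific; importable only on a Pi

GPIO.setwarnings(False)   # silence re-use warnings from unclean prior exits
GPIO.setmode(GPIO.BOARD)  # numbering scheme must be set before any setup()
GPIO.setup(11, GPIO.OUT)  # placeholder pin; depends on the actual wiring
GPIO.output(11, GPIO.HIGH)
GPIO.cleanup()            # release channels so the next run starts clean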
c711d5e2dbca4b95bebc0eed4d48a35eb3c7a998
|
website/addons/dropbox/settings/local-dist.py
|
website/addons/dropbox/settings/local-dist.py
|
# Get an app key and secret at https://www.dropbox.com/developers/apps
DROPBOX_KEY = 'changeme'
DROPBOX_SECRET = 'changeme'
|
# Get an app key and secret at https://www.dropbox.com/developers/apps
DROPBOX_KEY = 'jnpncg5s2fc7cj8'
DROPBOX_SECRET = 'sjqv1hrk7sonhu1'
|
Add dropbox credentials for testing.
|
Add dropbox credentials for testing.
|
Python
|
apache-2.0
|
crcresearch/osf.io,acshi/osf.io,felliott/osf.io,TomHeatwole/osf.io,RomanZWang/osf.io,jnayak1/osf.io,baylee-d/osf.io,TomBaxter/osf.io,mluke93/osf.io,mluo613/osf.io,pattisdr/osf.io,samchrisinger/osf.io,wearpants/osf.io,mfraezz/osf.io,kch8qx/osf.io,Nesiehr/osf.io,adlius/osf.io,RomanZWang/osf.io,abought/osf.io,felliott/osf.io,jnayak1/osf.io,caseyrollins/osf.io,doublebits/osf.io,cslzchen/osf.io,kwierman/osf.io,monikagrabowska/osf.io,rdhyee/osf.io,laurenrevere/osf.io,Nesiehr/osf.io,HalcyonChimera/osf.io,icereval/osf.io,zamattiac/osf.io,CenterForOpenScience/osf.io,jnayak1/osf.io,cwisecarver/osf.io,SSJohns/osf.io,icereval/osf.io,monikagrabowska/osf.io,wearpants/osf.io,chrisseto/osf.io,binoculars/osf.io,monikagrabowska/osf.io,mluo613/osf.io,adlius/osf.io,aaxelb/osf.io,monikagrabowska/osf.io,asanfilippo7/osf.io,icereval/osf.io,brianjgeiger/osf.io,amyshi188/osf.io,cwisecarver/osf.io,DanielSBrown/osf.io,crcresearch/osf.io,kch8qx/osf.io,SSJohns/osf.io,abought/osf.io,crcresearch/osf.io,laurenrevere/osf.io,mluo613/osf.io,baylee-d/osf.io,alexschiller/osf.io,zachjanicki/osf.io,aaxelb/osf.io,rdhyee/osf.io,doublebits/osf.io,amyshi188/osf.io,Nesiehr/osf.io,sloria/osf.io,hmoco/osf.io,HalcyonChimera/osf.io,aaxelb/osf.io,RomanZWang/osf.io,TomHeatwole/osf.io,kch8qx/osf.io,chrisseto/osf.io,TomBaxter/osf.io,aaxelb/osf.io,DanielSBrown/osf.io,mattclark/osf.io,emetsger/osf.io,emetsger/osf.io,binoculars/osf.io,zachjanicki/osf.io,kwierman/osf.io,kwierman/osf.io,kwierman/osf.io,sloria/osf.io,mfraezz/osf.io,kch8qx/osf.io,acshi/osf.io,chennan47/osf.io,caneruguz/osf.io,doublebits/osf.io,mluke93/osf.io,erinspace/osf.io,alexschiller/osf.io,mluo613/osf.io,zamattiac/osf.io,alexschiller/osf.io,caseyrollins/osf.io,zachjanicki/osf.io,cwisecarver/osf.io,samchrisinger/osf.io,TomBaxter/osf.io,wearpants/osf.io,amyshi188/osf.io,Johnetordoff/osf.io,HalcyonChimera/osf.io,laurenrevere/osf.io,cslzchen/osf.io,SSJohns/osf.io,monikagrabowska/osf.io,Johnetordoff/osf.io,asanfilippo7/osf.io,zamattiac/osf.io,CenterForOpenScience/osf.io,abought/osf.io,jnayak1/osf.io,Johnetordoff/osf.io,mfraezz/osf.io,samchrisinger/osf.io,DanielSBrown/osf.io,rdhyee/osf.io,leb2dg/osf.io,acshi/osf.io,mattclark/osf.io,chrisseto/osf.io,leb2dg/osf.io,brianjgeiger/osf.io,zachjanicki/osf.io,mluke93/osf.io,binoculars/osf.io,asanfilippo7/osf.io,felliott/osf.io,DanielSBrown/osf.io,TomHeatwole/osf.io,hmoco/osf.io,kch8qx/osf.io,caneruguz/osf.io,saradbowman/osf.io,felliott/osf.io,adlius/osf.io,doublebits/osf.io,caneruguz/osf.io,samchrisinger/osf.io,HalcyonChimera/osf.io,RomanZWang/osf.io,emetsger/osf.io,mluo613/osf.io,hmoco/osf.io,hmoco/osf.io,RomanZWang/osf.io,emetsger/osf.io,rdhyee/osf.io,mluke93/osf.io,acshi/osf.io,leb2dg/osf.io,zamattiac/osf.io,saradbowman/osf.io,leb2dg/osf.io,pattisdr/osf.io,chennan47/osf.io,acshi/osf.io,cslzchen/osf.io,alexschiller/osf.io,SSJohns/osf.io,chennan47/osf.io,erinspace/osf.io,mattclark/osf.io,CenterForOpenScience/osf.io,chrisseto/osf.io,brianjgeiger/osf.io,caseyrollins/osf.io,brianjgeiger/osf.io,alexschiller/osf.io,Nesiehr/osf.io,amyshi188/osf.io,baylee-d/osf.io,CenterForOpenScience/osf.io,erinspace/osf.io,abought/osf.io,wearpants/osf.io,asanfilippo7/osf.io,cslzchen/osf.io,adlius/osf.io,TomHeatwole/osf.io,pattisdr/osf.io,cwisecarver/osf.io,mfraezz/osf.io,caneruguz/osf.io,Johnetordoff/osf.io,doublebits/osf.io,sloria/osf.io
|
# Get an app key and secret at https://www.dropbox.com/developers/apps
+ DROPBOX_KEY = 'jnpncg5s2fc7cj8'
+ DROPBOX_SECRET = 'sjqv1hrk7sonhu1'
- DROPBOX_KEY = 'changeme'
- DROPBOX_SECRET = 'changeme'
-
|
Add dropbox credentials for testing.
|
## Code Before:
# Get an app key and secret at https://www.dropbox.com/developers/apps
DROPBOX_KEY = 'changeme'
DROPBOX_SECRET = 'changeme'
## Instruction:
Add dropbox credentials for testing.
## Code After:
# Get an app key and secret at https://www.dropbox.com/developers/apps
DROPBOX_KEY = 'jnpncg5s2fc7cj8'
DROPBOX_SECRET = 'sjqv1hrk7sonhu1'
|
# Get an app key and secret at https://www.dropbox.com/developers/apps
- DROPBOX_KEY = 'changeme'
- DROPBOX_SECRET = 'changeme'
+ DROPBOX_KEY = 'jnpncg5s2fc7cj8'
+ DROPBOX_SECRET = 'sjqv1hrk7sonhu1'
|
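This change commits live app credentials to a local settings template. A common alternative, shown here only as an assumption about practice and not as what this repo does, is to read them from the environment:

import os

# Placeholders keep the template importable; real values come from the shell.
DROPBOX_KEY = os.environ.get('DROPBOX_KEY', 'changeme')
DROPBOX_SECRET = os.environ.get('DROPBOX_SECRET', 'changeme')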
54674b79fecf7eec4f09e885e7d68c9b6181efcf
|
mollie/api/objects/base.py
|
mollie/api/objects/base.py
|
class Base(dict):
def _get_property(self, name):
"""Return the named property from dictionary values."""
if not self[name]:
return None
return self[name]
|
class Base(dict):
def _get_property(self, name):
"""Return the named property from dictionary values."""
if name not in self:
return None
return self[name]
|
Make _get_property return None when the name does not exist
|
Make _get_property return None when the name does not exist
|
Python
|
bsd-2-clause
|
mollie/mollie-api-python
|
class Base(dict):
def _get_property(self, name):
"""Return the named property from dictionary values."""
- if not self[name]:
+ if name not in self:
return None
return self[name]
|
Make _get_property return None when the name does not exist
|
## Code Before:
class Base(dict):
def _get_property(self, name):
"""Return the named property from dictionary values."""
if not self[name]:
return None
return self[name]
## Instruction:
Make _get_property return None when the name does not exist
## Code After:
class Base(dict):
def _get_property(self, name):
"""Return the named property from dictionary values."""
if name not in self:
return None
return self[name]
|
class Base(dict):
def _get_property(self, name):
"""Return the named property from dictionary values."""
- if not self[name]:
+ if name not in self:
return None
return self[name]
|
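The membership test fixes the lookup twice over: the old code raised KeyError for absent keys, and it also returned None for keys whose values were merely falsy (0, ''). dict.get reproduces the corrected behavior in one call; a sketch:

class Base(dict):
    def _get_property(self, name):
        # dict.get returns None for missing keys and falsy values unchanged.
        return self.get(name)

obj = Base(amount='10.00', count=0)
assert obj._get_property('amount') == '10.00'
assert obj._get_property('count') == 0        # falsy but present
assert obj._get_property('missing') is None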
3875b1ec7d056d337cc1c02d9567cd7ff1ae9748
|
utils/sub8_ros_tools/sub8_ros_tools/init_helpers.py
|
utils/sub8_ros_tools/sub8_ros_tools/init_helpers.py
|
import rospy
from time import time
def wait_for_param(param_name, timeout=None, poll_rate=0.1):
'''Blocking wait for a parameter named $parameter_name to exist
Poll at frequency $poll_rate
    Once the parameter exists, get and return it.
This function intentionally leaves failure logging duties to the developer
'''
start_time = time()
rate = rospy.Rate(poll_rate)
while not rospy.is_shutdown():
# Check if the parameter now exists
if rospy.has_param(param_name):
return rospy.get_param(param_name)
# If we exceed a defined timeout, return None
if timeout is not None:
if time() - start_time > timeout:
return None
# Continue to poll at poll_rate
rate.sleep()
|
import rospy
import rostest
import time
def wait_for_param(param_name, timeout=None, poll_rate=0.1):
'''Blocking wait for a parameter named $parameter_name to exist
Poll at frequency $poll_rate
    Once the parameter exists, get and return it.
This function intentionally leaves failure logging duties to the developer
'''
start_time = time.time()
rate = rospy.Rate(poll_rate)
while not rospy.is_shutdown():
# Check if the parameter now exists
if rospy.has_param(param_name):
return rospy.get_param(param_name)
# If we exceed a defined timeout, return None
if timeout is not None:
if time.time() - start_time > timeout:
return None
# Continue to poll at poll_rate
rate.sleep()
def wait_for_subscriber(node_name, topic, timeout=5.0):
'''Blocks until $node_name subscribes to $topic
Useful mostly in integration tests --
I would counsel against use elsewhere
'''
end_time = time.time() + timeout
resolved_topic = rospy.resolve_name(topic)
resolved_node = rospy.resolve_name(node_name)
# Wait for time-out or ros-shutdown
while (time.time() < end_time) and (not rospy.is_shutdown()):
subscribed = rostest.is_subscriber(
rospy.resolve_name(topic),
rospy.resolve_name(node_name)
)
# Success scenario: node subscribes
if subscribed:
break
time.sleep(0.1)
# Could do this with a while/else
# But chose to explicitly check
success = rostest.is_subscriber(
rospy.resolve_name(topic),
rospy.resolve_name(node_name)
)
return success
|
Add init-helper 'wait for subscriber'
|
UTILS: Add init-helper 'wait for subscriber'
For integration-testing purposes it is often useful to wait until a
particular node subscribes to you
|
Python
|
mit
|
pemami4911/Sub8,pemami4911/Sub8,pemami4911/Sub8
|
import rospy
- from time import time
+ import rostest
+ import time
def wait_for_param(param_name, timeout=None, poll_rate=0.1):
'''Blocking wait for a parameter named $parameter_name to exist
Poll at frequency $poll_rate
    Once the parameter exists, get and return it.
This function intentionally leaves failure logging duties to the developer
'''
- start_time = time()
+ start_time = time.time()
rate = rospy.Rate(poll_rate)
while not rospy.is_shutdown():
# Check if the parameter now exists
if rospy.has_param(param_name):
return rospy.get_param(param_name)
# If we exceed a defined timeout, return None
if timeout is not None:
- if time() - start_time > timeout:
+ if time.time() - start_time > timeout:
return None
# Continue to poll at poll_rate
rate.sleep()
+
+
+ def wait_for_subscriber(node_name, topic, timeout=5.0):
+ '''Blocks until $node_name subscribes to $topic
+ Useful mostly in integration tests --
+ I would counsel against use elsewhere
+ '''
+ end_time = time.time() + timeout
+
+ resolved_topic = rospy.resolve_name(topic)
+ resolved_node = rospy.resolve_name(node_name)
+
+ # Wait for time-out or ros-shutdown
+ while (time.time() < end_time) and (not rospy.is_shutdown()):
+ subscribed = rostest.is_subscriber(
+ rospy.resolve_name(topic),
+ rospy.resolve_name(node_name)
+ )
+ # Success scenario: node subscribes
+ if subscribed:
+ break
+ time.sleep(0.1)
+
+ # Could do this with a while/else
+ # But chose to explicitly check
+ success = rostest.is_subscriber(
+ rospy.resolve_name(topic),
+ rospy.resolve_name(node_name)
+ )
+ return success
|
Add init-helper 'wait for subscriber'
|
## Code Before:
import rospy
from time import time
def wait_for_param(param_name, timeout=None, poll_rate=0.1):
'''Blocking wait for a parameter named $parameter_name to exist
Poll at frequency $poll_rate
    Once the parameter exists, get and return it.
This function intentionally leaves failure logging duties to the developer
'''
start_time = time()
rate = rospy.Rate(poll_rate)
while not rospy.is_shutdown():
# Check if the parameter now exists
if rospy.has_param(param_name):
return rospy.get_param(param_name)
# If we exceed a defined timeout, return None
if timeout is not None:
if time() - start_time > timeout:
return None
# Continue to poll at poll_rate
rate.sleep()
## Instruction:
Add init-helper 'wait for subscriber'
## Code After:
import rospy
import rostest
import time
def wait_for_param(param_name, timeout=None, poll_rate=0.1):
'''Blocking wait for a parameter named $parameter_name to exist
Poll at frequency $poll_rate
    Once the parameter exists, get and return it.
This function intentionally leaves failure logging duties to the developer
'''
start_time = time.time()
rate = rospy.Rate(poll_rate)
while not rospy.is_shutdown():
# Check if the parameter now exists
if rospy.has_param(param_name):
return rospy.get_param(param_name)
# If we exceed a defined timeout, return None
if timeout is not None:
if time.time() - start_time > timeout:
return None
# Continue to poll at poll_rate
rate.sleep()
def wait_for_subscriber(node_name, topic, timeout=5.0):
'''Blocks until $node_name subscribes to $topic
Useful mostly in integration tests --
I would counsel against use elsewhere
'''
end_time = time.time() + timeout
resolved_topic = rospy.resolve_name(topic)
resolved_node = rospy.resolve_name(node_name)
# Wait for time-out or ros-shutdown
while (time.time() < end_time) and (not rospy.is_shutdown()):
subscribed = rostest.is_subscriber(
rospy.resolve_name(topic),
rospy.resolve_name(node_name)
)
# Success scenario: node subscribes
if subscribed:
break
time.sleep(0.1)
# Could do this with a while/else
# But chose to explicitly check
success = rostest.is_subscriber(
rospy.resolve_name(topic),
rospy.resolve_name(node_name)
)
return success
|
import rospy
- from time import time
+ import rostest
+ import time
def wait_for_param(param_name, timeout=None, poll_rate=0.1):
'''Blocking wait for a parameter named $parameter_name to exist
Poll at frequency $poll_rate
    Once the parameter exists, get and return it.
This function intentionally leaves failure logging duties to the developer
'''
- start_time = time()
+ start_time = time.time()
? +++++
rate = rospy.Rate(poll_rate)
while not rospy.is_shutdown():
# Check if the parameter now exists
if rospy.has_param(param_name):
return rospy.get_param(param_name)
# If we exceed a defined timeout, return None
if timeout is not None:
- if time() - start_time > timeout:
+ if time.time() - start_time > timeout:
? +++++
return None
# Continue to poll at poll_rate
rate.sleep()
+
+
+ def wait_for_subscriber(node_name, topic, timeout=5.0):
+ '''Blocks until $node_name subscribes to $topic
+ Useful mostly in integration tests --
+ I would counsel against use elsewhere
+ '''
+ end_time = time.time() + timeout
+
+ resolved_topic = rospy.resolve_name(topic)
+ resolved_node = rospy.resolve_name(node_name)
+
+ # Wait for time-out or ros-shutdown
+ while (time.time() < end_time) and (not rospy.is_shutdown()):
+ subscribed = rostest.is_subscriber(
+ rospy.resolve_name(topic),
+ rospy.resolve_name(node_name)
+ )
+ # Success scenario: node subscribes
+ if subscribed:
+ break
+ time.sleep(0.1)
+
+ # Could do this with a while/else
+ # But chose to explicitly check
+ success = rostest.is_subscriber(
+ rospy.resolve_name(topic),
+ rospy.resolve_name(node_name)
+ )
+ return success
|
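Both helpers share one poll-until-deadline skeleton. A ROS-free sketch of that core, with illustrative names (wait_for, flag):

import time

def wait_for(predicate, timeout=5.0, poll_interval=0.1):
    # Poll `predicate` until it returns truthy or the deadline passes.
    deadline = time.time() + timeout
    while time.time() < deadline:
        if predicate():
            return True
        time.sleep(poll_interval)
    return False

flag = {'set': False}
assert wait_for(lambda: flag['set'], timeout=0.3) is False  # times out
flag['set'] = True
assert wait_for(lambda: flag['set'], timeout=0.3) is True   # returns early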
9672bd20203bc4235910080cca6d79c3b8e126b1
|
nupic/research/frameworks/dendrites/modules/__init__.py
|
nupic/research/frameworks/dendrites/modules/__init__.py
|
from .apply_dendrites import *
from .boosted_dendrites import *
from .dendrite_segments import DendriteSegments
from .dendritic_layers import (
AbsoluteMaxGatingDendriticLayer,
AbsoluteMaxGatingDendriticLayer2d,
BiasingDendriticLayer,
GatingDendriticLayer,
GatingDendriticLayer2d,
)
|
from .apply_dendrites import *
from .boosted_dendrites import *
from .dendrite_segments import DendriteSegments
from .dendritic_layers import (
AbsoluteMaxGatingDendriticLayer,
AbsoluteMaxGatingDendriticLayer2d,
BiasingDendriticLayer,
GatingDendriticLayer,
GatingDendriticLayer2d,
DendriticLayerBase,
)
|
Add DendriticLayerBase to init to ease experimentation
|
Add DendriticLayerBase to init to ease experimentation
|
Python
|
agpl-3.0
|
mrcslws/nupic.research,subutai/nupic.research,numenta/nupic.research,subutai/nupic.research,numenta/nupic.research,mrcslws/nupic.research
|
from .apply_dendrites import *
from .boosted_dendrites import *
from .dendrite_segments import DendriteSegments
from .dendritic_layers import (
AbsoluteMaxGatingDendriticLayer,
AbsoluteMaxGatingDendriticLayer2d,
BiasingDendriticLayer,
GatingDendriticLayer,
GatingDendriticLayer2d,
+ DendriticLayerBase,
)
|
Add DendriticLayerBase to init to ease experimentation
|
## Code Before:
from .apply_dendrites import *
from .boosted_dendrites import *
from .dendrite_segments import DendriteSegments
from .dendritic_layers import (
AbsoluteMaxGatingDendriticLayer,
AbsoluteMaxGatingDendriticLayer2d,
BiasingDendriticLayer,
GatingDendriticLayer,
GatingDendriticLayer2d,
)
## Instruction:
Add DendriticLayerBase to init to ease experimentation
## Code After:
from .apply_dendrites import *
from .boosted_dendrites import *
from .dendrite_segments import DendriteSegments
from .dendritic_layers import (
AbsoluteMaxGatingDendriticLayer,
AbsoluteMaxGatingDendriticLayer2d,
BiasingDendriticLayer,
GatingDendriticLayer,
GatingDendriticLayer2d,
DendriticLayerBase,
)
|
from .apply_dendrites import *
from .boosted_dendrites import *
from .dendrite_segments import DendriteSegments
from .dendritic_layers import (
AbsoluteMaxGatingDendriticLayer,
AbsoluteMaxGatingDendriticLayer2d,
BiasingDendriticLayer,
GatingDendriticLayer,
GatingDendriticLayer2d,
+ DendriticLayerBase,
)
|
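Re-exporting a class from the package __init__ flattens the import path for callers. The standard library's json package uses the same pattern, which makes for a runnable illustration:

import importlib

pkg = importlib.import_module('json')          # the package __init__
mod = importlib.import_module('json.decoder')  # the defining submodule

# json/__init__.py does `from .decoder import JSONDecoder`, so both paths
# resolve to the same class object, just as DendriticLayerBase is now
# reachable directly from the modules package.
assert pkg.JSONDecoder is mod.JSONDecoder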
722228a023aca35660bc493b812727f6c665b3cb
|
posts.py
|
posts.py
|
import json
import pprint
import requests
def sample_valid_reddit_response():
r = requests.get('http://www.reddit.com/r/cscareerquestions/top.json')
response_json = r.json()
if 'data' not in response_json:
print("Trying again")
response_json = sample_valid_reddit_response()
return response_json
def save_sample():
response_json = sample_valid_reddit_response()
del response_json['data']['children']
with open('sample_response.json', 'w+') as f:
json.dump(response_json, f, indent=5)
def get_next_reddit_response():
response = {}
with open('sample_response.json', 'r') as f:
response = json.load(f)
after = response['data']['after']
print(after)
if '__main__' == __name__:
get_next_reddit_response()
|
import json
import pprint
import requests
SAMPLE_REDDIT_URL = 'http://www.reddit.com/r/cscareerquestions/top.json'
def sample_valid_reddit_response():
r = requests.get(SAMPLE_REDDIT_URL)
response_json = r.json()
if 'data' not in response_json:
print("Trying again")
response_json = sample_valid_reddit_response()
return response_json
def save_sample():
response_json = sample_valid_reddit_response()
del response_json['data']['children']
with open('sample_response.json', 'w+') as f:
json.dump(response_json, f, indent=5)
def get_next_reddit_response():
response = {}
with open('sample_response.json', 'r') as f:
response = json.load(f)
after = response['data']['after']
print(after)
if '__main__' == __name__:
get_next_reddit_response()
|
Make reddit url a constant
|
Make reddit url a constant
|
Python
|
mit
|
RossCarriga/repost-data
|
import json
import pprint
import requests
+ SAMPLE_REDDIT_URL = 'http://www.reddit.com/r/cscareerquestions/top.json'
+
def sample_valid_reddit_response():
- r = requests.get('http://www.reddit.com/r/cscareerquestions/top.json')
+ r = requests.get(SAMPLE_REDDIT_URL)
response_json = r.json()
if 'data' not in response_json:
print("Trying again")
response_json = sample_valid_reddit_response()
return response_json
def save_sample():
response_json = sample_valid_reddit_response()
del response_json['data']['children']
with open('sample_response.json', 'w+') as f:
json.dump(response_json, f, indent=5)
def get_next_reddit_response():
response = {}
with open('sample_response.json', 'r') as f:
response = json.load(f)
after = response['data']['after']
print(after)
if '__main__' == __name__:
get_next_reddit_response()
|
Make reddit url a constant
|
## Code Before:
import json
import pprint
import requests
def sample_valid_reddit_response():
r = requests.get('http://www.reddit.com/r/cscareerquestions/top.json')
response_json = r.json()
if 'data' not in response_json:
print("Trying again")
response_json = sample_valid_reddit_response()
return response_json
def save_sample():
response_json = sample_valid_reddit_response()
del response_json['data']['children']
with open('sample_response.json', 'w+') as f:
json.dump(response_json, f, indent=5)
def get_next_reddit_response():
response = {}
with open('sample_response.json', 'r') as f:
response = json.load(f)
after = response['data']['after']
print(after)
if '__main__' == __name__:
get_next_reddit_response()
## Instruction:
Make reddit url a constant
## Code After:
import json
import pprint
import requests
SAMPLE_REDDIT_URL = 'http://www.reddit.com/r/cscareerquestions/top.json'
def sample_valid_reddit_response():
r = requests.get(SAMPLE_REDDIT_URL)
response_json = r.json()
if 'data' not in response_json:
print("Trying again")
response_json = sample_valid_reddit_response()
return response_json
def save_sample():
response_json = sample_valid_reddit_response()
del response_json['data']['children']
with open('sample_response.json', 'w+') as f:
json.dump(response_json, f, indent=5)
def get_next_reddit_response():
response = {}
with open('sample_response.json', 'r') as f:
response = json.load(f)
after = response['data']['after']
print(after)
if '__main__' == __name__:
get_next_reddit_response()
|
import json
import pprint
import requests
+ SAMPLE_REDDIT_URL = 'http://www.reddit.com/r/cscareerquestions/top.json'
+
def sample_valid_reddit_response():
- r = requests.get('http://www.reddit.com/r/cscareerquestions/top.json')
+ r = requests.get(SAMPLE_REDDIT_URL)
response_json = r.json()
if 'data' not in response_json:
print("Trying again")
response_json = sample_valid_reddit_response()
return response_json
def save_sample():
response_json = sample_valid_reddit_response()
del response_json['data']['children']
with open('sample_response.json', 'w+') as f:
json.dump(response_json, f, indent=5)
def get_next_reddit_response():
response = {}
with open('sample_response.json', 'r') as f:
response = json.load(f)
after = response['data']['after']
print(after)
if '__main__' == __name__:
get_next_reddit_response()
|
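Hoisting the literal into SAMPLE_REDDIT_URL leaves exactly one place to change. Taken one step further, a template constant would parameterize the subreddit as well (a sketch; the template name is an assumption, not part of this repo):

SAMPLE_REDDIT_URL_TEMPLATE = 'http://www.reddit.com/r/{subreddit}/top.json'

def top_json_url(subreddit='cscareerquestions'):
    # One definition site for the endpoint instead of scattered literals.
    return SAMPLE_REDDIT_URL_TEMPLATE.format(subreddit=subreddit)

assert top_json_url() == 'http://www.reddit.com/r/cscareerquestions/top.json'
assert top_json_url('learnpython').endswith('/r/learnpython/top.json')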
181ac9d91d826b1c1a71ec14ff8f500cb79261d2
|
Code/Evaluator.py
|
Code/Evaluator.py
|
import subprocess
ENGINE_BIN = "stockfish"
DEPTH = 20
def evaluate_position(board, depth=DEPTH):
"""Evaluates the board's current position.
Returns the Stockfish scalar score, at the given depth, in centipawns.
"""
engine = subprocess.Popen(ENGINE_BIN, bufsize=0, universal_newlines=True,
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
# take care of initial (credits) line
engine.stdout.readline()
# search from current position to given depth
engine.stdin.write("position fen "+board.fen()+"\n")
engine.stdin.write("go depth "+str(DEPTH)+"\n")
last_line = ""
while True:
line = engine.stdout.readline().strip()
if "bestmove" in line:
break
else:
last_line = line
engine.stdin.write("quit\n")
# score in centipawns
score = last_line.split()[9]
return score
|
import subprocess
import re
ENGINE_BIN = "stockfish"
DEPTH = 20
def evaluate_position(board, depth=DEPTH):
"""Evaluates the board's current position.
Returns the Stockfish scalar score, at the given depth, in centipawns.
"""
engine = subprocess.Popen(ENGINE_BIN, bufsize=0, universal_newlines=True,
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
# take care of initial (credits) line
engine.stdout.readline()
# search from current position to given depth
engine.stdin.write("position fen "+board.fen()+"\n")
engine.stdin.write("go depth "+str(DEPTH)+"\n")
while True:
line = engine.stdout.readline().strip()
if line.startswith("info") and (" depth "+str(DEPTH)) in line \
and "score cp" in line and "bound" not in line:
break
engine.stdin.write("quit\n")
# score in centipawns
matcher = re.match(".*score cp ([0-9]+).*", line)
score = int(matcher.group(1))
return score
|
Correct UCI parsing in board state evaluation function
|
Correct UCI parsing in board state evaluation function
|
Python
|
mit
|
Bojanovski/ChessANN
|
import subprocess
+ import re
ENGINE_BIN = "stockfish"
DEPTH = 20
def evaluate_position(board, depth=DEPTH):
"""Evaluates the board's current position.
Returns the Stockfish scalar score, at the given depth, in centipawns.
"""
engine = subprocess.Popen(ENGINE_BIN, bufsize=0, universal_newlines=True,
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
# take care of initial (credits) line
engine.stdout.readline()
# search from current position to given depth
engine.stdin.write("position fen "+board.fen()+"\n")
engine.stdin.write("go depth "+str(DEPTH)+"\n")
- last_line = ""
while True:
line = engine.stdout.readline().strip()
- if "bestmove" in line:
+ if line.startswith("info") and (" depth "+str(DEPTH)) in line \
+ and "score cp" in line and "bound" not in line:
break
- else:
- last_line = line
engine.stdin.write("quit\n")
# score in centipawns
- score = last_line.split()[9]
-
+ matcher = re.match(".*score cp ([0-9]+).*", line)
+ score = int(matcher.group(1))
return score
|
Correct UCI parsing in board state evaluation function
|
## Code Before:
import subprocess
ENGINE_BIN = "stockfish"
DEPTH = 20
def evaluate_position(board, depth=DEPTH):
"""Evaluates the board's current position.
Returns the Stockfish scalar score, at the given depth, in centipawns.
"""
engine = subprocess.Popen(ENGINE_BIN, bufsize=0, universal_newlines=True,
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
# take care of initial (credits) line
engine.stdout.readline()
# search from current position to given depth
engine.stdin.write("position fen "+board.fen()+"\n")
engine.stdin.write("go depth "+str(DEPTH)+"\n")
last_line = ""
while True:
line = engine.stdout.readline().strip()
if "bestmove" in line:
break
else:
last_line = line
engine.stdin.write("quit\n")
# score in centipawns
score = last_line.split()[9]
return score
## Instruction:
Correct UCI parsing in board state evaluation function
## Code After:
import subprocess
import re
ENGINE_BIN = "stockfish"
DEPTH = 20
def evaluate_position(board, depth=DEPTH):
"""Evaluates the board's current position.
Returns the Stockfish scalar score, at the given depth, in centipawns.
"""
engine = subprocess.Popen(ENGINE_BIN, bufsize=0, universal_newlines=True,
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
# take care of initial (credits) line
engine.stdout.readline()
# search from current position to given depth
engine.stdin.write("position fen "+board.fen()+"\n")
engine.stdin.write("go depth "+str(DEPTH)+"\n")
while True:
line = engine.stdout.readline().strip()
if line.startswith("info") and (" depth "+str(DEPTH)) in line \
and "score cp" in line and "bound" not in line:
break
engine.stdin.write("quit\n")
# score in centipawns
matcher = re.match(".*score cp ([0-9]+).*", line)
score = int(matcher.group(1))
return score
|
import subprocess
+ import re
ENGINE_BIN = "stockfish"
DEPTH = 20
def evaluate_position(board, depth=DEPTH):
"""Evaluates the board's current position.
Returns the Stockfish scalar score, at the given depth, in centipawns.
"""
engine = subprocess.Popen(ENGINE_BIN, bufsize=0, universal_newlines=True,
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
# take care of initial (credits) line
engine.stdout.readline()
# search from current position to given depth
engine.stdin.write("position fen "+board.fen()+"\n")
engine.stdin.write("go depth "+str(DEPTH)+"\n")
- last_line = ""
while True:
line = engine.stdout.readline().strip()
- if "bestmove" in line:
+ if line.startswith("info") and (" depth "+str(DEPTH)) in line \
+ and "score cp" in line and "bound" not in line:
break
- else:
- last_line = line
engine.stdin.write("quit\n")
# score in centipawns
- score = last_line.split()[9]
-
+ matcher = re.match(".*score cp ([0-9]+).*", line)
+ score = int(matcher.group(1))
return score
|
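The corrected parser keys on the "score cp" field of the depth-20 info line. Note that ([0-9]+) only matches non-negative centipawn values; allowing an optional sign would also cover positions where the side to move is behind. A sketch on a made-up but representative UCI line:

import re

# Illustrative engine output; not captured from a real Stockfish run.
line = 'info depth 20 seldepth 26 score cp -34 nodes 1500000 pv e2e4 e7e5'

# An optional leading minus captures negative scores as well.
matcher = re.match(r'.*score cp (-?[0-9]+).*', line)
assert matcher is not None
assert int(matcher.group(1)) == -34  # the committed pattern would not match here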
3ca7c667cbf37499dc959b336b9ff0e88f5d4275
|
dbarray/tests/run.py
|
dbarray/tests/run.py
|
"""From http://stackoverflow.com/a/12260597/400691"""
import sys
from django.conf import settings
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'dbarray',
'HOST': 'localhost'
}
},
INSTALLED_APPS=(
'dbarray.tests',
# 'django.contrib.auth',
# 'django.contrib.contenttypes',
# 'django.contrib.sessions',
# 'django.contrib.admin',
),
)
try:
from django.test.runner import DiscoverRunner
except ImportError:
# Fallback for django < 1.6
from discover_runner import DiscoverRunner
test_runner = DiscoverRunner(verbosity=1)
failures = test_runner.run_tests(['dbarray'])
if failures:
sys.exit(1)
|
"""From http://stackoverflow.com/a/12260597/400691"""
import sys
from django.conf import settings
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'dbarray',
'HOST': 'localhost'
}
},
INSTALLED_APPS=('dbarray.tests',),
)
try:
from django.test.runner import DiscoverRunner
except ImportError:
# Fallback for django < 1.6
from discover_runner import DiscoverRunner
test_runner = DiscoverRunner(verbosity=1)
failures = test_runner.run_tests(['dbarray'])
if failures:
sys.exit(1)
|
Remove commented code.
|
Remove commented code [ci skip].
|
Python
|
bsd-3-clause
|
ecometrica/django-dbarray
|
"""From http://stackoverflow.com/a/12260597/400691"""
import sys
from django.conf import settings
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'dbarray',
'HOST': 'localhost'
}
},
+ INSTALLED_APPS=('dbarray.tests',),
- INSTALLED_APPS=(
- 'dbarray.tests',
- # 'django.contrib.auth',
- # 'django.contrib.contenttypes',
- # 'django.contrib.sessions',
- # 'django.contrib.admin',
- ),
)
+
try:
from django.test.runner import DiscoverRunner
except ImportError:
# Fallback for django < 1.6
from discover_runner import DiscoverRunner
test_runner = DiscoverRunner(verbosity=1)
failures = test_runner.run_tests(['dbarray'])
if failures:
sys.exit(1)
|
Remove commented code.
|
## Code Before:
"""From http://stackoverflow.com/a/12260597/400691"""
import sys
from django.conf import settings
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'dbarray',
'HOST': 'localhost'
}
},
INSTALLED_APPS=(
'dbarray.tests',
# 'django.contrib.auth',
# 'django.contrib.contenttypes',
# 'django.contrib.sessions',
# 'django.contrib.admin',
),
)
try:
from django.test.runner import DiscoverRunner
except ImportError:
# Fallback for django < 1.6
from discover_runner import DiscoverRunner
test_runner = DiscoverRunner(verbosity=1)
failures = test_runner.run_tests(['dbarray'])
if failures:
sys.exit(1)
## Instruction:
Remove commented code.
## Code After:
"""From http://stackoverflow.com/a/12260597/400691"""
import sys
from django.conf import settings
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'dbarray',
'HOST': 'localhost'
}
},
INSTALLED_APPS=('dbarray.tests',),
)
try:
from django.test.runner import DiscoverRunner
except ImportError:
# Fallback for django < 1.6
from discover_runner import DiscoverRunner
test_runner = DiscoverRunner(verbosity=1)
failures = test_runner.run_tests(['dbarray'])
if failures:
sys.exit(1)
|
"""From http://stackoverflow.com/a/12260597/400691"""
import sys
from django.conf import settings
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'dbarray',
'HOST': 'localhost'
}
},
+ INSTALLED_APPS=('dbarray.tests',),
- INSTALLED_APPS=(
- 'dbarray.tests',
- # 'django.contrib.auth',
- # 'django.contrib.contenttypes',
- # 'django.contrib.sessions',
- # 'django.contrib.admin',
- ),
)
+
try:
from django.test.runner import DiscoverRunner
except ImportError:
# Fallback for django < 1.6
from discover_runner import DiscoverRunner
test_runner = DiscoverRunner(verbosity=1)
failures = test_runner.run_tests(['dbarray'])
if failures:
sys.exit(1)
|
64db9a503322ce1ee61c64afbdc38f367c3d6627
|
guardian/testapp/tests/management_test.py
|
guardian/testapp/tests/management_test.py
|
from __future__ import absolute_import
from __future__ import unicode_literals
from django.test import SimpleTestCase
from guardian.compat import get_user_model
from guardian.compat import mock
from guardian.management import create_anonymous_user
mocked_get_init_anon = mock.Mock()
class TestGetAnonymousUser(SimpleTestCase):
@mock.patch('guardian.management.guardian_settings')
def test_uses_custom_function(self, guardian_settings):
path = 'guardian.testapp.tests.management_test.mocked_get_init_anon'
guardian_settings.GET_INIT_ANONYMOUS_USER = path
guardian_settings.ANONYMOUS_USER_ID = 219
User = get_user_model()
anon = mocked_get_init_anon.return_value = mock.Mock()
create_anonymous_user('sender')
mocked_get_init_anon.assert_called_once_with(User)
self.assertEqual(anon.pk, 219)
anon.save.assert_called_once_with()
|
from __future__ import absolute_import
from __future__ import unicode_literals
from guardian.compat import get_user_model
from guardian.compat import mock
from guardian.compat import unittest
from guardian.management import create_anonymous_user
import django
mocked_get_init_anon = mock.Mock()
class TestGetAnonymousUser(unittest.TestCase):
@unittest.skipUnless(django.VERSION >= (1, 5), "Django >= 1.5 only")
@mock.patch('guardian.management.guardian_settings')
def test_uses_custom_function(self, guardian_settings):
path = 'guardian.testapp.tests.management_test.mocked_get_init_anon'
guardian_settings.GET_INIT_ANONYMOUS_USER = path
guardian_settings.ANONYMOUS_USER_ID = 219
User = get_user_model()
anon = mocked_get_init_anon.return_value = mock.Mock()
create_anonymous_user('sender')
mocked_get_init_anon.assert_called_once_with(User)
self.assertEqual(anon.pk, 219)
anon.save.assert_called_once_with()
|
Use unittest.TestCase instead of SimpleTestCase
|
Use unittest.TestCase instead of SimpleTestCase
|
Python
|
bsd-2-clause
|
calvinpy/django-guardian,vovanbo/django-guardian,loop0/django-guardian,frwickst/django-guardian,thedrow/django-guardian,emperorcezar/django-guardian,alexshin/django-guardian,calvinpy/django-guardian,flisky/django-guardian,vitan/django-guardian,sustainingtechnologies/django-guardian,rfleschenberg/django-guardian,frwickst/django-guardian,alexshin/django-guardian,RDXT/django-guardian,lukaszb/django-guardian,benkonrath/django-guardian,brianmay/django-guardian,flisky/django-guardian,rmgorman/django-guardian,vitan/django-guardian,rmgorman/django-guardian,onaio/django-guardian,thedrow/django-guardian,alexshin/django-guardian,kostko/django-guardian,EnHatch/django-guardian,TailorDev/django-guardian,calvinpy/django-guardian,rfleschenberg/django-guardian,EnHatch/django-guardian,giocalitri/django-guardian,hunter007/django-guardian,sustainingtechnologies/django-guardian,brianmay/django-guardian,sindrig/django-guardian,RDXT/django-guardian,denilsonsa/django-guardian,sindrig/django-guardian,patgmiller/django-guardian,loop0/django-guardian,EnHatch/django-guardian,benkonrath/django-guardian,vovanbo/django-guardian,kostko/django-guardian,thedrow/django-guardian,EnHatch/django-guardian,benkonrath/django-guardian,loop0/django-guardian,vovanbo/django-guardian,giocalitri/django-guardian,onaio/django-guardian,TailorDev/django-guardian,patgmiller/django-guardian,patgmiller/django-guardian,vitan/django-guardian,sustainingtechnologies/django-guardian,haxo/django-guardian,frwickst/django-guardian,haxo/django-guardian,flisky/django-guardian,haxo/django-guardian,infoxchange/django-guardian,emperorcezar/django-guardian,denilsonsa/django-guardian,lukaszb/django-guardian,brianmay/django-guardian,thedrow/django-guardian,flisky/django-guardian,TailorDev/django-guardian,infoxchange/django-guardian,sustainingtechnologies/django-guardian,lukaszb/django-guardian,hunter007/django-guardian,onaio/django-guardian,emperorcezar/django-guardian,giocalitri/django-guardian,denilsonsa/django-guardian,infoxchange/django-guardian,emperorcezar/django-guardian,rmgorman/django-guardian,sindrig/django-guardian,kostko/django-guardian,rfleschenberg/django-guardian,loop0/django-guardian,kostko/django-guardian,RDXT/django-guardian,vitan/django-guardian,hunter007/django-guardian,denilsonsa/django-guardian,haxo/django-guardian
|
from __future__ import absolute_import
from __future__ import unicode_literals
- from django.test import SimpleTestCase
from guardian.compat import get_user_model
from guardian.compat import mock
+ from guardian.compat import unittest
from guardian.management import create_anonymous_user
+ import django
mocked_get_init_anon = mock.Mock()
- class TestGetAnonymousUser(SimpleTestCase):
+ class TestGetAnonymousUser(unittest.TestCase):
+ @unittest.skipUnless(django.VERSION >= (1, 5), "Django >= 1.5 only")
@mock.patch('guardian.management.guardian_settings')
def test_uses_custom_function(self, guardian_settings):
path = 'guardian.testapp.tests.management_test.mocked_get_init_anon'
guardian_settings.GET_INIT_ANONYMOUS_USER = path
guardian_settings.ANONYMOUS_USER_ID = 219
User = get_user_model()
anon = mocked_get_init_anon.return_value = mock.Mock()
create_anonymous_user('sender')
mocked_get_init_anon.assert_called_once_with(User)
self.assertEqual(anon.pk, 219)
anon.save.assert_called_once_with()
|
Use unittest.TestCase instead of SimpleTestCase
|
## Code Before:
from __future__ import absolute_import
from __future__ import unicode_literals
from django.test import SimpleTestCase
from guardian.compat import get_user_model
from guardian.compat import mock
from guardian.management import create_anonymous_user
mocked_get_init_anon = mock.Mock()
class TestGetAnonymousUser(SimpleTestCase):
@mock.patch('guardian.management.guardian_settings')
def test_uses_custom_function(self, guardian_settings):
path = 'guardian.testapp.tests.management_test.mocked_get_init_anon'
guardian_settings.GET_INIT_ANONYMOUS_USER = path
guardian_settings.ANONYMOUS_USER_ID = 219
User = get_user_model()
anon = mocked_get_init_anon.return_value = mock.Mock()
create_anonymous_user('sender')
mocked_get_init_anon.assert_called_once_with(User)
self.assertEqual(anon.pk, 219)
anon.save.assert_called_once_with()
## Instruction:
Use unittest.TestCase instead of SimpleTestCase
## Code After:
from __future__ import absolute_import
from __future__ import unicode_literals
from guardian.compat import get_user_model
from guardian.compat import mock
from guardian.compat import unittest
from guardian.management import create_anonymous_user
import django
mocked_get_init_anon = mock.Mock()
class TestGetAnonymousUser(unittest.TestCase):
@unittest.skipUnless(django.VERSION >= (1, 5), "Django >= 1.5 only")
@mock.patch('guardian.management.guardian_settings')
def test_uses_custom_function(self, guardian_settings):
path = 'guardian.testapp.tests.management_test.mocked_get_init_anon'
guardian_settings.GET_INIT_ANONYMOUS_USER = path
guardian_settings.ANONYMOUS_USER_ID = 219
User = get_user_model()
anon = mocked_get_init_anon.return_value = mock.Mock()
create_anonymous_user('sender')
mocked_get_init_anon.assert_called_once_with(User)
self.assertEqual(anon.pk, 219)
anon.save.assert_called_once_with()
|
from __future__ import absolute_import
from __future__ import unicode_literals
- from django.test import SimpleTestCase
from guardian.compat import get_user_model
from guardian.compat import mock
+ from guardian.compat import unittest
from guardian.management import create_anonymous_user
+ import django
mocked_get_init_anon = mock.Mock()
- class TestGetAnonymousUser(SimpleTestCase):
? ^ ^^^
+ class TestGetAnonymousUser(unittest.TestCase):
? ^^ ^^ +++
+ @unittest.skipUnless(django.VERSION >= (1, 5), "Django >= 1.5 only")
@mock.patch('guardian.management.guardian_settings')
def test_uses_custom_function(self, guardian_settings):
path = 'guardian.testapp.tests.management_test.mocked_get_init_anon'
guardian_settings.GET_INIT_ANONYMOUS_USER = path
guardian_settings.ANONYMOUS_USER_ID = 219
User = get_user_model()
anon = mocked_get_init_anon.return_value = mock.Mock()
create_anonymous_user('sender')
mocked_get_init_anon.assert_called_once_with(User)
self.assertEqual(anon.pk, 219)
anon.save.assert_called_once_with()
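
A minimal sketch of the version-gating pattern this commit adopts, assuming only the standard library and Django; the class and test names are illustrative. The skipUnless condition is evaluated at import time, so the gate itself must not touch APIs that are absent on older versions.

import unittest

import django

class VersionGatedTest(unittest.TestCase):

    # The condition only inspects the version tuple; any 1.5-only
    # imports stay inside the test body, where the skip protects them.
    @unittest.skipUnless(django.VERSION >= (1, 5), "Django >= 1.5 only")
    def test_custom_user_support(self):
        from django.contrib.auth import get_user_model
        self.assertTrue(callable(get_user_model))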
|
aa6a74abc382bb6be86fa4a91132a9be51f365a5
|
tests/test_data_checksums.py
|
tests/test_data_checksums.py
|
""" test data_checksums"""
from nose.tools import assert_equal
def test_data_checksums():
from pyne.data import data_checksums
assert_equal(len(data_checksums), 6)
assert_equal(data_checksums['/neutron/simple_xs'], '3d6e086977783dcdf07e5c6b0c2416be')
|
""" test data_checksums and hashing functions"""
import os
from nose.tools import assert_equal, assert_true
import pyne
# These tests require nuc_data
if not os.path.isfile(pyne.nuc_data):
raise RuntimeError("Tests require nuc_data.h5. Please run nuc_data_make.")
def test_data_checksums():
from pyne.data import data_checksums
assert_equal(len(data_checksums), 6)
assert_equal(data_checksums['/neutron/simple_xs'], '3d6e086977783dcdf07e5c6b0c2416be')
def test_internal_hashes():
from pyne.dbgen import hashtools
hashtools.set_internal_hashes(pyne.nuc_data)
for item, val in hashtools.check_internal_hashes(pyne.nuc_data):
assert_true(val)
|
Add test of internal hashes and guarded pyne.nuc_data use
|
Add test of internal hashes and guarded pyne.nuc_data use
|
Python
|
bsd-3-clause
|
pyne/simplesim
|
- """ test data_checksums"""
- from nose.tools import assert_equal
+ """ test data_checksums and hashing functions"""
+ import os
+ from nose.tools import assert_equal, assert_true
+
+ import pyne
+
+ # These tests require nuc_data
+ if not os.path.isfile(pyne.nuc_data):
+ raise RuntimeError("Tests require nuc_data.h5. Please run nuc_data_make.")
def test_data_checksums():
from pyne.data import data_checksums
assert_equal(len(data_checksums), 6)
assert_equal(data_checksums['/neutron/simple_xs'], '3d6e086977783dcdf07e5c6b0c2416be')
+
+ def test_internal_hashes():
+ from pyne.dbgen import hashtools
+ hashtools.set_internal_hashes(pyne.nuc_data)
+ for item, val in hashtools.check_internal_hashes(pyne.nuc_data):
+ assert_true(val)
+
+
|
Add test of internal hashes and guarded pyne.nuc_data use
|
## Code Before:
""" test data_checksums"""
from nose.tools import assert_equal
def test_data_checksums():
from pyne.data import data_checksums
assert_equal(len(data_checksums), 6)
assert_equal(data_checksums['/neutron/simple_xs'], '3d6e086977783dcdf07e5c6b0c2416be')
## Instruction:
Add test of internal hashes and guarded pyne.nuc_data use
## Code After:
""" test data_checksums and hashing functions"""
import os
from nose.tools import assert_equal, assert_true
import pyne
# These tests require nuc_data
if not os.path.isfile(pyne.nuc_data):
raise RuntimeError("Tests require nuc_data.h5. Please run nuc_data_make.")
def test_data_checksums():
from pyne.data import data_checksums
assert_equal(len(data_checksums), 6)
assert_equal(data_checksums['/neutron/simple_xs'], '3d6e086977783dcdf07e5c6b0c2416be')
def test_internal_hashes():
from pyne.dbgen import hashtools
hashtools.set_internal_hashes(pyne.nuc_data)
for item, val in hashtools.check_internal_hashes(pyne.nuc_data):
assert_true(val)
|
- """ test data_checksums"""
- from nose.tools import assert_equal
+ """ test data_checksums and hashing functions"""
+ import os
+ from nose.tools import assert_equal, assert_true
+
+ import pyne
+
+ # These tests require nuc_data
+ if not os.path.isfile(pyne.nuc_data):
+ raise RuntimeError("Tests require nuc_data.h5. Please run nuc_data_make.")
def test_data_checksums():
from pyne.data import data_checksums
assert_equal(len(data_checksums), 6)
assert_equal(data_checksums['/neutron/simple_xs'], '3d6e086977783dcdf07e5c6b0c2416be')
+
+ def test_internal_hashes():
+ from pyne.dbgen import hashtools
+ hashtools.set_internal_hashes(pyne.nuc_data)
+ for item, val in hashtools.check_internal_hashes(pyne.nuc_data):
+ assert_true(val)
+
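
The hashtools internals are not shown in this record, so the following is only a sketch of the import-time guard plus a checksum helper, using hashlib as a stand-in; DATA_FILE is a hypothetical path.

import hashlib
import os

DATA_FILE = "nuc_data.h5"  # hypothetical path, for illustration only

# Same import-time guard as the test module above: fail fast with a
# clear message instead of an obscure error deep inside a test.
if not os.path.isfile(DATA_FILE):
    raise RuntimeError("Tests require %s. Please run nuc_data_make." % DATA_FILE)

def file_md5(path, chunk_size=2 ** 20):
    """MD5 of a file, read in chunks so large HDF5 files fit in memory."""
    digest = hashlib.md5()
    with open(path, "rb") as handle:
        for block in iter(lambda: handle.read(chunk_size), b""):
            digest.update(block)
    return digest.hexdigest()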
|
cc8cc05480e85c9a66450f1655083e87d00ba3f4
|
usersettings/shortcuts.py
|
usersettings/shortcuts.py
|
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
def get_usersettings_model():
"""
Returns the ``UserSettings`` model that is active in this project.
"""
from django.db.models import get_model
try:
app_label, model_name = settings.USERSETTINGS_MODEL.split('.')
except ValueError:
raise ImproperlyConfigured('USERSETTINGS_MODEL must be of the '
'form "app_label.model_name"')
usersettings_model = get_model(app_label, model_name)
if usersettings_model is None:
raise ImproperlyConfigured('USERSETTINGS_MODEL refers to model "%s" that has '
'not been installed' % settings.USERSETTINGS_MODEL)
return usersettings_model
def get_current_usersettings():
"""
Returns the current ``UserSettings`` based on
the SITE_ID in the project's settings
"""
USERSETTINGS_MODEL = get_usersettings_model()
current_usersettings = USERSETTINGS_MODEL.objects.get_current()
return current_usersettings
|
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
def get_usersettings_model():
"""
Returns the ``UserSettings`` model that is active in this project.
"""
from django.db.models import get_model
try:
app_label, model_name = settings.USERSETTINGS_MODEL.split('.')
except ValueError:
raise ImproperlyConfigured('USERSETTINGS_MODEL must be of the '
'form "app_label.model_name"')
usersettings_model = get_model(app_label, model_name)
if usersettings_model is None:
raise ImproperlyConfigured('USERSETTINGS_MODEL refers to model "%s" that has '
'not been installed' % settings.USERSETTINGS_MODEL)
return usersettings_model
def get_current_usersettings():
"""
Returns the current ``UserSettings`` based on
the SITE_ID in the project's settings
"""
USERSETTINGS_MODEL = get_usersettings_model()
try:
current_usersettings = USERSETTINGS_MODEL.objects.get_current()
except USERSETTINGS_MODEL.DoesNotExist:
current_usersettings = None
return current_usersettings
|
Update 'get_current_usersettings' to catch 'DoesNotExist' error
|
Update 'get_current_usersettings' to catch 'DoesNotExist' error
|
Python
|
bsd-3-clause
|
mishbahr/django-usersettings2,mishbahr/django-usersettings2
|
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
def get_usersettings_model():
"""
Returns the ``UserSettings`` model that is active in this project.
"""
from django.db.models import get_model
try:
app_label, model_name = settings.USERSETTINGS_MODEL.split('.')
except ValueError:
raise ImproperlyConfigured('USERSETTINGS_MODEL must be of the '
'form "app_label.model_name"')
usersettings_model = get_model(app_label, model_name)
if usersettings_model is None:
raise ImproperlyConfigured('USERSETTINGS_MODEL refers to model "%s" that has '
'not been installed' % settings.USERSETTINGS_MODEL)
return usersettings_model
def get_current_usersettings():
"""
Returns the current ``UserSettings`` based on
the SITE_ID in the project's settings
"""
USERSETTINGS_MODEL = get_usersettings_model()
+ try:
- current_usersettings = USERSETTINGS_MODEL.objects.get_current()
+ current_usersettings = USERSETTINGS_MODEL.objects.get_current()
+ except USERSETTINGS_MODEL.DoesNotExist:
+ current_usersettings = None
return current_usersettings
|
Update 'get_current_usersettings' to catch 'DoesNotExist' error
|
## Code Before:
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
def get_usersettings_model():
"""
Returns the ``UserSettings`` model that is active in this project.
"""
from django.db.models import get_model
try:
app_label, model_name = settings.USERSETTINGS_MODEL.split('.')
except ValueError:
raise ImproperlyConfigured('USERSETTINGS_MODEL must be of the '
'form "app_label.model_name"')
usersettings_model = get_model(app_label, model_name)
if usersettings_model is None:
raise ImproperlyConfigured('USERSETTINGS_MODEL refers to model "%s" that has '
'not been installed' % settings.USERSETTINGS_MODEL)
return usersettings_model
def get_current_usersettings():
"""
Returns the current ``UserSettings`` based on
the SITE_ID in the project's settings
"""
USERSETTINGS_MODEL = get_usersettings_model()
current_usersettings = USERSETTINGS_MODEL.objects.get_current()
return current_usersettings
## Instruction:
Update 'get_current_usersettings' to catch 'DoesNotExist' error
## Code After:
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
def get_usersettings_model():
"""
Returns the ``UserSettings`` model that is active in this project.
"""
from django.db.models import get_model
try:
app_label, model_name = settings.USERSETTINGS_MODEL.split('.')
except ValueError:
raise ImproperlyConfigured('USERSETTINGS_MODEL must be of the '
'form "app_label.model_name"')
usersettings_model = get_model(app_label, model_name)
if usersettings_model is None:
raise ImproperlyConfigured('USERSETTINGS_MODEL refers to model "%s" that has '
'not been installed' % settings.USERSETTINGS_MODEL)
return usersettings_model
def get_current_usersettings():
"""
Returns the current ``UserSettings`` based on
the SITE_ID in the project's settings
"""
USERSETTINGS_MODEL = get_usersettings_model()
try:
current_usersettings = USERSETTINGS_MODEL.objects.get_current()
except USERSETTINGS_MODEL.DoesNotExist:
current_usersettings = None
return current_usersettings
|
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
def get_usersettings_model():
"""
Returns the ``UserSettings`` model that is active in this project.
"""
from django.db.models import get_model
try:
app_label, model_name = settings.USERSETTINGS_MODEL.split('.')
except ValueError:
raise ImproperlyConfigured('USERSETTINGS_MODEL must be of the '
'form "app_label.model_name"')
usersettings_model = get_model(app_label, model_name)
if usersettings_model is None:
raise ImproperlyConfigured('USERSETTINGS_MODEL refers to model "%s" that has '
'not been installed' % settings.USERSETTINGS_MODEL)
return usersettings_model
def get_current_usersettings():
"""
Returns the current ``UserSettings`` based on
the SITE_ID in the project's settings
"""
USERSETTINGS_MODEL = get_usersettings_model()
+ try:
- current_usersettings = USERSETTINGS_MODEL.objects.get_current()
+ current_usersettings = USERSETTINGS_MODEL.objects.get_current()
? ++++
+ except USERSETTINGS_MODEL.DoesNotExist:
+ current_usersettings = None
return current_usersettings
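
The try/except turns a missing row into None instead of an unhandled exception. A generic sketch of that get-or-none pattern follows; the helper is assumed, not part of django-usersettings2.

def get_or_none(manager_method, *args, **kwargs):
    """Call a bound manager method, returning None instead of raising
    the model's DoesNotExist (sketch only)."""
    model = manager_method.__self__.model
    try:
        return manager_method(*args, **kwargs)
    except model.DoesNotExist:
        return None

# Usage, mirroring get_current_usersettings():
#   current = get_or_none(get_usersettings_model().objects.get_current)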
|
663f839ef539759143369f84289b6e27f21bdcce
|
setup.py
|
setup.py
|
from distutils.core import setup
description = """
The Jersey core libraries provide common abstractions used by Jersey software.
"""
def getVersion():
import os
packageSeedFile = os.path.join("src", "lib", "_version.py")
ns = {"__name__": __name__, }
execfile(packageSeedFile, ns)
return ns["version"]
version = getVersion()
setup(
name = "jersey",
version = version.short(),
description = "Jersey Core Libraries",
long_description = description,
author = "Oliver Gould", author_email = "[email protected]",
maintainer = "Jersey-Devel", maintainer_email = "[email protected]",
package_dir = {
"jersey": "src/lib",
},
packages = [
"jersey",
"jersey.cases",
],
py_modules = [
"jersey._version",
"jersey.cli", "jersey.cases.test_cli",
"jersey.inet", "jersey.cases.test_inet",
"jersey.log", "jersey.cases.test_log",
],
provides = [
"jersey",
"jersey.cli",
"jersey.log",
],
requires = [
"twisted (>=9.0.0)",
],
)
|
from distutils.core import setup
description = """
The Jersey core libraries provide common abstractions used by Jersey software.
"""
def getVersion():
import os
packageSeedFile = os.path.join("lib", "_version.py")
ns = {"__name__": __name__, }
execfile(packageSeedFile, ns)
return ns["version"]
version = getVersion()
setup(
name = "jersey",
version = version.short(),
description = "Jersey Core Libraries",
long_description = description,
author = "Oliver Gould", author_email = "[email protected]",
maintainer = "Jersey-Devel", maintainer_email = "[email protected]",
package_dir = {
"jersey": "lib",
},
packages = [
"jersey",
"jersey.cases",
],
py_modules = [
"jersey._version",
"jersey.cli", "jersey.cases.test_cli",
"jersey.inet", "jersey.cases.test_inet",
"jersey.log", "jersey.cases.test_log",
],
provides = [
"jersey",
"jersey.cli",
"jersey.log",
],
requires = [
"twisted (>=9.0.0)",
],
)
|
Use lib/ instead of src/lib.
|
Use lib/ instead of src/lib.
|
Python
|
bsd-3-clause
|
olix0r/tx-jersey
|
from distutils.core import setup
description = """
The Jersey core libraries provide common abstractions used by Jersey software.
"""
def getVersion():
import os
- packageSeedFile = os.path.join("src", "lib", "_version.py")
+ packageSeedFile = os.path.join("lib", "_version.py")
ns = {"__name__": __name__, }
execfile(packageSeedFile, ns)
return ns["version"]
version = getVersion()
setup(
name = "jersey",
version = version.short(),
description = "Jersey Core Libraries",
long_description = description,
author = "Oliver Gould", author_email = "[email protected]",
maintainer = "Jersey-Devel", maintainer_email = "[email protected]",
package_dir = {
- "jersey": "src/lib",
+ "jersey": "lib",
},
packages = [
"jersey",
"jersey.cases",
],
py_modules = [
"jersey._version",
"jersey.cli", "jersey.cases.test_cli",
"jersey.inet", "jersey.cases.test_inet",
"jersey.log", "jersey.cases.test_log",
],
provides = [
"jersey",
"jersey.cli",
"jersey.log",
],
requires = [
"twisted (>=9.0.0)",
],
)
|
Use lib/ instead of src/lib.
|
## Code Before:
from distutils.core import setup
description = """
The Jersey core libraries provide common abstractions used by Jersey software.
"""
def getVersion():
import os
packageSeedFile = os.path.join("src", "lib", "_version.py")
ns = {"__name__": __name__, }
execfile(packageSeedFile, ns)
return ns["version"]
version = getVersion()
setup(
name = "jersey",
version = version.short(),
description = "Jersey Core Libraries",
long_description = description,
author = "Oliver Gould", author_email = "[email protected]",
maintainer = "Jersey-Devel", maintainer_email = "[email protected]",
package_dir = {
"jersey": "src/lib",
},
packages = [
"jersey",
"jersey.cases",
],
py_modules = [
"jersey._version",
"jersey.cli", "jersey.cases.test_cli",
"jersey.inet", "jersey.cases.test_inet",
"jersey.log", "jersey.cases.test_log",
],
provides = [
"jersey",
"jersey.cli",
"jersey.log",
],
requires = [
"twisted (>=9.0.0)",
],
)
## Instruction:
Use lib/ instead of src/lib.
## Code After:
from distutils.core import setup
description = """
The Jersey core libraries provide common abstractions used by Jersey software.
"""
def getVersion():
import os
packageSeedFile = os.path.join("lib", "_version.py")
ns = {"__name__": __name__, }
execfile(packageSeedFile, ns)
return ns["version"]
version = getVersion()
setup(
name = "jersey",
version = version.short(),
description = "Jersey Core Libraries",
long_description = description,
author = "Oliver Gould", author_email = "[email protected]",
maintainer = "Jersey-Devel", maintainer_email = "[email protected]",
package_dir = {
"jersey": "lib",
},
packages = [
"jersey",
"jersey.cases",
],
py_modules = [
"jersey._version",
"jersey.cli", "jersey.cases.test_cli",
"jersey.inet", "jersey.cases.test_inet",
"jersey.log", "jersey.cases.test_log",
],
provides = [
"jersey",
"jersey.cli",
"jersey.log",
],
requires = [
"twisted (>=9.0.0)",
],
)
|
from distutils.core import setup
description = """
The Jersey core libraries provide common abstractions used by Jersey software.
"""
def getVersion():
import os
- packageSeedFile = os.path.join("src", "lib", "_version.py")
? -------
+ packageSeedFile = os.path.join("lib", "_version.py")
ns = {"__name__": __name__, }
execfile(packageSeedFile, ns)
return ns["version"]
version = getVersion()
setup(
name = "jersey",
version = version.short(),
description = "Jersey Core Libraries",
long_description = description,
author = "Oliver Gould", author_email = "[email protected]",
maintainer = "Jersey-Devel", maintainer_email = "[email protected]",
package_dir = {
- "jersey": "src/lib",
? ----
+ "jersey": "lib",
},
packages = [
"jersey",
"jersey.cases",
],
py_modules = [
"jersey._version",
"jersey.cli", "jersey.cases.test_cli",
"jersey.inet", "jersey.cases.test_inet",
"jersey.log", "jersey.cases.test_log",
],
provides = [
"jersey",
"jersey.cli",
"jersey.log",
],
requires = [
"twisted (>=9.0.0)",
],
)
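
execfile() is Python 2 only; a sketch of the same version-seeding idea that also runs on Python 3 (function and variable names are illustrative):

import os

def get_version(seed=os.path.join("lib", "_version.py")):
    # exec(compile(...)) replaces the Python-2-only execfile() call.
    namespace = {"__name__": __name__}
    with open(seed) as handle:
        exec(compile(handle.read(), seed, "exec"), namespace)
    return namespace["version"]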
|
bdc0466c63347280fbd8bc8c30fb07f294200194
|
client/third_party/idna/__init__.py
|
client/third_party/idna/__init__.py
|
def encode(host, uts46):
return unicode(host)
|
from encodings import idna
def encode(host, uts46=False): # pylint: disable=unused-argument
# Used by urllib3
return idna.ToASCII(host)
def decode(host):
# Used by cryptography/hazmat/backends/openssl/x509.py
return idna.ToUnicode(host)
|
Change idna stub to use python's default
|
[client] Change idna stub to use python's default
Fix a regression from 690b8ae29be2ca3b4782fa6ad0e7f2454443c38d which broke
select bots running inside docker.
The new stub is still simpler than https://pypi.org/project/idna/ and lighter
weight, but much better than ignoring the "xn--" encoding, as was done
previously. As per the project home page:
This acts as a suitable replacement for the “encodings.idna” module that comes
with the Python standard library, but only supports the old, deprecated IDNA
specification (RFC 3490).
In practice, we don't expect to use non-ASCII hostnames, so it's not a big deal
for us.
decode() is required by openssl/x509.py.
[email protected]
Bug: 916644
Change-Id: Ia999a56b981d943e2f3d942f83e40d40e1bb805b
Reviewed-on: https://chromium-review.googlesource.com/c/infra/luci/luci-py/+/1573244
Reviewed-by: Marc-Antoine Ruel <[email protected]>
Commit-Queue: Marc-Antoine Ruel <[email protected]>
|
Python
|
apache-2.0
|
luci/luci-py,luci/luci-py,luci/luci-py,luci/luci-py
|
- def encode(host, uts46):
- return unicode(host)
+ from encodings import idna
+
+
+ def encode(host, uts46=False): # pylint: disable=unused-argument
+ # Used by urllib3
+ return idna.ToASCII(host)
+
+
+ def decode(host):
+ # Used by cryptography/hazmat/backends/openssl/x509.py
+ return idna.ToUnicode(host)
+
|
Change idna stub to use python's default
|
## Code Before:
def encode(host, uts46):
return unicode(host)
## Instruction:
Change idna stub to use python's default
## Code After:
from encodings import idna
def encode(host, uts46=False): # pylint: disable=unused-argument
# Used by urllib3
return idna.ToASCII(host)
def decode(host):
# Used by cryptography/hazmat/backends/openssl/x509.py
return idna.ToUnicode(host)
|
+
+ from encodings import idna
+
+
+ def encode(host, uts46=False): # pylint: disable=unused-argument
+ # Used by urllib3
+ return idna.ToASCII(host)
+
+
- def encode(host, uts46):
? - -------
+ def decode(host):
? +
+ # Used by cryptography/hazmat/backends/openssl/x509.py
- return unicode(host)
? ^
+ return idna.ToUnicode(host)
? ^^^^^^^^
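
A round-trip sketch of the new stub's behaviour, assuming the host is plain ASCII (the case the commit message says matters here); encodings.idna implements the older RFC 3490 rules.

from encodings import idna

ascii_host = idna.ToASCII(u"example.com")    # b"example.com" on Python 3
unicode_host = idna.ToUnicode(ascii_host)    # u"example.com"
assert unicode_host == u"example.com"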
|
2277e8412fbd92c26cd822f389ecb7e099519da4
|
.conda/merge_dups.py
|
.conda/merge_dups.py
|
import yaml
linux = yaml.load(open('data_linux-64.yml'))
res = yaml.load(open('data_osx-.yml'))
res.extend(linux)
# Remove duplicates
unique_packages = {}
for package in res:
# This information is the unique portion, so we key on that
key = '|'.join([package[x] for x in ('url', 'version', 'name')])
# We turn the architecture item into a list.
if key in unique_packages:
unique_packages[key]['arch'].append(package['arch'])
else:
unique_packages[key] = package
unique_packages[key]['arch'] = [unique_packages[key]['arch']]
res = []
for item in unique_packages.values():
if len(item['arch']) == 1:
# If there is only one arch, then we have a platform specific URL,
# since otherwise we would have generated an arch that contains both
# linux + osx
item['arch'] = item['arch'][0]
res.append(item)
else:
# Here we have two or more archs (ideally; we don't check conditions
# like 0 arches)
item['arch'] = 'src'
res.append(item)
with open('data.yml', 'w') as outfile:
yaml.safe_dump(res, outfile, default_flow_style=False)
|
import yaml
linux = yaml.load(open('data_linux-64.yml', 'r'))
res = yaml.load(open('data_osx-.yml', 'r'))
res.extend(linux)
# Remove duplicates
unique_packages = {}
for package in res:
# This information is the unique portion, so we key on that
key_data = [
package['version'],
package['name']
]
if isinstance(package['url'], list):
key_data += package['url']
else:
key_data.append(package['url'])
key = '|'.join(key_data)
# We turn the architecture item into a list.
if key in unique_packages:
unique_packages[key]['arch'].append(package['arch'])
else:
unique_packages[key] = package
unique_packages[key]['arch'] = [unique_packages[key]['arch']]
res = []
for item in unique_packages.values():
if len(item['arch']) == 1:
# If there is only one arch, then we have a platform specific URL,
# since otherwise we would have generated an arch that contains both
# linux + osx
item['arch'] = item['arch'][0]
res.append(item)
else:
# Here we have two or more archs (ideally; we don't check conditions
# like 0 arches)
item['arch'] = 'src'
res.append(item)
with open('data.yml', 'w') as outfile:
yaml.safe_dump(res, outfile, default_flow_style=False)
|
Handle lists of URLs like in r-ada
|
Handle lists of URLs like in r-ada
|
Python
|
mit
|
galaxyproject/cargo-port,galaxyproject/cargo-port,erasche/community-package-cache,erasche/community-package-cache,erasche/community-package-cache
|
import yaml
- linux = yaml.load(open('data_linux-64.yml'))
+ linux = yaml.load(open('data_linux-64.yml', 'r'))
- res = yaml.load(open('data_osx-.yml'))
+ res = yaml.load(open('data_osx-.yml', 'r'))
res.extend(linux)
# Remove duplicates
unique_packages = {}
for package in res:
# This information is the unique portion, so we key on that
- key = '|'.join([package[x] for x in ('url', 'version', 'name')])
+ key_data = [
+ package['version'],
+ package['name']
+ ]
+
+ if isinstance(package['url'], list):
+ key_data += package['url']
+ else:
+ key_data.append(package['url'])
+
+ key = '|'.join(key_data)
# We turn the architecture item into a list.
if key in unique_packages:
unique_packages[key]['arch'].append(package['arch'])
else:
unique_packages[key] = package
unique_packages[key]['arch'] = [unique_packages[key]['arch']]
res = []
for item in unique_packages.values():
if len(item['arch']) == 1:
# If there is only one arch, then we have a platform specific URL,
# since otherwise we would have generated an arch that contains both
# linux + osx
item['arch'] = item['arch'][0]
res.append(item)
else:
# Here we have two or more archs (ideally; we don't check conditions
# like 0 arches)
item['arch'] = 'src'
res.append(item)
with open('data.yml', 'w') as outfile:
yaml.safe_dump(res, outfile, default_flow_style=False)
|
Handle lists of URLs like in r-ada
|
## Code Before:
import yaml
linux = yaml.load(open('data_linux-64.yml'))
res = yaml.load(open('data_osx-.yml'))
res.extend(linux)
# Remove duplicates
unique_packages = {}
for package in res:
# This information is the unique portion, so we key on that
key = '|'.join([package[x] for x in ('url', 'version', 'name')])
# We turn the architecture item into a list.
if key in unique_packages:
unique_packages[key]['arch'].append(package['arch'])
else:
unique_packages[key] = package
unique_packages[key]['arch'] = [unique_packages[key]['arch']]
res = []
for item in unique_packages.values():
if len(item['arch']) == 1:
# If there is only one arch, then we have a platform specific URL,
# since otherwise we would have generated an arch that contains both
# linux + osx
item['arch'] = item['arch'][0]
res.append(item)
else:
# Here we have two or more archs (ideally; we don't check conditions
# like 0 arches)
item['arch'] = 'src'
res.append(item)
with open('data.yml', 'w') as outfile:
yaml.safe_dump(res, outfile, default_flow_style=False)
## Instruction:
Handle lists of URLs like in r-ada
## Code After:
import yaml
linux = yaml.load(open('data_linux-64.yml', 'r'))
res = yaml.load(open('data_osx-.yml', 'r'))
res.extend(linux)
# Remove duplicates
unique_packages = {}
for package in res:
# This information is the unique portion, so we key on that
key_data = [
package['version'],
package['name']
]
if isinstance(package['url'], list):
key_data += package['url']
else:
key_data.append(package['url'])
key = '|'.join(key_data)
# We turn the architecture item into a list.
if key in unique_packages:
unique_packages[key]['arch'].append(package['arch'])
else:
unique_packages[key] = package
unique_packages[key]['arch'] = [unique_packages[key]['arch']]
res = []
for item in unique_packages.values():
if len(item['arch']) == 1:
# If there is only one arch, then we have a platform specific URL,
# since otherwise we would have generated an arch that contains both
# linux + osx
item['arch'] = item['arch'][0]
res.append(item)
else:
# Here we have two or more archs (ideally; we don't check conditions
# like 0 arches)
item['arch'] = 'src'
res.append(item)
with open('data.yml', 'w') as outfile:
yaml.safe_dump(res, outfile, default_flow_style=False)
|
import yaml
- linux = yaml.load(open('data_linux-64.yml'))
+ linux = yaml.load(open('data_linux-64.yml', 'r'))
? +++++
- res = yaml.load(open('data_osx-.yml'))
+ res = yaml.load(open('data_osx-.yml', 'r'))
? +++++
res.extend(linux)
# Remove duplicates
unique_packages = {}
for package in res:
# This information is the unique portion, so we key on that
- key = '|'.join([package[x] for x in ('url', 'version', 'name')])
+ key_data = [
+ package['version'],
+ package['name']
+ ]
+
+ if isinstance(package['url'], list):
+ key_data += package['url']
+ else:
+ key_data.append(package['url'])
+
+ key = '|'.join(key_data)
# We turn the architecture item into a list.
if key in unique_packages:
unique_packages[key]['arch'].append(package['arch'])
else:
unique_packages[key] = package
unique_packages[key]['arch'] = [unique_packages[key]['arch']]
res = []
for item in unique_packages.values():
if len(item['arch']) == 1:
# If there is only one arch, then we have a platform specific URL,
# since otherwise we would have generated an arch that contains both
# linux + osx
item['arch'] = item['arch'][0]
res.append(item)
else:
# Here we have two or more archs (ideally; we don't check conditions
# like 0 arches)
item['arch'] = 'src'
res.append(item)
with open('data.yml', 'w') as outfile:
yaml.safe_dump(res, outfile, default_flow_style=False)
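
The list-or-scalar handling for 'url' generalizes to a small helper; the following sketch (as_list and package_key are illustrative names) builds the same join key either way:

def as_list(value):
    """Wrap scalars in a list so list-valued fields key consistently."""
    return list(value) if isinstance(value, list) else [value]

def package_key(package):
    key_data = [package['version'], package['name']] + as_list(package['url'])
    return '|'.join(key_data)

assert package_key({'version': '1.0', 'name': 'r-ada',
                    'url': ['http://a', 'http://b']}) == '1.0|r-ada|http://a|http://b'
assert package_key({'version': '1.0', 'name': 'samtools',
                    'url': 'http://a'}) == '1.0|samtools|http://a'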
|
a827279098ab2ef73778b15a76f738fedce9ed30
|
tests.py
|
tests.py
|
import api
import unittest
import os
from getpass import getpass
class ApiTest(unittest.TestCase):
def setUp(self):
self.linode = api.Api(os.environ['LINODE_API_KEY'])
def testAvailLinodeplans(self):
available_plans = self.linode.avail_linodeplans()
self.assertTrue(isinstance(available_plans, list))
if __name__ == "__main__":
if 'LINODE_API_KEY' not in os.environ:
os.environ['LINODE_API_KEY'] = getpass('Enter API Key: ')
unittest.main()
|
import api
import unittest
import os
from getpass import getpass
class ApiTest(unittest.TestCase):
def setUp(self):
self.linode = api.Api(os.environ['LINODE_API_KEY'])
def testAvailLinodeplans(self):
available_plans = self.linode.avail_linodeplans()
self.assertTrue(isinstance(available_plans, list))
def testEcho(self):
test_parameters = {'FOO': 'bar', 'FIZZ': 'buzz'}
response = self.linode.test_echo(**test_parameters)
self.assertTrue('FOO' in response)
self.assertTrue('FIZZ' in response)
self.assertEqual(test_parameters['FOO'], response['FOO'])
self.assertEqual(test_parameters['FIZZ'], response['FIZZ'])
if __name__ == "__main__":
if 'LINODE_API_KEY' not in os.environ:
os.environ['LINODE_API_KEY'] = getpass('Enter API Key: ')
unittest.main()
|
Add a test case for test.echo
|
Add a test case for test.echo
|
Python
|
mit
|
ryanshawty/linode-python,tjfontaine/linode-python
|
import api
import unittest
import os
from getpass import getpass
class ApiTest(unittest.TestCase):
def setUp(self):
self.linode = api.Api(os.environ['LINODE_API_KEY'])
def testAvailLinodeplans(self):
available_plans = self.linode.avail_linodeplans()
self.assertTrue(isinstance(available_plans, list))
+ def testEcho(self):
+ test_parameters = {'FOO': 'bar', 'FIZZ': 'buzz'}
+ response = self.linode.test_echo(**test_parameters)
+ self.assertTrue('FOO' in response)
+ self.assertTrue('FIZZ' in response)
+ self.assertEqual(test_parameters['FOO'], response['FOO'])
+ self.assertEqual(test_parameters['FIZZ'], response['FIZZ'])
+
if __name__ == "__main__":
if 'LINODE_API_KEY' not in os.environ:
os.environ['LINODE_API_KEY'] = getpass('Enter API Key: ')
unittest.main()
|
Add a test case for test.echo
|
## Code Before:
import api
import unittest
import os
from getpass import getpass
class ApiTest(unittest.TestCase):
def setUp(self):
self.linode = api.Api(os.environ['LINODE_API_KEY'])
def testAvailLinodeplans(self):
available_plans = self.linode.avail_linodeplans()
self.assertTrue(isinstance(available_plans, list))
if __name__ == "__main__":
if 'LINODE_API_KEY' not in os.environ:
os.environ['LINODE_API_KEY'] = getpass('Enter API Key: ')
unittest.main()
## Instruction:
Add a test case for test.echo
## Code After:
import api
import unittest
import os
from getpass import getpass
class ApiTest(unittest.TestCase):
def setUp(self):
self.linode = api.Api(os.environ['LINODE_API_KEY'])
def testAvailLinodeplans(self):
available_plans = self.linode.avail_linodeplans()
self.assertTrue(isinstance(available_plans, list))
def testEcho(self):
test_parameters = {'FOO': 'bar', 'FIZZ': 'buzz'}
response = self.linode.test_echo(**test_parameters)
self.assertTrue('FOO' in response)
self.assertTrue('FIZZ' in response)
self.assertEqual(test_parameters['FOO'], response['FOO'])
self.assertEqual(test_parameters['FIZZ'], response['FIZZ'])
if __name__ == "__main__":
if 'LINODE_API_KEY' not in os.environ:
os.environ['LINODE_API_KEY'] = getpass('Enter API Key: ')
unittest.main()
|
import api
import unittest
import os
from getpass import getpass
class ApiTest(unittest.TestCase):
def setUp(self):
self.linode = api.Api(os.environ['LINODE_API_KEY'])
def testAvailLinodeplans(self):
available_plans = self.linode.avail_linodeplans()
self.assertTrue(isinstance(available_plans, list))
+ def testEcho(self):
+ test_parameters = {'FOO': 'bar', 'FIZZ': 'buzz'}
+ response = self.linode.test_echo(**test_parameters)
+ self.assertTrue('FOO' in response)
+ self.assertTrue('FIZZ' in response)
+ self.assertEqual(test_parameters['FOO'], response['FOO'])
+ self.assertEqual(test_parameters['FIZZ'], response['FIZZ'])
+
if __name__ == "__main__":
if 'LINODE_API_KEY' not in os.environ:
os.environ['LINODE_API_KEY'] = getpass('Enter API Key: ')
unittest.main()
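
The @vcr.use_cassette decorator is what keeps the new echo test reproducible: the first run records real HTTP traffic to the YAML file, and later runs replay it offline. A minimal sketch of the pattern, with an illustrative cassette path:

import vcr

@vcr.use_cassette('fixtures/vcr_cassettes/echo.yaml')
def call_echo(api):
    # Recorded once against the live API, then replayed from the cassette.
    return api.test_echo(FOO='bar')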
|
1096bc339caf0ba329332633d8b9170fb8940f6f
|
start.py
|
start.py
|
import cursingspock
from spockbot import Client
from spockbot.plugins import default_plugins as plugins
from bat import bat, command
plugins.extend([
('bat', bat.BatPlugin),
('commands', command.CommandPlugin),
('curses', cursingspock.CursesPlugin),
])
# login_credentials should contain a dict with 'username' and 'password'
#from login_credentials import settings
settings = {
'start': {'username': 'Bat'},
'auth': {'online_mode': False},
}
client = Client(plugins=plugins, settings=settings)
client.start('localhost', 25565)
|
import cursingspock
from spockbot import Client
from spockbot.plugins import default_plugins
from bat import bat, command
plugins = default_plugins.copy()
plugins.extend([
('bat', bat.BatPlugin),
('commands', command.CommandPlugin),
('curses', cursingspock.CursesPlugin),
])
# login_credentials should contain a dict with 'username' and 'password'
#from login_credentials import settings
settings = {
'start': {'username': 'Bat'},
'auth': {'online_mode': False},
}
client = Client(plugins=plugins, settings=settings)
client.start('localhost', 25565)
|
Copy default plugins and extend
|
Copy default plugins and extend
|
Python
|
mit
|
Gjum/Bat
|
import cursingspock
from spockbot import Client
- from spockbot.plugins import default_plugins as plugins
+ from spockbot.plugins import default_plugins
from bat import bat, command
+ plugins = default_plugins.copy()
plugins.extend([
('bat', bat.BatPlugin),
('commands', command.CommandPlugin),
('curses', cursingspock.CursesPlugin),
])
# login_credentials should contain a dict with 'username' and 'password'
#from login_credentials import settings
settings = {
'start': {'username': 'Bat'},
'auth': {'online_mode': False},
}
client = Client(plugins=plugins, settings=settings)
client.start('localhost', 25565)
|
Copy default plugins and extend
|
## Code Before:
import cursingspock
from spockbot import Client
from spockbot.plugins import default_plugins as plugins
from bat import bat, command
plugins.extend([
('bat', bat.BatPlugin),
('commands', command.CommandPlugin),
('curses', cursingspock.CursesPlugin),
])
# login_credentials should contain a dict with 'username' and 'password'
#from login_credentials import settings
settings = {
'start': {'username': 'Bat'},
'auth': {'online_mode': False},
}
client = Client(plugins=plugins, settings=settings)
client.start('localhost', 25565)
## Instruction:
Copy default plugins and extend
## Code After:
import cursingspock
from spockbot import Client
from spockbot.plugins import default_plugins
from bat import bat, command
plugins = default_plugins.copy()
plugins.extend([
('bat', bat.BatPlugin),
('commands', command.CommandPlugin),
('curses', cursingspock.CursesPlugin),
])
# login_credentials should contain a dict with 'username' and 'password'
#from login_credentials import settings
settings = {
'start': {'username': 'Bat'},
'auth': {'online_mode': False},
}
client = Client(plugins=plugins, settings=settings)
client.start('localhost', 25565)
|
import cursingspock
from spockbot import Client
- from spockbot.plugins import default_plugins as plugins
? -----------
+ from spockbot.plugins import default_plugins
from bat import bat, command
+ plugins = default_plugins.copy()
plugins.extend([
('bat', bat.BatPlugin),
('commands', command.CommandPlugin),
('curses', cursingspock.CursesPlugin),
])
# login_credentials should contain a dict with 'username' and 'password'
#from login_credentials import settings
settings = {
'start': {'username': 'Bat'},
'auth': {'online_mode': False},
}
client = Client(plugins=plugins, settings=settings)
client.start('localhost', 25565)
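
The reason for the .copy() is aliasing: extending the imported list would mutate spockbot's module-level default for every later importer. A tiny self-contained demonstration, where defaults stands in for default_plugins:

defaults = ['auth', 'net']            # stands in for default_plugins

aliased = defaults                    # the old "as plugins" import
aliased.extend([('bat', None)])
assert len(defaults) == 3             # the shared list was mutated

defaults = ['auth', 'net']
copied = defaults.copy()              # the fixed version
copied.extend([('bat', None)])
assert defaults == ['auth', 'net']    # original left untouched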
|
c603dc219d47ef255ef30447526e9c8dff82a5db
|
blues/python.py
|
blues/python.py
|
from fabric.decorators import task
from refabric.api import run, info
from refabric.context_managers import sudo
from . import debian
__all__ = ['setup']
@task
def setup():
"""
Install python develop tools
"""
install()
def install():
with sudo():
info('Install python dependencies')
debian.apt_get('install', 'python-dev', 'python-setuptools')
run('easy_install -0 pip')
def pip(command, *options):
info('Running pip {}', command)
run('pip {} {} -v --log=/tmp/pip.log --log-file=/tmp/pip.log'.format(command, ' '.join(options)))
|
from fabric.decorators import task
from refabric.api import run, info
from refabric.context_managers import sudo
from . import debian
__all__ = ['setup']
pip_log_file = '/tmp/pip.log'
@task
def setup():
"""
Install Python development tools
"""
install()
def install():
with sudo():
info('Install python dependencies')
debian.apt_get('install', 'python-dev', 'python-setuptools')
run('easy_install -0 pip')
run('touch {}'.format(pip_log_file))
debian.chmod(pip_log_file, mode=777)
def pip(command, *options):
info('Running pip {}', command)
run('pip {0} {1} -v --log={2} --log-file={2}'.format(command, ' '.join(options), pip_log_file))
|
Make pip log world writable
|
Make pip log world writable
|
Python
|
mit
|
adisbladis/blues,jocke-l/blues,gelbander/blues,jocke-l/blues,Sportamore/blues,gelbander/blues,chrippa/blues,andreif/blues,gelbander/blues,5monkeys/blues,Sportamore/blues,chrippa/blues,adisbladis/blues,andreif/blues,adisbladis/blues,5monkeys/blues,jocke-l/blues,Sportamore/blues,andreif/blues,5monkeys/blues,chrippa/blues
|
from fabric.decorators import task
from refabric.api import run, info
from refabric.context_managers import sudo
from . import debian
__all__ = ['setup']
+
+
+ pip_log_file = '/tmp/pip.log'
@task
def setup():
"""
Install Python development tools
"""
install()
def install():
with sudo():
info('Install python dependencies')
debian.apt_get('install', 'python-dev', 'python-setuptools')
run('easy_install -0 pip')
+ run('touch {}'.format(pip_log_file))
+ debian.chmod(pip_log_file, mode=777)
def pip(command, *options):
info('Running pip {}', command)
- run('pip {} {} -v --log=/tmp/pip.log --log-file=/tmp/pip.log'.format(command, ' '.join(options)))
+ run('pip {0} {1} -v --log={2} --log-file={2}'.format(command, ' '.join(options), pip_log_file))
|
Make pip log world writable
|
## Code Before:
from fabric.decorators import task
from refabric.api import run, info
from refabric.context_managers import sudo
from . import debian
__all__ = ['setup']
@task
def setup():
"""
Install Python development tools
"""
install()
def install():
with sudo():
info('Install python dependencies')
debian.apt_get('install', 'python-dev', 'python-setuptools')
run('easy_install -0 pip')
def pip(command, *options):
info('Running pip {}', command)
run('pip {} {} -v --log=/tmp/pip.log --log-file=/tmp/pip.log'.format(command, ' '.join(options)))
## Instruction:
Make pip log world writable
## Code After:
from fabric.decorators import task
from refabric.api import run, info
from refabric.context_managers import sudo
from . import debian
__all__ = ['setup']
pip_log_file = '/tmp/pip.log'
@task
def setup():
"""
Install Python development tools
"""
install()
def install():
with sudo():
info('Install python dependencies')
debian.apt_get('install', 'python-dev', 'python-setuptools')
run('easy_install -0 pip')
run('touch {}'.format(pip_log_file))
debian.chmod(pip_log_file, mode=777)
def pip(command, *options):
info('Running pip {}', command)
run('pip {0} {1} -v --log={2} --log-file={2}'.format(command, ' '.join(options), pip_log_file))
|
from fabric.decorators import task
from refabric.api import run, info
from refabric.context_managers import sudo
from . import debian
__all__ = ['setup']
+
+
+ pip_log_file = '/tmp/pip.log'
@task
def setup():
"""
Install Python development tools
"""
install()
def install():
with sudo():
info('Install python dependencies')
debian.apt_get('install', 'python-dev', 'python-setuptools')
run('easy_install -0 pip')
+ run('touch {}'.format(pip_log_file))
+ debian.chmod(pip_log_file, mode=777)
def pip(command, *options):
info('Running pip {}', command)
- run('pip {} {} -v --log=/tmp/pip.log --log-file=/tmp/pip.log'.format(command, ' '.join(options)))
? ^^^^^^^^^^^^ ^^^^^^^^^^^^
+ run('pip {0} {1} -v --log={2} --log-file={2}'.format(command, ' '.join(options), pip_log_file))
? + + ^^^ ^^^ ++++++++++++++
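
Pre-creating the log and opening its permissions is what lets pip run later as an unprivileged user. A stdlib sketch of the same two steps as they would look locally (the remote host here uses the fabric run/debian.chmod calls instead):

import os

pip_log_file = '/tmp/pip.log'

with open(pip_log_file, 'a'):
    pass                        # touch: create if missing, keep contents
os.chmod(pip_log_file, 0o777)   # note the octal literal, unlike mode=777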
|
c67d9b1b45d64743698c52331e7fadc4ed5f8236
|
properties/prandtl_meyer_function.py
|
properties/prandtl_meyer_function.py
|
from math import atan, pi, sqrt
from properties.constants import GAMMA
def nu_in_rad(m):
if m < 1:
raise ValueError('Mach number should be greater than or equal to 1')
a = (GAMMA+1) / (GAMMA-1)
b = m**2 - 1
c = a**-1 * b
return sqrt(a) * atan(sqrt(c)) - atan(sqrt(b))
def nu_in_deg(m):
return nu_in_rad(m) * 180 / pi
|
from __future__ import absolute_import, division
from math import asin, atan, degrees, sqrt
from properties.constants import GAMMA
def nu_in_rad(m):
if m < 1:
raise ValueError('Mach number should be greater than or equal to 1')
a = (GAMMA+1) / (GAMMA-1)
b = m**2 - 1
c = a**-1 * b
return sqrt(a) * atan(sqrt(c)) - atan(sqrt(b))
def nu_in_deg(m):
return degrees(nu_in_rad(m))
def mu_in_rad(m):
return asin(1/m)
def mu_in_deg(m):
return degrees(mu_in_rad(m))
|
Add mu_in_rad and mu_in_deg, use built-in method to convert rad to deg
|
Add mu_in_rad and mu_in_deg, use built-in method to convert rad to deg
|
Python
|
mit
|
iwarobots/TunnelDesign
|
+ from __future__ import absolute_import, division
+
- from math import atan, pi, sqrt
+ from math import asin, atan, degrees, sqrt
from properties.constants import GAMMA
def nu_in_rad(m):
if m < 1:
raise ValueError('Mach number should be greater than or equal to 1')
a = (GAMMA+1) / (GAMMA-1)
b = m**2 - 1
c = a**-1 * b
return sqrt(a) * atan(sqrt(c)) - atan(sqrt(b))
def nu_in_deg(m):
- return nu_in_rad(m) * 180 / pi
+ return degrees(nu_in_rad(m))
+
+ def mu_in_rad(m):
+ return asin(1/m)
+
+
+ def mu_in_deg(m):
+ return degrees(mu_in_rad(m))
|
Add mu_in_rad and mu_in_deg, use built-in method to convert rad to deg
|
## Code Before:
from math import atan, pi, sqrt
from properties.constants import GAMMA
def nu_in_rad(m):
if m < 1:
raise ValueError('Mach number should be greater than or equal to 1')
a = (GAMMA+1) / (GAMMA-1)
b = m**2 - 1
c = a**-1 * b
return sqrt(a) * atan(sqrt(c)) - atan(sqrt(b))
def nu_in_deg(m):
return nu_in_rad(m) * 180 / pi
## Instruction:
Add mu_in_rad and mu_in_deg, use built-in method to convert rad to deg
## Code After:
from __future__ import absolute_import, division
from math import asin, atan, degrees, sqrt
from properties.constants import GAMMA
def nu_in_rad(m):
if m < 1:
raise ValueError('Mach number should be greater than or equal to 1')
a = (GAMMA+1) / (GAMMA-1)
b = m**2 - 1
c = a**-1 * b
return sqrt(a) * atan(sqrt(c)) - atan(sqrt(b))
def nu_in_deg(m):
return degrees(nu_in_rad(m))
def mu_in_rad(m):
return asin(1/m)
def mu_in_deg(m):
return degrees(mu_in_rad(m))
|
+ from __future__ import absolute_import, division
+
- from math import atan, pi, sqrt
? ^^
+ from math import asin, atan, degrees, sqrt
? ++++++ ^^^^^^^
from properties.constants import GAMMA
def nu_in_rad(m):
if m < 1:
raise ValueError('Mach number should be greater than or equal to 1')
a = (GAMMA+1) / (GAMMA-1)
b = m**2 - 1
c = a**-1 * b
return sqrt(a) * atan(sqrt(c)) - atan(sqrt(b))
def nu_in_deg(m):
- return nu_in_rad(m) * 180 / pi
+ return degrees(nu_in_rad(m))
+
+
+ def mu_in_rad(m):
+ return asin(1/m)
+
+
+ def mu_in_deg(m):
+ return degrees(mu_in_rad(m))
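
A worked check of both functions at M = 2 with GAMMA = 1.4, where tabulated values give nu ~ 26.38 deg and mu = 30 deg:

from math import asin, atan, degrees, sqrt

GAMMA = 1.4
m = 2.0
a = (GAMMA + 1) / (GAMMA - 1)                       # 6.0
b = m ** 2 - 1                                      # 3.0
nu = degrees(sqrt(a) * atan(sqrt(b / a)) - atan(sqrt(b)))
mu = degrees(asin(1 / m))
assert abs(nu - 26.38) < 0.01                       # Prandtl-Meyer angle
assert abs(mu - 30.0) < 1e-9                        # Mach angle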
|
b0e101f523fd853392e65b1b30204a56e3ec34ec
|
tests/test_twitter.py
|
tests/test_twitter.py
|
import pytest
import tweepy
import vcr
from secrets import TWITTER_ACCESS, TWITTER_SECRET
from secrets import CONSUMER_KEY, CONSUMER_SECRET
class TestTweepyIntegration():
"""Test class to ensure tweepy functionality works as expected"""
# Class level client to use across tests
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(TWITTER_ACCESS, TWITTER_SECRET)
@vcr.use_cassette('fixtures/vcr_cassettes/twitter.yaml')
def test_authd(self):
api = tweepy.API(self.auth)
assert api.verify_credentials() is not False
if __name__ == '__main__':
pytest.main()
|
import pytest
import tweepy
import vcr
from secrets import TWITTER_ACCESS, TWITTER_SECRET
from secrets import TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET
class TestTweepyIntegration():
"""Test class to ensure tweepy functionality works as expected"""
# Class level client to use across tests
auth = tweepy.OAuthHandler(TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET)
auth.set_access_token(TWITTER_ACCESS, TWITTER_SECRET)
@vcr.use_cassette('fixtures/vcr_cassettes/twitter.yaml')
def test_authd(self):
api = tweepy.API(self.auth)
assert api.verify_credentials() is not False
if __name__ == '__main__':
pytest.main()
|
Update access token variable names
|
Update access token variable names
|
Python
|
mit
|
nestauk/inet
|
import pytest
import tweepy
import vcr
from secrets import TWITTER_ACCESS, TWITTER_SECRET
- from secrets import CONSUMER_KEY, CONSUMER_SECRET
+ from secrets import TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET
class TestTweepyIntegration():
"""Test class to ensure tweepy functionality works as expected"""
# Class level client to use across tests
- auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
+ auth = tweepy.OAuthHandler(TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET)
auth.set_access_token(TWITTER_ACCESS, TWITTER_SECRET)
@vcr.use_cassette('fixtures/vcr_cassettes/twitter.yaml')
def test_authd(self):
api = tweepy.API(self.auth)
assert api.verify_credentials() is not False
if __name__ == '__main__':
pytest.main()
|
Update access token variable names
|
## Code Before:
import pytest
import tweepy
import vcr
from secrets import TWITTER_ACCESS, TWITTER_SECRET
from secrets import CONSUMER_KEY, CONSUMER_SECRET
class TestTweepyIntegration():
"""Test class to ensure tweepy functionality works as expected"""
# Class level client to use across tests
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(TWITTER_ACCESS, TWITTER_SECRET)
@vcr.use_cassette('fixtures/vcr_cassettes/twitter.yaml')
def test_authd(self):
api = tweepy.API(self.auth)
assert api.verify_credentials() is not False
if __name__ == '__main__':
pytest.main()
## Instruction:
Update access token variable names
## Code After:
import pytest
import tweepy
import vcr
from secrets import TWITTER_ACCESS, TWITTER_SECRET
from secrets import TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET
class TestTweepyIntegration():
"""Test class to ensure tweepy functionality works as expected"""
# Class level client to use across tests
auth = tweepy.OAuthHandler(TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET)
auth.set_access_token(TWITTER_ACCESS, TWITTER_SECRET)
@vcr.use_cassette('fixtures/vcr_cassettes/twitter.yaml')
def test_authd(self):
api = tweepy.API(self.auth)
assert api.verify_credentials() is not False
if __name__ == '__main__':
pytest.main()
|
import pytest
import tweepy
import vcr
from secrets import TWITTER_ACCESS, TWITTER_SECRET
- from secrets import CONSUMER_KEY, CONSUMER_SECRET
? ^ ^
+ from secrets import TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET
? ++++++++ ^^^ ^^ ++++++++
class TestTweepyIntegration():
"""Test class to ensure tweepy functionality works as expected"""
# Class level client to use across tests
- auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
? ^ ^
+ auth = tweepy.OAuthHandler(TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET)
? ++++++++ ^^^ ^^ ++++++++
auth.set_access_token(TWITTER_ACCESS, TWITTER_SECRET)
@vcr.use_cassette('fixtures/vcr_cassettes/twitter.yaml')
def test_authd(self):
api = tweepy.API(self.auth)
assert api.verify_credentials() is not False
if __name__ == '__main__':
pytest.main()
|
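The renamed constants all live in a local secrets module that the record never shows (such files are typically kept out of version control). A minimal sketch of what it could contain, assuming nothing beyond the four names imported above; every value is a placeholder:

# secrets.py -- hypothetical template; real values come from the
# Twitter developer console, and this file should stay untracked.
TWITTER_CONSUMER_ACCESS = 'consumer-key-placeholder'
TWITTER_CONSUMER_SECRET = 'consumer-secret-placeholder'
TWITTER_ACCESS = 'access-token-placeholder'
TWITTER_SECRET = 'access-token-secret-placeholder'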
fc472d043e81c2b5687a0f83dbbdd0dd02b73e35
|
flowtype/commands/exec_flow.py
|
flowtype/commands/exec_flow.py
|
import os
import json
import threading
import subprocess
import sublime
class ExecFlowCommand(threading.Thread):
"""Threaded class used for running flow commands in a different thread.
The subprocess must be threaded so we don't lock up the UI.
"""
def __init__(self, cmd, content):
"""Initialize with the command and the file content to send."""
self.cmd = cmd
self.content = content
self.stdout = None
self.returncode = 0
self.stderr = None
threading.Thread.__init__(self)
def run(self):
"""Execute the command in a subprocess."""
read, write = os.pipe()
os.write(write, str.encode(self.content))
os.close(write)
try:
output = subprocess.check_output(
self.cmd,
shell=sublime.platform() == 'windows',
stdin=read,
stderr=subprocess.STDOUT
)
if type(output) is bytes:
output = output.decode('utf-8')
try:
self.stdout = json.loads(output)
except ValueError:
self.stdout = output
os.close(read)
except subprocess.CalledProcessError as err:
self.stderr = str(err)
self.returncode = 1
|
import os
import json
import threading
import subprocess
import sublime
class ExecFlowCommand(threading.Thread):
"""Threaded class used for running flow commands in a different thread.
The subprocess must be threaded so we don't lock up the UI.
"""
def __init__(self, cmd, content):
"""Initialize with the command and the file content to send."""
self.cmd = cmd
self.content = content
self.stdout = None
self.returncode = 0
self.stderr = None
threading.Thread.__init__(self)
def run(self):
"""Execute the command in a subprocess."""
read, write = os.pipe()
os.write(write, str.encode(self.content))
os.close(write)
try:
output = subprocess.check_output(
self.cmd,
shell=sublime.platform() == 'windows',
stdin=read,
stderr=subprocess.STDOUT
)
if type(output) is bytes:
output = output.decode('utf-8')
try:
self.stdout = json.loads(output)
except ValueError:
self.stdout = output
os.close(read)
except subprocess.CalledProcessError as err:
if type(err.output) is bytes:
output = err.output.decode('utf-8')
else:
output = err.output
self.stderr = str(err) + ': ' + str(output)
self.returncode = 1
|
Add error output to exec error messages
|
Add error output to exec error messages
e.g. for an error like "env: ‘node’: No such file or directory"
the Sublime console was only reporting "exited with code 127",
which wasn't very helpful in determining the cause.
|
Python
|
mit
|
Pegase745/sublime-flowtype
|
import os
import json
import threading
import subprocess
import sublime
class ExecFlowCommand(threading.Thread):
"""Threaded class used for running flow commands in a different thread.
The subprocess must be threaded so we don't lock up the UI.
"""
def __init__(self, cmd, content):
"""Initialize with the command and the file content to send."""
self.cmd = cmd
self.content = content
self.stdout = None
self.returncode = 0
self.stderr = None
threading.Thread.__init__(self)
def run(self):
"""Execute the command in a subprocess."""
read, write = os.pipe()
os.write(write, str.encode(self.content))
os.close(write)
try:
output = subprocess.check_output(
self.cmd,
shell=sublime.platform() == 'windows',
stdin=read,
stderr=subprocess.STDOUT
)
if type(output) is bytes:
output = output.decode('utf-8')
try:
self.stdout = json.loads(output)
except ValueError:
self.stdout = output
os.close(read)
except subprocess.CalledProcessError as err:
+ if type(err.output) is bytes:
+ output = err.output.decode('utf-8')
+ else:
+ output = err.output
- self.stderr = str(err)
+ self.stderr = str(err) + ': ' + str(output)
self.returncode = 1
|
Add error output to exec error messages
|
## Code Before:
import os
import json
import threading
import subprocess
import sublime
class ExecFlowCommand(threading.Thread):
"""Threaded class used for running flow commands in a different thread.
The subprocess must be threaded so we don't lock up the UI.
"""
def __init__(self, cmd, content):
"""Initialize with the command and the file content to send."""
self.cmd = cmd
self.content = content
self.stdout = None
self.returncode = 0
self.stderr = None
threading.Thread.__init__(self)
def run(self):
"""Execute the command in a subprocess."""
read, write = os.pipe()
os.write(write, str.encode(self.content))
os.close(write)
try:
output = subprocess.check_output(
self.cmd,
shell=sublime.platform() == 'windows',
stdin=read,
stderr=subprocess.STDOUT
)
if type(output) is bytes:
output = output.decode('utf-8')
try:
self.stdout = json.loads(output)
except ValueError:
self.stdout = output
os.close(read)
except subprocess.CalledProcessError as err:
self.stderr = str(err)
self.returncode = 1
## Instruction:
Add error output to exec error messages
## Code After:
import os
import json
import threading
import subprocess
import sublime
class ExecFlowCommand(threading.Thread):
"""Threaded class used for running flow commands in a different thread.
The subprocess must be threaded so we don't lock up the UI.
"""
def __init__(self, cmd, content):
"""Initialize with the command and the file content to send."""
self.cmd = cmd
self.content = content
self.stdout = None
self.returncode = 0
self.stderr = None
threading.Thread.__init__(self)
def run(self):
"""Execute the command in a subprocess."""
read, write = os.pipe()
os.write(write, str.encode(self.content))
os.close(write)
try:
output = subprocess.check_output(
self.cmd,
shell=sublime.platform() == 'windows',
stdin=read,
stderr=subprocess.STDOUT
)
if type(output) is bytes:
output = output.decode('utf-8')
try:
self.stdout = json.loads(output)
except ValueError:
self.stdout = output
os.close(read)
except subprocess.CalledProcessError as err:
if type(err.output) is bytes:
output = err.output.decode('utf-8')
else:
output = err.output
self.stderr = str(err) + ': ' + str(output)
self.returncode = 1
|
import os
import json
import threading
import subprocess
import sublime
class ExecFlowCommand(threading.Thread):
"""Threaded class used for running flow commands in a different thread.
The subprocess must be threaded so we don't lock up the UI.
"""
def __init__(self, cmd, content):
"""Initialize with the command and the file content to send."""
self.cmd = cmd
self.content = content
self.stdout = None
self.returncode = 0
self.stderr = None
threading.Thread.__init__(self)
def run(self):
"""Execute the command in a subprocess."""
read, write = os.pipe()
os.write(write, str.encode(self.content))
os.close(write)
try:
output = subprocess.check_output(
self.cmd,
shell=sublime.platform() == 'windows',
stdin=read,
stderr=subprocess.STDOUT
)
if type(output) is bytes:
output = output.decode('utf-8')
try:
self.stdout = json.loads(output)
except ValueError:
self.stdout = output
os.close(read)
except subprocess.CalledProcessError as err:
+ if type(err.output) is bytes:
+ output = err.output.decode('utf-8')
+ else:
+ output = err.output
- self.stderr = str(err)
+ self.stderr = str(err) + ': ' + str(output)
? +++++++++++++++++++++
self.returncode = 1
|
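The pattern in this fix, decoding err.output when it arrives as bytes and appending it to the exception text, is useful outside the plugin too. A self-contained sketch of the same idea; the commented call at the end is only an illustration of the kind of failure described in the message:

import subprocess

def check_output_verbose(cmd):
    """Run cmd, raising with the captured output on failure."""
    try:
        return subprocess.check_output(cmd, stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as err:
        output = err.output
        if isinstance(output, bytes):  # same bytes/str guard as the fix above
            output = output.decode('utf-8')
        raise RuntimeError(str(err) + ': ' + str(output))

# check_output_verbose(['env', 'missing-binary'])
# would raise with both the exit status (127) and env's own error text.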
2f4365d1d8c54f4ced852ffe9824fc530ac14862
|
{{cookiecutter.repo_name}}/tests/test_{{cookiecutter.repo_name}}.py
|
{{cookiecutter.repo_name}}/tests/test_{{cookiecutter.repo_name}}.py
|
import pytest
def test_app_title(app):
"""Simply tests if the default app title meets the expectations.
Args:
app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the title does not match
"""
assert app.title == '{{cookiecutter.app_title}}'
def test_carousel(app):
"""Test for the carousel widget of the app checking the slides' names.
Args:
app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the names of the slides do not match the expectations
"""
names = [slide.name for slide in app.carousel.slides]
expected = ['hello', 'kivy', 'cookiecutterdozer', 'license', 'github']
assert names == expected
|
def test_app_title(app):
"""Simply tests if the default app title meets the expectations.
Args:
app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the title does not match
"""
assert app.title == '{{cookiecutter.app_title}}'
def test_carousel(app):
"""Test for the carousel widget of the app checking the slides' names.
Args:
app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the names of the slides do not match the expectations
"""
names = [slide.name for slide in app.carousel.slides]
expected = ['hello', 'kivy', 'cookiecutterdozer', 'license', 'github']
assert names == expected
|
Fix flake8 in app test
|
Fix flake8 in app test
|
Python
|
mit
|
hackebrot/cookiedozer,hackebrot/cookiedozer
|
-
- import pytest
def test_app_title(app):
"""Simply tests if the default app title meets the expectations.
Args:
app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the title does not match
"""
assert app.title == '{{cookiecutter.app_title}}'
def test_carousel(app):
"""Test for the carousel widget of the app checking the slides' names.
Args:
app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the names of the slides do not match the expectations
"""
names = [slide.name for slide in app.carousel.slides]
expected = ['hello', 'kivy', 'cookiecutterdozer', 'license', 'github']
assert names == expected
|
Fix flake8 in app test
|
## Code Before:
import pytest
def test_app_title(app):
"""Simply tests if the default app title meets the expectations.
Args:
app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the title does not match
"""
assert app.title == '{{cookiecutter.app_title}}'
def test_carousel(app):
"""Test for the carousel widget of the app checking the slides' names.
Args:
app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the names of the slides do not match the expectations
"""
names = [slide.name for slide in app.carousel.slides]
expected = ['hello', 'kivy', 'cookiecutterdozer', 'license', 'github']
assert names == expected
## Instruction:
Fix flake8 in app test
## Code After:
def test_app_title(app):
"""Simply tests if the default app title meets the expectations.
Args:
app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the title does not match
"""
assert app.title == '{{cookiecutter.app_title}}'
def test_carousel(app):
"""Test for the carousel widget of the app checking the slides' names.
Args:
app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the names of the slides do not match the expectations
"""
names = [slide.name for slide in app.carousel.slides]
expected = ['hello', 'kivy', 'cookiecutterdozer', 'license', 'github']
assert names == expected
|
-
- import pytest
def test_app_title(app):
"""Simply tests if the default app title meets the expectations.
Args:
app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the title does not match
"""
assert app.title == '{{cookiecutter.app_title}}'
def test_carousel(app):
"""Test for the carousel widget of the app checking the slides' names.
Args:
app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the names of the slides do not match the expectations
"""
names = [slide.name for slide in app.carousel.slides]
expected = ['hello', 'kivy', 'cookiecutterdozer', 'license', 'github']
assert names == expected
|
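The flake8 complaint being fixed is F401, module imported but unused: pytest was imported but never referenced once the tests relied purely on plain asserts and the app fixture. A toy approximation of that check using only the standard library (flake8's real implementation is more thorough):

import ast

source = "import pytest\n\ndef test_title(app):\n    assert app.title == 'x'\n"
tree = ast.parse(source)
imported = {alias.asname or alias.name
            for node in ast.walk(tree) if isinstance(node, ast.Import)
            for alias in node.names}
used = {node.id for node in ast.walk(tree) if isinstance(node, ast.Name)}
print(imported - used)  # {'pytest'} -- the unused import the commit removes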
e0c9f12463f1e4cc17eefbf8909118604695a23d
|
oscar/apps/search/search_indexes.py
|
oscar/apps/search/search_indexes.py
|
from haystack import indexes
from django.db.models import get_model
class ProductIndex(indexes.SearchIndex, indexes.Indexable):
"""
Base class for products solr index definition. Override by creating your
own copy of oscar.search_indexes.py
"""
text = indexes.EdgeNgramField(document=True, use_template=True,
template_name='oscar/search/indexes/product/item_text.txt')
title = indexes.EdgeNgramField(model_attr='title', null=True)
upc = indexes.CharField(model_attr="upc", null=True)
date_created = indexes.DateTimeField(model_attr='date_created')
date_updated = indexes.DateTimeField(model_attr='date_updated')
def get_model(self):
return get_model('catalogue', 'Product')
def index_queryset(self):
"""
Used when the entire index for model is updated.
Orders by the most recently updated so that new objects are indexed first
"""
return self.get_model().objects.order_by('-date_updated')
def get_updated_field(self):
"""
Used to specify the field used to determine if an object has been updated
Can be used to filter the query set when updating the index
"""
return 'date_updated'
|
from haystack import indexes
from django.db.models import get_model
class ProductIndex(indexes.SearchIndex, indexes.Indexable):
"""
Base class for products solr index definition. Override by creating your
own copy of oscar.search_indexes.py
"""
text = indexes.EdgeNgramField(document=True, use_template=True,
template_name='oscar/search/indexes/product/item_text.txt')
title = indexes.EdgeNgramField(model_attr='title', null=True)
upc = indexes.CharField(model_attr="upc", null=True)
date_created = indexes.DateTimeField(model_attr='date_created')
date_updated = indexes.DateTimeField(model_attr='date_updated')
def get_model(self):
return get_model('catalogue', 'Product')
def index_queryset(self, using=None):
"""
Used when the entire index for model is updated.
Orders by the most recently updated so that new objects are indexed first
"""
return self.get_model().objects.order_by('-date_updated')
def get_updated_field(self):
"""
Used to specify the field used to determine if an object has been updated
Can be used to filter the query set when updating the index
"""
return 'date_updated'
|
Fix issue with latest changes in haystack
|
Fix issue with latest changes in haystack
A merge into haystack has resulted in an additional argument for
``index_queryset``; I updated the search index for Oscar's product
to fix the issue.
|
Python
|
bsd-3-clause
|
MatthewWilkes/django-oscar,jmt4/django-oscar,michaelkuty/django-oscar,jlmadurga/django-oscar,rocopartners/django-oscar,sonofatailor/django-oscar,django-oscar/django-oscar,makielab/django-oscar,Bogh/django-oscar,jinnykoo/christmas,WadeYuChen/django-oscar,faratro/django-oscar,eddiep1101/django-oscar,jinnykoo/christmas,itbabu/django-oscar,john-parton/django-oscar,WillisXChen/django-oscar,pasqualguerrero/django-oscar,mexeniz/django-oscar,dongguangming/django-oscar,binarydud/django-oscar,django-oscar/django-oscar,bnprk/django-oscar,kapt/django-oscar,solarissmoke/django-oscar,django-oscar/django-oscar,ka7eh/django-oscar,MatthewWilkes/django-oscar,jinnykoo/wuyisj,itbabu/django-oscar,elliotthill/django-oscar,MatthewWilkes/django-oscar,rocopartners/django-oscar,itbabu/django-oscar,pasqualguerrero/django-oscar,nickpack/django-oscar,bschuon/django-oscar,pasqualguerrero/django-oscar,WadeYuChen/django-oscar,WillisXChen/django-oscar,nickpack/django-oscar,ahmetdaglarbas/e-commerce,binarydud/django-oscar,jinnykoo/wuyisj,ahmetdaglarbas/e-commerce,WadeYuChen/django-oscar,machtfit/django-oscar,anentropic/django-oscar,eddiep1101/django-oscar,michaelkuty/django-oscar,saadatqadri/django-oscar,dongguangming/django-oscar,lijoantony/django-oscar,amirrpp/django-oscar,sasha0/django-oscar,amirrpp/django-oscar,pdonadeo/django-oscar,lijoantony/django-oscar,mexeniz/django-oscar,marcoantoniooliveira/labweb,Jannes123/django-oscar,WillisXChen/django-oscar,kapari/django-oscar,bschuon/django-oscar,makielab/django-oscar,dongguangming/django-oscar,marcoantoniooliveira/labweb,solarissmoke/django-oscar,rocopartners/django-oscar,DrOctogon/unwash_ecom,spartonia/django-oscar,makielab/django-oscar,taedori81/django-oscar,Idematica/django-oscar,marcoantoniooliveira/labweb,spartonia/django-oscar,john-parton/django-oscar,okfish/django-oscar,elliotthill/django-oscar,Idematica/django-oscar,okfish/django-oscar,faratro/django-oscar,john-parton/django-oscar,manevant/django-oscar,makielab/django-oscar,pasqualguerrero/django-oscar,monikasulik/django-oscar,sonofatailor/django-oscar,ka7eh/django-oscar,faratro/django-oscar,josesanch/django-oscar,kapari/django-oscar,WillisXChen/django-oscar,eddiep1101/django-oscar,jlmadurga/django-oscar,nfletton/django-oscar,jinnykoo/wuyisj.com,amirrpp/django-oscar,Jannes123/django-oscar,manevant/django-oscar,rocopartners/django-oscar,josesanch/django-oscar,sonofatailor/django-oscar,nfletton/django-oscar,jinnykoo/wuyisj.com,jmt4/django-oscar,jmt4/django-oscar,elliotthill/django-oscar,sonofatailor/django-oscar,jinnykoo/wuyisj.com,amirrpp/django-oscar,QLGu/django-oscar,solarissmoke/django-oscar,adamend/django-oscar,pdonadeo/django-oscar,sasha0/django-oscar,manevant/django-oscar,monikasulik/django-oscar,thechampanurag/django-oscar,anentropic/django-oscar,lijoantony/django-oscar,marcoantoniooliveira/labweb,anentropic/django-oscar,machtfit/django-oscar,saadatqadri/django-oscar,WillisXChen/django-oscar,adamend/django-oscar,ademuk/django-oscar,machtfit/django-oscar,ademuk/django-oscar,vovanbo/django-oscar,ahmetdaglarbas/e-commerce,ka7eh/django-oscar,nfletton/django-oscar,saadatqadri/django-oscar,michaelkuty/django-oscar,jmt4/django-oscar,jinnykoo/christmas,spartonia/django-oscar,adamend/django-oscar,nfletton/django-oscar,faratro/django-oscar,QLGu/django-oscar,ahmetdaglarbas/e-commerce,WillisXChen/django-oscar,bnprk/django-oscar,sasha0/django-oscar,bnprk/django-oscar,vovanbo/django-oscar,mexeniz/django-oscar,kapari/django-oscar,Jannes123/django-oscar,anentropic/django-oscar,mexeniz/django-oscar,binarydud/django-oscar,adame
nd/django-oscar,Bogh/django-oscar,kapt/django-oscar,thechampanurag/django-oscar,QLGu/django-oscar,taedori81/django-oscar,ka7eh/django-oscar,Bogh/django-oscar,ademuk/django-oscar,jinnykoo/wuyisj.com,josesanch/django-oscar,bschuon/django-oscar,taedori81/django-oscar,monikasulik/django-oscar,spartonia/django-oscar,pdonadeo/django-oscar,vovanbo/django-oscar,bnprk/django-oscar,okfish/django-oscar,WadeYuChen/django-oscar,monikasulik/django-oscar,okfish/django-oscar,pdonadeo/django-oscar,jinnykoo/wuyisj,sasha0/django-oscar,ademuk/django-oscar,itbabu/django-oscar,solarissmoke/django-oscar,kapari/django-oscar,vovanbo/django-oscar,dongguangming/django-oscar,DrOctogon/unwash_ecom,jinnykoo/wuyisj,nickpack/django-oscar,michaelkuty/django-oscar,QLGu/django-oscar,Idematica/django-oscar,DrOctogon/unwash_ecom,django-oscar/django-oscar,manevant/django-oscar,binarydud/django-oscar,thechampanurag/django-oscar,bschuon/django-oscar,nickpack/django-oscar,john-parton/django-oscar,eddiep1101/django-oscar,MatthewWilkes/django-oscar,saadatqadri/django-oscar,taedori81/django-oscar,kapt/django-oscar,Bogh/django-oscar,thechampanurag/django-oscar,jlmadurga/django-oscar,lijoantony/django-oscar,jlmadurga/django-oscar,Jannes123/django-oscar
|
from haystack import indexes
from django.db.models import get_model
class ProductIndex(indexes.SearchIndex, indexes.Indexable):
"""
Base class for products solr index definition. Override by creating your
own copy of oscar.search_indexes.py
"""
text = indexes.EdgeNgramField(document=True, use_template=True,
template_name='oscar/search/indexes/product/item_text.txt')
title = indexes.EdgeNgramField(model_attr='title', null=True)
upc = indexes.CharField(model_attr="upc", null=True)
date_created = indexes.DateTimeField(model_attr='date_created')
date_updated = indexes.DateTimeField(model_attr='date_updated')
def get_model(self):
return get_model('catalogue', 'Product')
- def index_queryset(self):
+ def index_queryset(self, using=None):
"""
Used when the entire index for model is updated.
Orders by the most recently updated so that new objects are indexed first
"""
return self.get_model().objects.order_by('-date_updated')
def get_updated_field(self):
"""
Used to specify the field used to determine if an object has been updated
Can be used to filter the query set when updating the index
"""
return 'date_updated'
-
-
|
Fix issue with latest changes in haystack
|
## Code Before:
from haystack import indexes
from django.db.models import get_model
class ProductIndex(indexes.SearchIndex, indexes.Indexable):
"""
Base class for products solr index definition. Override by creating your
own copy of oscar.search_indexes.py
"""
text = indexes.EdgeNgramField(document=True, use_template=True,
template_name='oscar/search/indexes/product/item_text.txt')
title = indexes.EdgeNgramField(model_attr='title', null=True)
upc = indexes.CharField(model_attr="upc", null=True)
date_created = indexes.DateTimeField(model_attr='date_created')
date_updated = indexes.DateTimeField(model_attr='date_updated')
def get_model(self):
return get_model('catalogue', 'Product')
def index_queryset(self):
"""
Used when the entire index for model is updated.
Orders by the most recently updated so that new objects are indexed first
"""
return self.get_model().objects.order_by('-date_updated')
def get_updated_field(self):
"""
Used to specify the field used to determine if an object has been updated
Can be used to filter the query set when updating the index
"""
return 'date_updated'
## Instruction:
Fix issue with latest changes in haystack
## Code After:
from haystack import indexes
from django.db.models import get_model
class ProductIndex(indexes.SearchIndex, indexes.Indexable):
"""
Base class for products solr index definition. Override by creating your
own copy of oscar.search_indexes.py
"""
text = indexes.EdgeNgramField(document=True, use_template=True,
template_name='oscar/search/indexes/product/item_text.txt')
title = indexes.EdgeNgramField(model_attr='title', null=True)
upc = indexes.CharField(model_attr="upc", null=True)
date_created = indexes.DateTimeField(model_attr='date_created')
date_updated = indexes.DateTimeField(model_attr='date_updated')
def get_model(self):
return get_model('catalogue', 'Product')
def index_queryset(self, using=None):
"""
Used when the entire index for model is updated.
Orders by the most recently updated so that new objects are indexed first
"""
return self.get_model().objects.order_by('-date_updated')
def get_updated_field(self):
"""
Used to specify the field used to determine if an object has been updated
Can be used to filter the query set when updating the index
"""
return 'date_updated'
|
from haystack import indexes
from django.db.models import get_model
class ProductIndex(indexes.SearchIndex, indexes.Indexable):
"""
Base class for products solr index definition. Override by creating your
own copy of oscar.search_indexes.py
"""
text = indexes.EdgeNgramField(document=True, use_template=True,
template_name='oscar/search/indexes/product/item_text.txt')
title = indexes.EdgeNgramField(model_attr='title', null=True)
upc = indexes.CharField(model_attr="upc", null=True)
date_created = indexes.DateTimeField(model_attr='date_created')
date_updated = indexes.DateTimeField(model_attr='date_updated')
def get_model(self):
return get_model('catalogue', 'Product')
- def index_queryset(self):
+ def index_queryset(self, using=None):
? ++++++++++++
"""
Used when the entire index for model is updated.
Orders by the most recently updated so that new objects are indexed first
"""
return self.get_model().objects.order_by('-date_updated')
def get_updated_field(self):
"""
Used to specify the field used to determine if an object has been updated
Can be used to filter the query set when updating the index
"""
return 'date_updated'
-
-
|
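Why the old signature breaks: newer haystack versions call index_queryset with a `using` keyword naming the backend connection being updated, and a method that does not accept it raises TypeError. A toy reproduction, independent of haystack and Django:

class OldIndex:
    def index_queryset(self):
        return []

class NewIndex:
    def index_queryset(self, using=None):  # accepts `using`, may ignore it
        return []

NewIndex().index_queryset(using='default')    # fine
try:
    OldIndex().index_queryset(using='default')
except TypeError as exc:
    print(exc)  # ... got an unexpected keyword argument 'using'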
7ba2299e2d429bd873539507b3edbe3cdd3de9d6
|
linkatos/firebase.py
|
linkatos/firebase.py
|
import pyrebase
def initialise(FB_API_KEY, project_name):
config = {
"apiKey": FB_API_KEY,
"authDomain": "{}.firebaseapp.com".format(project_name),
"databaseURL": "https://{}.firebaseio.com".format(project_name),
"storageBucket": "{}.appspot.com".format(project_name),
}
return pyrebase.initialize_app(config)
def store_url(is_yes, url, FB_USER, FB_PASS, firebase):
# do nothing if it's unnecessary
if not is_yes:
return False
# creates token every time maybe worth doing it once every 30m as they
# expire every hour
auth = firebase.auth()
user = auth.sign_in_with_email_and_password(FB_USER, FB_PASS)
db = firebase.database()
data = {
"url": url
}
db.child("users").push(data, user['idToken'])
return False
|
import pyrebase
def initialise(api_key, project_name):
config = {
"apiKey": api_key,
"authDomain": "{}.firebaseapp.com".format(project_name),
"databaseURL": "https://{}.firebaseio.com".format(project_name),
"storageBucket": "{}.appspot.com".format(project_name),
}
return pyrebase.initialize_app(config)
def store_url(is_yes, url, user, password, firebase):
# do nothing if it's unnecessary
if not is_yes:
return False
# creates token every time maybe worth doing it once every 30m as they
# expire every hour
auth = firebase.auth()
user = auth.sign_in_with_email_and_password(user, password)
db = firebase.database()
data = {
"url": url
}
db.child("users").push(data, user['idToken'])
return False
|
Change variables to lower case
|
style: Change variables to lower case
|
Python
|
mit
|
iwi/linkatos,iwi/linkatos
|
import pyrebase
- def initialise(FB_API_KEY, project_name):
+ def initialise(api_key, project_name):
config = {
- "apiKey": FB_API_KEY,
+ "apiKey": api_key,
"authDomain": "{}.firebaseapp.com".format(project_name),
"databaseURL": "https://{}.firebaseio.com".format(project_name),
"storageBucket": "{}.appspot.com".format(project_name),
}
return pyrebase.initialize_app(config)
- def store_url(is_yes, url, FB_USER, FB_PASS, firebase):
+ def store_url(is_yes, url, user, password, firebase):
# do nothing if it's unnecessary
if not is_yes:
return False
# creates token every time maybe worth doing it once every 30m as they
# expire every hour
auth = firebase.auth()
- user = auth.sign_in_with_email_and_password(FB_USER, FB_PASS)
+ user = auth.sign_in_with_email_and_password(user, password)
db = firebase.database()
data = {
"url": url
}
db.child("users").push(data, user['idToken'])
return False
|
Change variables to lower case
|
## Code Before:
import pyrebase
def initialise(FB_API_KEY, project_name):
config = {
"apiKey": FB_API_KEY,
"authDomain": "{}.firebaseapp.com".format(project_name),
"databaseURL": "https://{}.firebaseio.com".format(project_name),
"storageBucket": "{}.appspot.com".format(project_name),
}
return pyrebase.initialize_app(config)
def store_url(is_yes, url, FB_USER, FB_PASS, firebase):
# do nothing if it's unnecessary
if not is_yes:
return False
# creates token every time maybe worth doing it once every 30m as they
# expire every hour
auth = firebase.auth()
user = auth.sign_in_with_email_and_password(FB_USER, FB_PASS)
db = firebase.database()
data = {
"url": url
}
db.child("users").push(data, user['idToken'])
return False
## Instruction:
Change variables to lower case
## Code After:
import pyrebase
def initialise(api_key, project_name):
config = {
"apiKey": api_key,
"authDomain": "{}.firebaseapp.com".format(project_name),
"databaseURL": "https://{}.firebaseio.com".format(project_name),
"storageBucket": "{}.appspot.com".format(project_name),
}
return pyrebase.initialize_app(config)
def store_url(is_yes, url, user, password, firebase):
# do nothing if it's unnecessary
if not is_yes:
return False
# creates token every time maybe worth doing it once every 30m as they
# expire every hour
auth = firebase.auth()
user = auth.sign_in_with_email_and_password(user, password)
db = firebase.database()
data = {
"url": url
}
db.child("users").push(data, user['idToken'])
return False
|
import pyrebase
- def initialise(FB_API_KEY, project_name):
? ^^ ^^^^^^^
+ def initialise(api_key, project_name):
? ^^^ ^^^
config = {
- "apiKey": FB_API_KEY,
+ "apiKey": api_key,
"authDomain": "{}.firebaseapp.com".format(project_name),
"databaseURL": "https://{}.firebaseio.com".format(project_name),
"storageBucket": "{}.appspot.com".format(project_name),
}
return pyrebase.initialize_app(config)
- def store_url(is_yes, url, FB_USER, FB_PASS, firebase):
? ^^^^^^^ ^^^^^^^
+ def store_url(is_yes, url, user, password, firebase):
? ^^^^ ^^^^^^^^
# do nothing if it's unnecessary
if not is_yes:
return False
# creates token every time maybe worth doing it once every 30m as they
# expire every hour
auth = firebase.auth()
- user = auth.sign_in_with_email_and_password(FB_USER, FB_PASS)
? ^^^^^^^ ^^^^^^^
+ user = auth.sign_in_with_email_and_password(user, password)
? ^^^^ ^^^^^^^^
db = firebase.database()
data = {
"url": url
}
db.child("users").push(data, user['idToken'])
return False
|
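A call-site sketch showing how the renamed parameters read. The import path is inferred from the record's file path and the credentials are placeholders; the function is defined but not invoked because calling it would contact a real Firebase project:

from linkatos.firebase import initialise, store_url  # path assumed from the record

def example_usage():
    firebase = initialise('api-key-placeholder', 'my-project')
    store_url(True, 'https://example.com',
              'bot@example.com', 'app-password', firebase)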
b4806b4650f576c7b5cd7f33742ccb108e37321c
|
StartWithPython/StartWithPython/Theory/Loops/Range.py
|
StartWithPython/StartWithPython/Theory/Loops/Range.py
|
print('\n\t\tRANGE\n')
for x in range(10): # to make an action ('n') times
print("Pippo") # ...
print('')
for x in range(5, 12): # second example --> (from 'x' to(,) 'y')
print(x) # ...
print('')
for x in range(10, 40, 5): # third example --> (from 'x' to(,) 'y' in steps of(,) 'z')
print(x) # ...
print('')
|
print('\n\t\tRANGE\n')
for x in range(10): # to make an action ('n') times
print("Pippo") # ...
print('') # ...
for x in range(5, 12): # second example --> (from 'x' to(,) 'y')
print(x) # ...
print('') # ...
for x in range(10, 40, 5): # third example --> (from 'x' to(,) 'y' in steps of(,) 'z')
print(x) # ...
print('') # ...
print(list(range(4))) # create new list from 0 to 3
print(list(range(-6,7,2))) # -6 to +6 by 2
print([[x ** 2, x ** 3] for x in range(4)]) # some more complicated example
print([[x, x / 2, x*2] for x in range(-6, 7, 2) if x > 0]) # ...
|
Add some range usage examples with lists
|
Add some range usage examples with lists
|
Python
|
mit
|
CaptainMich/Python_Project
|
print('\n\t\tRANGE\n')
- for x in range(10): # to make an action ('n') times
+ for x in range(10): # to make an action ('n') times
- print("Pippo") # ...
+ print("Pippo") # ...
+ print('') # ...
- print('')
+ for x in range(5, 12): # second example --> (from 'x' to(,) 'y')
+ print(x) # ...
+ print('') # ...
- for x in range(5, 12): # second example --> (from 'x' to(,) 'y')
+ for x in range(10, 40, 5): # third example --> (from 'x' to(,) 'y' in steps of(,) 'z')
- print(x) # ...
+ print(x) # ...
+ print('') # ...
- print('')
+ print(list(range(4))) # create new list from 0 to 3
+ print(list(range(-6,7,2))) # -6 to +6 by 2
+
+ print([[x ** 2, x ** 3] for x in range(4)]) # some more complicated example
+ print([[x, x / 2, x*2] for x in range(-6, 7, 2) if x > 0]) # ...
- for x in range(10, 40, 5): # third example --> (from 'x' to(,) 'y' in steps of(,) 'z')
- print(x) # ...
-
- print('')
|
Add some range usage examples with lists
|
## Code Before:
print('\n\t\tRANGE\n')
for x in range(10): # to make an action ('n') times
print("Pippo") # ...
print('')
for x in range(5, 12): # second example --> (from 'x' to(,) 'y')
print(x) # ...
print('')
for x in range(10, 40, 5): # third example --> (from 'x' to(,) 'y' in steps of(,) 'z')
print(x) # ...
print('')
## Instruction:
Add some range usage examples with lists
## Code After:
print('\n\t\tRANGE\n')
for x in range(10): # to make an action ('n') times
print("Pippo") # ...
print('') # ...
for x in range(5, 12): # second example --> (from 'x' to(,) 'y')
print(x) # ...
print('') # ...
for x in range(10, 40, 5): # third example --> (from 'x' to(,) 'y' in steps of(,) 'z')
print(x) # ...
print('') # ...
print(list(range(4))) # create new list from 0 to 3
print(list(range(-6,7,2))) # -6 to +6 by 2
print([[x ** 2, x ** 3] for x in range(4)]) # some more complicated example
print([[x, x / 2, x*2] for x in range(-6, 7, 2) if x > 0]) # ...
|
print('\n\t\tRANGE\n')
- for x in range(10): # to make an action ('n') times
+ for x in range(10): # to make an action ('n') times
? ++++++++++++++++
- print("Pippo") # ...
+ print("Pippo") # ...
? ++++++++++++++++
+ print('') # ...
- print('')
+ for x in range(5, 12): # second example --> (from 'x' to(,) 'y')
+ print(x) # ...
+ print('') # ...
- for x in range(5, 12): # second example --> (from 'x' to(,) 'y')
? ^ ^^ ^^^^^
+ for x in range(10, 40, 5): # third example --> (from 'x' to(,) 'y' in steps of(,) 'z')
? ^^ ^^^^^ ++++++++++++ ^^^^ ++++++++++++++ +++++
- print(x) # ...
+ print(x) # ...
? ++++++++++++++++
+ print('') # ...
+ print(list(range(4))) # create new list from 0 to 3
+ print(list(range(-6,7,2))) # -6 to +6 by 2
+
+ print([[x ** 2, x ** 3] for x in range(4)]) # some more complicated example
+ print([[x, x / 2, x*2] for x in range(-6, 7, 2) if x > 0]) # ...
- print('')
-
- for x in range(10, 40, 5): # third example --> (from 'x' to(,) 'y' in steps of(,) 'z')
- print(x) # ...
-
- print('')
|
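One point the new list examples imply but do not state: in Python 3, range is a lazy sequence, and wrapping it in list() is what materialises the values. A short complementary snippet:

r = range(-6, 7, 2)
print(r)               # range(-6, 7, 2) -- nothing materialised yet
print(list(r))         # [-6, -4, -2, 0, 2, 4, 6]
print(4 in r, 5 in r)  # True False -- membership without building a list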
2fedb73b2c83fc7bb1b354d8b1ebd8dfe8497995
|
dataportal/tests/test_examples.py
|
dataportal/tests/test_examples.py
|
import unittest
from ..examples.sample_data import (temperature_ramp, multisource_event,
image_and_scalar)
from metadatastore.api import Document
class CommonSampleDataTests(object):
def setUp(self):
pass
def test_basic_usage(self):
events = self.example.run()
# check expected types
self.assertTrue(isinstance(events, list))
self.assertTrue(isinstance(events[0], Document))
class TestTemperatureRamp(CommonSampleDataTests, unittest.TestCase):
def setUp(self):
self.example = temperature_ramp
class TestMultisourceEvent(CommonSampleDataTests, unittest.TestCase):
def setUp(self):
self.example = multisource_event
class TestImageAndScalar(CommonSampleDataTests, unittest.TestCase):
def setUp(self):
self.example = image_and_scalar
|
from nose.tools import assert_true
from ..examples.sample_data import (temperature_ramp, multisource_event,
image_and_scalar)
from metadatastore.api import Document
def run_example(example):
events = example.run()
assert_true(isinstance(events, list))
assert_true(isinstance(events[0], Document))
def test_examples():
for example in [temperature_ramp, multisource_event, image_and_scalar]:
yield run_example, example
|
Use generator test for examples.
|
REF: Use generator test for examples.
|
Python
|
bsd-3-clause
|
ericdill/datamuxer,danielballan/datamuxer,NSLS-II/dataportal,tacaswell/dataportal,danielballan/dataportal,ericdill/databroker,NSLS-II/datamuxer,danielballan/datamuxer,NSLS-II/dataportal,danielballan/dataportal,ericdill/databroker,tacaswell/dataportal,ericdill/datamuxer
|
- import unittest
+ from nose.tools import assert_true
from ..examples.sample_data import (temperature_ramp, multisource_event,
image_and_scalar)
from metadatastore.api import Document
- class CommonSampleDataTests(object):
- def setUp(self):
- pass
+ def run_example(example):
+ events = example.run()
+ assert_true(isinstance(events, list))
+ assert_true(isinstance(events[0], Document))
- def test_basic_usage(self):
- events = self.example.run()
+ def test_examples():
+ for example in [temperature_ramp, multisource_event, image_and_scalar]:
+ yield run_example, example
- # check expected types
- self.assertTrue(isinstance(events, list))
- self.assertTrue(isinstance(events[0], Document))
-
-
- class TestTemperatureRamp(CommonSampleDataTests, unittest.TestCase):
-
- def setUp(self):
- self.example = temperature_ramp
-
-
- class TestMultisourceEvent(CommonSampleDataTests, unittest.TestCase):
-
- def setUp(self):
- self.example = multisource_event
-
-
- class TestImageAndScalar(CommonSampleDataTests, unittest.TestCase):
-
- def setUp(self):
- self.example = image_and_scalar
-
|
Use generator test for examples.
|
## Code Before:
import unittest
from ..examples.sample_data import (temperature_ramp, multisource_event,
image_and_scalar)
from metadatastore.api import Document
class CommonSampleDataTests(object):
def setUp(self):
pass
def test_basic_usage(self):
events = self.example.run()
# check expected types
self.assertTrue(isinstance(events, list))
self.assertTrue(isinstance(events[0], Document))
class TestTemperatureRamp(CommonSampleDataTests, unittest.TestCase):
def setUp(self):
self.example = temperature_ramp
class TestMultisourceEvent(CommonSampleDataTests, unittest.TestCase):
def setUp(self):
self.example = multisource_event
class TestImageAndScalar(CommonSampleDataTests, unittest.TestCase):
def setUp(self):
self.example = image_and_scalar
## Instruction:
Use generator test for examples.
## Code After:
from nose.tools import assert_true
from ..examples.sample_data import (temperature_ramp, multisource_event,
image_and_scalar)
from metadatastore.api import Document
def run_example(example):
events = example.run()
assert_true(isinstance(events, list))
assert_true(isinstance(events[0], Document))
def test_examples():
for example in [temperature_ramp, multisource_event, image_and_scalar]:
yield run_example, example
|
- import unittest
+ from nose.tools import assert_true
from ..examples.sample_data import (temperature_ramp, multisource_event,
image_and_scalar)
from metadatastore.api import Document
- class CommonSampleDataTests(object):
- def setUp(self):
- pass
+ def run_example(example):
+ events = example.run()
+ assert_true(isinstance(events, list))
+ assert_true(isinstance(events[0], Document))
+ def test_examples():
+ for example in [temperature_ramp, multisource_event, image_and_scalar]:
+ yield run_example, example
- def test_basic_usage(self):
- events = self.example.run()
-
- # check expected types
- self.assertTrue(isinstance(events, list))
- self.assertTrue(isinstance(events[0], Document))
-
-
- class TestTemperatureRamp(CommonSampleDataTests, unittest.TestCase):
-
- def setUp(self):
- self.example = temperature_ramp
-
-
- class TestMultisourceEvent(CommonSampleDataTests, unittest.TestCase):
-
- def setUp(self):
- self.example = multisource_event
-
-
- class TestImageAndScalar(CommonSampleDataTests, unittest.TestCase):
-
- def setUp(self):
- self.example = image_and_scalar
|
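The refactor relies on nose's generator tests: a test function that yields (callable, args) tuples produces one reported test case per yield. A toy version of the same pattern, decoupled from the sample-data modules; note this is nose-specific, as modern pytest has dropped yield-style tests:

from nose.tools import assert_true

def check_positive(n):
    assert_true(n > 0)

def test_positives():
    for n in [1, 2, 3]:
        yield check_positive, n  # nose runs check_positive(n) as its own test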
3a72b9164fc31e4e7f29715729160a48a7ce2f84
|
source/tyr/migrations/versions/266658781c00_instances_nullable_in_equipments_provider.py
|
source/tyr/migrations/versions/266658781c00_instances_nullable_in_equipments_provider.py
|
# revision identifiers, used by Alembic.
revision = '266658781c00'
down_revision = '204aae05372a'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
op.alter_column('equipments_provider', 'instances', existing_type=postgresql.ARRAY(sa.TEXT()), nullable=True)
def downgrade():
op.alter_column(
'equipments_provider', 'instances', existing_type=postgresql.ARRAY(sa.TEXT()), nullable=False
)
|
# revision identifiers, used by Alembic.
revision = '266658781c00'
down_revision = '204aae05372a'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
op.alter_column('equipments_provider', 'instances', existing_type=postgresql.ARRAY(sa.TEXT()), nullable=True)
def downgrade():
op.execute("UPDATE equipments_provider SET instances = '{null_instance}';")
op.alter_column(
'equipments_provider', 'instances', existing_type=postgresql.ARRAY(sa.TEXT()), nullable=False
)
|
Add required default value before downgrade migration
|
Add required default value before downgrade migration
|
Python
|
agpl-3.0
|
xlqian/navitia,kinnou02/navitia,xlqian/navitia,Tisseo/navitia,kinnou02/navitia,xlqian/navitia,xlqian/navitia,Tisseo/navitia,Tisseo/navitia,CanalTP/navitia,kinnou02/navitia,Tisseo/navitia,CanalTP/navitia,CanalTP/navitia,xlqian/navitia,CanalTP/navitia,CanalTP/navitia,Tisseo/navitia,kinnou02/navitia
|
# revision identifiers, used by Alembic.
revision = '266658781c00'
down_revision = '204aae05372a'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
op.alter_column('equipments_provider', 'instances', existing_type=postgresql.ARRAY(sa.TEXT()), nullable=True)
def downgrade():
+ op.execute("UPDATE equipments_provider SET instances = '{null_instance}';")
op.alter_column(
'equipments_provider', 'instances', existing_type=postgresql.ARRAY(sa.TEXT()), nullable=False
)
|
Add required default value before downgrade migration
|
## Code Before:
# revision identifiers, used by Alembic.
revision = '266658781c00'
down_revision = '204aae05372a'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
op.alter_column('equipments_provider', 'instances', existing_type=postgresql.ARRAY(sa.TEXT()), nullable=True)
def downgrade():
op.alter_column(
'equipments_provider', 'instances', existing_type=postgresql.ARRAY(sa.TEXT()), nullable=False
)
## Instruction:
Add required default value before downgrade migration
## Code After:
# revision identifiers, used by Alembic.
revision = '266658781c00'
down_revision = '204aae05372a'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
op.alter_column('equipments_provider', 'instances', existing_type=postgresql.ARRAY(sa.TEXT()), nullable=True)
def downgrade():
op.execute("UPDATE equipments_provider SET instances = '{null_instance}';")
op.alter_column(
'equipments_provider', 'instances', existing_type=postgresql.ARRAY(sa.TEXT()), nullable=False
)
|
# revision identifiers, used by Alembic.
revision = '266658781c00'
down_revision = '204aae05372a'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
op.alter_column('equipments_provider', 'instances', existing_type=postgresql.ARRAY(sa.TEXT()), nullable=True)
def downgrade():
+ op.execute("UPDATE equipments_provider SET instances = '{null_instance}';")
op.alter_column(
'equipments_provider', 'instances', existing_type=postgresql.ARRAY(sa.TEXT()), nullable=False
)
|
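The shape of the fix generalises: before a downgrade reinstates a NOT NULL constraint, backfill any rows that would violate it, or alter_column fails on existing NULLs. A generic sketch with placeholder table and column names; restricting the UPDATE with WHERE ... IS NULL (unlike the record's blanket UPDATE) preserves existing values, which is an editorial choice rather than part of the original commit:

import sqlalchemy as sa
from alembic import op

def downgrade():
    # Backfill NULLs first so the constraint below cannot fail.
    op.execute("UPDATE my_table SET my_col = '{}' WHERE my_col IS NULL;")
    op.alter_column('my_table', 'my_col',
                    existing_type=sa.Text(), nullable=False)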
339f5c6d7cc5b3a70fa71fd423c0a4226acc77e7
|
valor/schema.py
|
valor/schema.py
|
import json
class Schema(dict):
"""
Lightweight encapsulation of a JSON Schema.
"""
@classmethod
def from_file(cls, path_or_stream):
"""
Create a schema from a file name or stream.
"""
if hasattr(path_or_stream, 'read'):
return cls(json.load(path_or_stream))
else:
with open(path_or_stream) as fp:
return cls(json.load(fp))
def resolve_ref(self, ref):
return Reference(ref).resolve(self)
class Reference(object):
def __init__(self, ref):
if not ref.startswith('#'):
raise ValueError("non-fragment references are not supported (got: %s)" % ref)
self.ref = ref
def resolve(self, schema):
# Very overly simplistic - doesn't handle array indexes, etc. However,
# works with Heroku's schema, so good enough for a prototype.
node = schema
for bit in self.ref.split('/')[1:]:
node = node[bit]
return node
|
import json
import jsonpointer
class Schema(dict):
"""
Lightweight encapsulation of a JSON Schema.
"""
@classmethod
def from_file(cls, path_or_stream):
"""
Create a schema from a file name or stream.
"""
if hasattr(path_or_stream, 'read'):
return cls(json.load(path_or_stream))
else:
with open(path_or_stream) as fp:
return cls(json.load(fp))
def resolve_ref(self, ref):
if not ref.startswith('#'):
raise ValueError("non-fragment references are not supported (got: %s)" % ref)
return jsonpointer.resolve_pointer(self, ref.lstrip('#'))
|
Use jsonpointer instead of my own terrible Reference class.
|
Use jsonpointer instead of my own terrible Reference class.
|
Python
|
bsd-3-clause
|
jacobian/valor
|
import json
+ import jsonpointer
class Schema(dict):
"""
Lightweight encapsulation of a JSON Schema.
"""
@classmethod
def from_file(cls, path_or_stream):
"""
Create a schema from a file name or stream.
"""
if hasattr(path_or_stream, 'read'):
return cls(json.load(path_or_stream))
else:
with open(path_or_stream) as fp:
return cls(json.load(fp))
def resolve_ref(self, ref):
- return Reference(ref).resolve(self)
-
- class Reference(object):
- def __init__(self, ref):
if not ref.startswith('#'):
raise ValueError("non-fragment references are not supported (got: %s)" % ref)
- self.ref = ref
+ return jsonpointer.resolve_pointer(self, ref.lstrip('#'))
- def resolve(self, schema):
- # Very overly simplistic - doesn't handle array indexes, etc. However,
- # works with Heroku's schema, so good enough for a prototype.
- node = schema
- for bit in self.ref.split('/')[1:]:
- node = node[bit]
- return node
-
|
Use jsonpointer instead of my own terrible Reference class.
|
## Code Before:
import json
class Schema(dict):
"""
Lightweight encapsulation of a JSON Schema.
"""
@classmethod
def from_file(cls, path_or_stream):
"""
Create a schema from a file name or stream.
"""
if hasattr(path_or_stream, 'read'):
return cls(json.load(path_or_stream))
else:
with open(path_or_stream) as fp:
return cls(json.load(fp))
def resolve_ref(self, ref):
return Reference(ref).resolve(self)
class Reference(object):
def __init__(self, ref):
if not ref.startswith('#'):
raise ValueError("non-fragment references are not supported (got: %s)" % ref)
self.ref = ref
def resolve(self, schema):
# Very overly simplistic - doesn't handle array indexes, etc. However,
# works with Heroku's schema, so good enough for a prototype.
node = schema
for bit in self.ref.split('/')[1:]:
node = node[bit]
return node
## Instruction:
Use jsonpointer instead of my own terrible Reference class.
## Code After:
import json
import jsonpointer
class Schema(dict):
"""
Lightweight encapsulation of a JSON Schema.
"""
@classmethod
def from_file(cls, path_or_stream):
"""
Create a schema from a file name or stream.
"""
if hasattr(path_or_stream, 'read'):
return cls(json.load(path_or_stream))
else:
with open(path_or_stream) as fp:
return cls(json.load(fp))
def resolve_ref(self, ref):
if not ref.startswith('#'):
raise ValueError("non-fragment references are not supported (got: %s)" % ref)
return jsonpointer.resolve_pointer(self, ref.lstrip('#'))
|
import json
+ import jsonpointer
class Schema(dict):
"""
Lightweight encapsulation of a JSON Schema.
"""
@classmethod
def from_file(cls, path_or_stream):
"""
Create a schema from a file name or stream.
"""
if hasattr(path_or_stream, 'read'):
return cls(json.load(path_or_stream))
else:
with open(path_or_stream) as fp:
return cls(json.load(fp))
def resolve_ref(self, ref):
- return Reference(ref).resolve(self)
-
- class Reference(object):
- def __init__(self, ref):
if not ref.startswith('#'):
raise ValueError("non-fragment references are not supported (got: %s)" % ref)
+ return jsonpointer.resolve_pointer(self, ref.lstrip('#'))
- self.ref = ref
-
- def resolve(self, schema):
- # Very overly simplisitic - doesn't handle array indexes, etc. However,
- # works with Heroku's schema, so good enough for a prototype.
- node = schema
- for bit in self.ref.split('/')[1:]:
- node = node[bit]
- return node
|
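For reference, how the replacement behaves on a fragment ref: stripping the leading '#' turns '#/definitions/app/title' into the pointer '/definitions/app/title', which jsonpointer walks segment by segment. A self-contained example with a made-up schema:

import jsonpointer

schema = {'definitions': {'app': {'title': 'App'}}}
ref = '#/definitions/app/title'
print(jsonpointer.resolve_pointer(schema, ref.lstrip('#')))  # App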
090a11c08839eae78e0ca6ec963b66ac3876ba35
|
circuits/web/events.py
|
circuits/web/events.py
|
from circuits import Event
class WebEvent(Event):
channels = ("web",)
class Request(WebEvent):
"""Request(WebEvent) -> Request WebEvent
args: request, response
"""
success = True
failure = True
class Response(WebEvent):
"""Response(WebEvent) -> Response WebEvent
args: request, response
"""
success = True
failure = True
class Stream(WebEvent):
"""Stream(WebEvent) -> Stream WebEvent
args: request, response
"""
success = True
failure = True
|
from circuits import Event
class WebEvent(Event):
channels = ("web",)
success = True
failure = True
class Request(WebEvent):
"""Request(WebEvent) -> Request WebEvent
args: request, response
"""
class Response(WebEvent):
"""Response(WebEvent) -> Response WebEvent
args: request, response
"""
class Stream(WebEvent):
"""Stream(WebEvent) -> Stream WebEvent
args: request, response
"""
|
Move success/failure properties into WebEvent base class.
|
Move success/failure properties into WebEvent base class.
|
Python
|
mit
|
nizox/circuits,eriol/circuits,eriol/circuits,treemo/circuits,eriol/circuits,treemo/circuits,treemo/circuits
|
from circuits import Event
+
class WebEvent(Event):
channels = ("web",)
+
+ success = True
+ failure = True
class Request(WebEvent):
"""Request(WebEvent) -> Request WebEvent
args: request, response
"""
- success = True
- failure = True
-
class Response(WebEvent):
"""Response(WebEvent) -> Response WebEvent
args: request, response
"""
-
- success = True
- failure = True
class Stream(WebEvent):
"""Stream(WebEvent) -> Stream WebEvent
args: request, response
"""
- success = True
- failure = True
-
|
Move success/failure properties into WebEvent base class.
|
## Code Before:
from circuits import Event
class WebEvent(Event):
channels = ("web",)
class Request(WebEvent):
"""Request(WebEvent) -> Request WebEvent
args: request, response
"""
success = True
failure = True
class Response(WebEvent):
"""Response(WebEvent) -> Response WebEvent
args: request, response
"""
success = True
failure = True
class Stream(WebEvent):
"""Stream(WebEvent) -> Stream WebEvent
args: request, response
"""
success = True
failure = True
## Instruction:
Move success/failure properties into WebEvent base class.
## Code After:
from circuits import Event
class WebEvent(Event):
channels = ("web",)
success = True
failure = True
class Request(WebEvent):
"""Request(WebEvent) -> Request WebEvent
args: request, response
"""
class Response(WebEvent):
"""Response(WebEvent) -> Response WebEvent
args: request, response
"""
class Stream(WebEvent):
"""Stream(WebEvent) -> Stream WebEvent
args: request, response
"""
|
from circuits import Event
+
class WebEvent(Event):
channels = ("web",)
+
+ success = True
+ failure = True
class Request(WebEvent):
"""Request(WebEvent) -> Request WebEvent
args: request, response
"""
- success = True
- failure = True
-
class Response(WebEvent):
"""Response(WebEvent) -> Response WebEvent
args: request, response
"""
- success = True
- failure = True
-
class Stream(WebEvent):
"""Stream(WebEvent) -> Stream WebEvent
args: request, response
"""
-
- success = True
- failure = True
|
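The refactor works because class attributes are inherited: once success and failure are defined on WebEvent, every subclass sees them without restating them. A minimal illustration of the mechanism, independent of circuits:

class Base:
    success = True
    failure = True

class Child(Base):
    pass

print(Child.success, Child.failure)  # True True -- inherited from Base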
af010c5e924a779a37495905efc32aecdfd358ea
|
whalelinter/commands/common.py
|
whalelinter/commands/common.py
|
import re
from whalelinter.app import App
from whalelinter.dispatcher import Dispatcher
from whalelinter.commands.command import ShellCommand
from whalelinter.commands.apt import Apt
@Dispatcher.register(token='run', command='cd')
class Cd(ShellCommand):
def __init__(self, **kwargs):
App._collecter.throw(2002, self.line)
return False
@Dispatcher.register(token='run', command='rm')
class Rm(ShellCommand):
def __init__(self, **kwargs):
rf_flags_regex = re.compile("(-.*[rRf].+-?[rRf]|-[rR]f|-f[rR])")
rf_flags = True if [i for i in kwargs.get('args') if rf_flags_regex.search(i)] else False
cache_path_regex = re.compile("/var/lib/apt/lists(\/\*?)?")
cache_path = True if [i for i in kwargs.get('args') if cache_path_regex.search(i)] else False
if rf_flags and cache_path:
if (int(Apt._has_been_used) < int(kwargs.get('lineno'))):
Apt._has_been_used = 0
|
import re
from whalelinter.app import App
from whalelinter.dispatcher import Dispatcher
from whalelinter.commands.command import ShellCommand
from whalelinter.commands.apt import Apt
@Dispatcher.register(token='run', command='cd')
class Cd(ShellCommand):
def __init__(self, **kwargs):
App._collecter.throw(2002, kwargs.get('lineno'))
@Dispatcher.register(token='run', command='rm')
class Rm(ShellCommand):
def __init__(self, **kwargs):
rf_flags_regex = re.compile("(-.*[rRf].+-?[rRf]|-[rR]f|-f[rR])")
rf_flags = True if [i for i in kwargs.get('args') if rf_flags_regex.search(i)] else False
cache_path_regex = re.compile("/var/lib/apt/lists(\/\*?)?")
cache_path = True if [i for i in kwargs.get('args') if cache_path_regex.search(i)] else False
if rf_flags and cache_path:
if (int(Apt._has_been_used) < int(kwargs.get('lineno'))):
Apt._has_been_used = 0
|
Fix line addressing issue on 'cd' command
|
Fix line addressing issue on 'cd' command
|
Python
|
mit
|
jeromepin/whale-linter
|
import re
from whalelinter.app import App
from whalelinter.dispatcher import Dispatcher
from whalelinter.commands.command import ShellCommand
from whalelinter.commands.apt import Apt
@Dispatcher.register(token='run', command='cd')
class Cd(ShellCommand):
def __init__(self, **kwargs):
- App._collecter.throw(2002, self.line)
+ App._collecter.throw(2002, kwargs.get('lineno'))
- return False
@Dispatcher.register(token='run', command='rm')
class Rm(ShellCommand):
def __init__(self, **kwargs):
rf_flags_regex = re.compile("(-.*[rRf].+-?[rRf]|-[rR]f|-f[rR])")
rf_flags = True if [i for i in kwargs.get('args') if rf_flags_regex.search(i)] else False
cache_path_regex = re.compile("/var/lib/apt/lists(\/\*?)?")
cache_path = True if [i for i in kwargs.get('args') if cache_path_regex.search(i)] else False
if rf_flags and cache_path:
if (int(Apt._has_been_used) < int(kwargs.get('lineno'))):
Apt._has_been_used = 0
|
Fix line addressing issue on 'cd' command
|
## Code Before:
import re
from whalelinter.app import App
from whalelinter.dispatcher import Dispatcher
from whalelinter.commands.command import ShellCommand
from whalelinter.commands.apt import Apt
@Dispatcher.register(token='run', command='cd')
class Cd(ShellCommand):
def __init__(self, **kwargs):
App._collecter.throw(2002, self.line)
return False
@Dispatcher.register(token='run', command='rm')
class Rm(ShellCommand):
def __init__(self, **kwargs):
rf_flags_regex = re.compile("(-.*[rRf].+-?[rRf]|-[rR]f|-f[rR])")
rf_flags = True if [i for i in kwargs.get('args') if rf_flags_regex.search(i)] else False
cache_path_regex = re.compile("/var/lib/apt/lists(\/\*?)?")
cache_path = True if [i for i in kwargs.get('args') if cache_path_regex.search(i)] else False
if rf_flags and cache_path:
if (int(Apt._has_been_used) < int(kwargs.get('lineno'))):
Apt._has_been_used = 0
## Instruction:
Fix line addressing issue on 'cd' command
## Code After:
import re
from whalelinter.app import App
from whalelinter.dispatcher import Dispatcher
from whalelinter.commands.command import ShellCommand
from whalelinter.commands.apt import Apt
@Dispatcher.register(token='run', command='cd')
class Cd(ShellCommand):
def __init__(self, **kwargs):
App._collecter.throw(2002, kwargs.get('lineno'))
@Dispatcher.register(token='run', command='rm')
class Rm(ShellCommand):
def __init__(self, **kwargs):
rf_flags_regex = re.compile("(-.*[rRf].+-?[rRf]|-[rR]f|-f[rR])")
rf_flags = True if [i for i in kwargs.get('args') if rf_flags_regex.search(i)] else False
cache_path_regex = re.compile("/var/lib/apt/lists(\/\*?)?")
cache_path = True if [i for i in kwargs.get('args') if cache_path_regex.search(i)] else False
if rf_flags and cache_path:
if (int(Apt._has_been_used) < int(kwargs.get('lineno'))):
Apt._has_been_used = 0
|
import re
from whalelinter.app import App
from whalelinter.dispatcher import Dispatcher
from whalelinter.commands.command import ShellCommand
from whalelinter.commands.apt import Apt
@Dispatcher.register(token='run', command='cd')
class Cd(ShellCommand):
def __init__(self, **kwargs):
- App._collecter.throw(2002, self.line)
? ^^^
+ App._collecter.throw(2002, kwargs.get('lineno'))
? +++++ ++ ^^^ +++ +
- return False
@Dispatcher.register(token='run', command='rm')
class Rm(ShellCommand):
def __init__(self, **kwargs):
rf_flags_regex = re.compile("(-.*[rRf].+-?[rRf]|-[rR]f|-f[rR])")
rf_flags = True if [i for i in kwargs.get('args') if rf_flags_regex.search(i)] else False
cache_path_regex = re.compile("/var/lib/apt/lists(\/\*?)?")
cache_path = True if [i for i in kwargs.get('args') if cache_path_regex.search(i)] else False
if rf_flags and cache_path:
if (int(Apt._has_been_used) < int(kwargs.get('lineno'))):
Apt._has_been_used = 0
|
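The fix swaps self.line, an attribute the record never shows being set on Cd, for the lineno already present in kwargs. Whether the old code crashed or merely reported a stale line depends on the unshown ShellCommand base class; the toy below assumes the attribute was simply missing:

class Before:
    def __init__(self, **kwargs):
        print(2002, self.line)            # AttributeError if nothing set .line

class After:
    def __init__(self, **kwargs):
        print(2002, kwargs.get('lineno'))

After(lineno=7)                           # -> 2002 7
try:
    Before(lineno=7)
except AttributeError as exc:
    print(exc)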
1d4777f810388ee87cceb01c2b53367723fb3a71
|
PyFBA/cmd/__init__.py
|
PyFBA/cmd/__init__.py
|
from .citation import cite_me_please
from .fluxes import measure_fluxes
from .gapfill_from_roles import gapfill_from_roles
from .assigned_functions_to_reactions import to_reactions
from .fba_from_reactions import run_the_fba
from .gapfill_from_reactions_multiple_conditions import gapfill_multiple_media
from .media import list_media
# Don't forget to add the imports here so that you can import *
__all__ = [
'cite_me_please', 'measure_fluxes', 'gapfill_from_roles', 'to_reactions', 'run_the_fba', 'gapfill_multiple_media',
'list_media'
]
|
from .citation import cite_me_please
from .fluxes import measure_fluxes
from .gapfill_from_roles import gapfill_from_roles
from .assigned_functions_to_reactions import to_reactions
from .fba_from_reactions import run_the_fba
from .gapfill_from_reactions_multiple_conditions import gapfill_multiple_media
from .media import list_media
from .reactions_to_roles import convert_reactions_to_roles
# Don't forget to add the imports here so that you can import *
__all__ = [
'cite_me_please', 'measure_fluxes', 'gapfill_from_roles', 'to_reactions', 'run_the_fba', 'gapfill_multiple_media',
'list_media', 'convert_reactions_to_roles'
]
|
Add a function to retrieve roles from reactions
|
Add a function to retrieve roles from reactions
|
Python
|
mit
|
linsalrob/PyFBA
|
from .citation import cite_me_please
from .fluxes import measure_fluxes
from .gapfill_from_roles import gapfill_from_roles
from .assigned_functions_to_reactions import to_reactions
from .fba_from_reactions import run_the_fba
from .gapfill_from_reactions_multiple_conditions import gapfill_multiple_media
from .media import list_media
+ from .reactions_to_roles import convert_reactions_to_roles
# Don't forget to add the imports here so that you can import *
__all__ = [
'cite_me_please', 'measure_fluxes', 'gapfill_from_roles', 'to_reactions', 'run_the_fba', 'gapfill_multiple_media',
- 'list_media'
+ 'list_media', 'convert_reactions_to_roles'
]
|
Add a function to retrieve roles from reactions
|
## Code Before:
from .citation import cite_me_please
from .fluxes import measure_fluxes
from .gapfill_from_roles import gapfill_from_roles
from .assigned_functions_to_reactions import to_reactions
from .fba_from_reactions import run_the_fba
from .gapfill_from_reactions_multiple_conditions import gapfill_multiple_media
from .media import list_media
# Don't forget to add the imports here so that you can import *
__all__ = [
'cite_me_please', 'measure_fluxes', 'gapfill_from_roles', 'to_reactions', 'run_the_fba', 'gapfill_multiple_media',
'list_media'
]
## Instruction:
Add a function to retrieve roles from reactions
## Code After:
from .citation import cite_me_please
from .fluxes import measure_fluxes
from .gapfill_from_roles import gapfill_from_roles
from .assigned_functions_to_reactions import to_reactions
from .fba_from_reactions import run_the_fba
from .gapfill_from_reactions_multiple_conditions import gapfill_multiple_media
from .media import list_media
from .reactions_to_roles import convert_reactions_to_roles
# Don't forget to add the imports here so that you can import *
__all__ = [
'cite_me_please', 'measure_fluxes', 'gapfill_from_roles', 'to_reactions', 'run_the_fba', 'gapfill_multiple_media',
'list_media', 'convert_reactions_to_roles'
]
|
from .citation import cite_me_please
from .fluxes import measure_fluxes
from .gapfill_from_roles import gapfill_from_roles
from .assigned_functions_to_reactions import to_reactions
from .fba_from_reactions import run_the_fba
from .gapfill_from_reactions_multiple_conditions import gapfill_multiple_media
from .media import list_media
+ from .reactions_to_roles import convert_reactions_to_roles
# Don't forget to add the imports here so that you can import *
__all__ = [
'cite_me_please', 'measure_fluxes', 'gapfill_from_roles', 'to_reactions', 'run_the_fba', 'gapfill_multiple_media',
- 'list_media'
+ 'list_media', 'convert_reactions_to_roles'
]
|
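The commit above adds the import and extends __all__ in the same change; __all__ is what a wildcard import honours, which a throwaway module can demonstrate without installing PyFBA (module and function names below are made up):

import sys
import types

mod = types.ModuleType("demo_cmd")
exec(
    "def convert_reactions_to_roles(): return 'roles'\n"
    "def _internal(): pass\n"
    "__all__ = ['convert_reactions_to_roles']\n",
    mod.__dict__,
)
sys.modules["demo_cmd"] = mod  # make the module importable

ns = {}
exec("from demo_cmd import *", ns)
# only the name listed in __all__ is pulled into the namespace
print(sorted(k for k in ns if not k.startswith("__")))
# ['convert_reactions_to_roles']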
4c240f17571b5e63805a2632e5e8a6c1d3695d54
|
examples/00-load/create-tri-surface.py
|
examples/00-load/create-tri-surface.py
|
# sphinx_gallery_thumbnail_number = 2
import vtki
import numpy as np
################################################################################
# First, create some points for the surface.
# Define a simple Gaussian surface
xx, yy = np.meshgrid(np.linspace(-200,200,20), np.linspace(-200,200,20))
A, b = 100, 100
zz = A*np.exp(-0.5*((xx/b)**2. + (yy/b)**2.))
# Get the points as a 2D NumPy array (N by 3)
points = np.c_[xx.reshape(-1), yy.reshape(-1), zz.reshape(-1)]
print(points[0:5, :])
################################################################################
# Now use those points to create a point cloud ``vtki`` data object. This will
# be encompassed in a :class:`vtki.PolyData` object.
# simply pass the numpy points to the PolyData constructor
cloud = vtki.PolyData(points)
cloud.plot()
################################################################################
# Now that we have a ``vtki`` data structure of the points, we can perform a
# triangulation to turn those boring discrete points into a connected surface.
surf = cloud.delaunay_2d()
surf.plot(show_edges=True)
|
# sphinx_gallery_thumbnail_number = 2
import vtki
import numpy as np
################################################################################
# First, create some points for the surface.
# Define a simple Gaussian surface
xx, yy = np.meshgrid(np.linspace(-200,200,20), np.linspace(-200,200,20))
A, b = 100, 100
zz = A*np.exp(-0.5*((xx/b)**2. + (yy/b)**2.))
# Get the points as a 2D NumPy array (N by 3)
points = np.c_[xx.reshape(-1), yy.reshape(-1), zz.reshape(-1)]
print(points[0:5, :])
################################################################################
# Now use those points to create a point cloud ``vtki`` data object. This will
# be encompassed in a :class:`vtki.PolyData` object.
# simply pass the numpy points to the PolyData constructor
cloud = vtki.PolyData(points)
vtki.set_plot_theme('doc')
cloud.plot(point_size=15, use_panel=False)
################################################################################
# Now that we have a ``vtki`` data structure of the points, we can perform a
# triangulation to turn those boring discrete points into a connected surface.
surf = cloud.delaunay_2d()
surf.plot(show_edges=True)
|
Increase point size in example
|
Increase point size in example
|
Python
|
mit
|
akaszynski/vtkInterface
|
# sphinx_gallery_thumbnail_number = 2
import vtki
import numpy as np
################################################################################
# First, create some points for the surface.
# Define a simple Gaussian surface
xx, yy = np.meshgrid(np.linspace(-200,200,20), np.linspace(-200,200,20))
A, b = 100, 100
zz = A*np.exp(-0.5*((xx/b)**2. + (yy/b)**2.))
# Get the points as a 2D NumPy array (N by 3)
points = np.c_[xx.reshape(-1), yy.reshape(-1), zz.reshape(-1)]
print(points[0:5, :])
################################################################################
# Now use those points to create a point cloud ``vtki`` data object. This will
# be encompassed in a :class:`vtki.PolyData` object.
# simply pass the numpy points to the PolyData constructor
cloud = vtki.PolyData(points)
- cloud.plot()
+ vtki.set_plot_theme('doc')
+ cloud.plot(point_size=15, use_panel=False)
################################################################################
# Now that we have a ``vtki`` data structure of the points, we can perform a
# triangulation to turn those boring discrete points into a connected surface.
surf = cloud.delaunay_2d()
surf.plot(show_edges=True)
|
Increase point size in example
|
## Code Before:
# sphinx_gallery_thumbnail_number = 2
import vtki
import numpy as np
################################################################################
# First, create some points for the surface.
# Define a simple Gaussian surface
xx, yy = np.meshgrid(np.linspace(-200,200,20), np.linspace(-200,200,20))
A, b = 100, 100
zz = A*np.exp(-0.5*((xx/b)**2. + (yy/b)**2.))
# Get the points as a 2D NumPy array (N by 3)
points = np.c_[xx.reshape(-1), yy.reshape(-1), zz.reshape(-1)]
print(points[0:5, :])
################################################################################
# Now use those points to create a point cloud ``vtki`` data object. This will
# be encompassed in a :class:`vtki.PolyData` object.
# simply pass the numpy points to the PolyData constructor
cloud = vtki.PolyData(points)
cloud.plot()
################################################################################
# Now that we have a ``vtki`` data structure of the points, we can perform a
# triangulation to turn those boring discrete points into a connected surface.
surf = cloud.delaunay_2d()
surf.plot(show_edges=True)
## Instruction:
Increase point size in example
## Code After:
# sphinx_gallery_thumbnail_number = 2
import vtki
import numpy as np
################################################################################
# First, create some points for the surface.
# Define a simple Gaussian surface
xx, yy = np.meshgrid(np.linspace(-200,200,20), np.linspace(-200,200,20))
A, b = 100, 100
zz = A*np.exp(-0.5*((xx/b)**2. + (yy/b)**2.))
# Get the points as a 2D NumPy array (N by 3)
points = np.c_[xx.reshape(-1), yy.reshape(-1), zz.reshape(-1)]
print(points[0:5, :])
################################################################################
# Now use those points to create a point cloud ``vtki`` data object. This will
# be encompassed in a :class:`vtki.PolyData` object.
# simply pass the numpy points to the PolyData constructor
cloud = vtki.PolyData(points)
vtki.set_plot_theme('doc')
cloud.plot(point_size=15, use_panel=False)
################################################################################
# Now that we have a ``vtki`` data structure of the points, we can perform a
# triangulation to turn those boring discrete points into a connected surface.
surf = cloud.delaunay_2d()
surf.plot(show_edges=True)
|
# sphinx_gallery_thumbnail_number = 2
import vtki
import numpy as np
################################################################################
# First, create some points for the surface.
# Define a simple Gaussian surface
xx, yy = np.meshgrid(np.linspace(-200,200,20), np.linspace(-200,200,20))
A, b = 100, 100
zz = A*np.exp(-0.5*((xx/b)**2. + (yy/b)**2.))
# Get the points as a 2D NumPy array (N by 3)
points = np.c_[xx.reshape(-1), yy.reshape(-1), zz.reshape(-1)]
print(points[0:5, :])
################################################################################
# Now use those points to create a point cloud ``vtki`` data object. This will
# be encompassed in a :class:`vtki.PolyData` object.
# simply pass the numpy points to the PolyData constructor
cloud = vtki.PolyData(points)
- cloud.plot()
+ vtki.set_plot_theme('doc')
+ cloud.plot(point_size=15, use_panel=False)
################################################################################
# Now that we have a ``vtki`` data structure of the points, we can perform a
# triangulation to turn those boring discrete points into a connected surface.
surf = cloud.delaunay_2d()
surf.plot(show_edges=True)
|
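The plotting calls above need vtki and a render window, but the NumPy half of the example stands alone; a quick check that the point cloud is one (x, y, z) row per grid node:

import numpy as np

xx, yy = np.meshgrid(np.linspace(-200, 200, 20), np.linspace(-200, 200, 20))
A, b = 100, 100
zz = A * np.exp(-0.5 * ((xx / b) ** 2.0 + (yy / b) ** 2.0))
points = np.c_[xx.reshape(-1), yy.reshape(-1), zz.reshape(-1)]

print(points.shape)        # (400, 3): the 20 x 20 grid flattened to rows
print(points[:, 2].max())  # peak of the Gaussian, just under A = 100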
b1f173fdbfb60e26a3923c7b024bc3e65e5abf80
|
selvbetjening/scheckin/now/urls.py
|
selvbetjening/scheckin/now/urls.py
|
from django.conf.urls import *
import views
urlpatterns = patterns('',
url(r'^(?P<event_id>[0-9]+)/$', views.checkin),
)
|
from django.conf.urls import *
from django.conf import settings
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from selvbetjening.sadmin.base.nav import RemoteSPage
import views
urlpatterns = patterns('',
url(r'^(?P<event_id>[0-9]+)/$', views.checkin, name='now_checkin'),
)
if 'selvbetjening.sadmin.events' in settings.INSTALLED_APPS:
import selvbetjening.sadmin.base.sadmin
now_url = lambda context, stack: reverse('now_checkin', kwargs={'event_id': stack[-1].pk})
now_page = RemoteSPage(_(u'Now Check-in'), now_url)
selvbetjening.sadmin.base.sadmin.site.get('events').attendee_admin.sadmin_action_menu.register(now_page)
|
Add links to easy check-in in sadmin
|
Add links to easy check-in in sadmin
|
Python
|
mit
|
animekita/selvbetjening,animekita/selvbetjening,animekita/selvbetjening,animekita/selvbetjening
|
from django.conf.urls import *
+ from django.conf import settings
+ from django.core.urlresolvers import reverse
+ from django.utils.translation import ugettext as _
+
+ from selvbetjening.sadmin.base.nav import RemoteSPage
import views
urlpatterns = patterns('',
- url(r'^(?P<event_id>[0-9]+)/$', views.checkin),
+ url(r'^(?P<event_id>[0-9]+)/$', views.checkin, name='now_checkin'),
)
+ if 'selvbetjening.sadmin.events' in settings.INSTALLED_APPS:
+ import selvbetjening.sadmin.base.sadmin
+
+ now_url = lambda context, stack: reverse('now_checkin', kwargs={'event_id': stack[-1].pk})
+ now_page = RemoteSPage(_(u'Now Check-in'), now_url)
+
+ selvbetjening.sadmin.base.sadmin.site.get('events').attendee_admin.sadmin_action_menu.register(now_page)
|
Add links to easy check-in in sadmin
|
## Code Before:
from django.conf.urls import *
import views
urlpatterns = patterns('',
url(r'^(?P<event_id>[0-9]+)/$', views.checkin),
)
## Instruction:
Add links to easy check-in in sadmin
## Code After:
from django.conf.urls import *
from django.conf import settings
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from selvbetjening.sadmin.base.nav import RemoteSPage
import views
urlpatterns = patterns('',
url(r'^(?P<event_id>[0-9]+)/$', views.checkin, name='now_checkin'),
)
if 'selvbetjening.sadmin.events' in settings.INSTALLED_APPS:
import selvbetjening.sadmin.base.sadmin
now_url = lambda context, stack: reverse('now_checkin', kwargs={'event_id': stack[-1].pk})
now_page = RemoteSPage(_(u'Now Check-in'), now_url)
selvbetjening.sadmin.base.sadmin.site.get('events').attendee_admin.sadmin_action_menu.register(now_page)
|
from django.conf.urls import *
+ from django.conf import settings
+ from django.core.urlresolvers import reverse
+ from django.utils.translation import ugettext as _
+
+ from selvbetjening.sadmin.base.nav import RemoteSPage
import views
urlpatterns = patterns('',
- url(r'^(?P<event_id>[0-9]+)/$', views.checkin),
+ url(r'^(?P<event_id>[0-9]+)/$', views.checkin, name='now_checkin'),
? ++++++++++++++++++++
)
+
+ if 'selvbetjening.sadmin.events' in settings.INSTALLED_APPS:
+ import selvbetjening.sadmin.base.sadmin
+
+ now_url = lambda context, stack: reverse('now_checkin', kwargs={'event_id': stack[-1].pk})
+ now_page = RemoteSPage(_(u'Now Check-in'), now_url)
+
+ selvbetjening.sadmin.base.sadmin.site.get('events').attendee_admin.sadmin_action_menu.register(now_page)
|
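A sketch of the lazy-URL idea the commit above leans on: the menu page stores a callable and resolves the concrete URL only at render time, from whatever object sits on top of the navigation stack. Django's reverse() is replaced by plain formatting, and both classes are hypothetical stand-ins.

class RemoteSPage:
    def __init__(self, title, url_callable):
        self.title = title
        self._url = url_callable

    def render_url(self, context, stack):
        # deferred until render time, when the event is known
        return self._url(context, stack)

class Event:
    pk = 42

now_url = lambda context, stack: "/scheckin/now/%s/" % stack[-1].pk
page = RemoteSPage(u"Now Check-in", now_url)
print(page.render_url(None, [Event()]))  # -> /scheckin/now/42/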
04cd17bb03f2b15cf37313cb3261dd37902d82b0
|
run_coveralls.py
|
run_coveralls.py
|
import os
from subprocess import call
if __name__ == '__main__':
if 'TRAVIS' in os.environ:
rc = call('coveralls')
raise SystemExit(rc)
|
import os
from subprocess import call
if __name__ == '__main__':
if 'TRAVIS' in os.environ:
print("Calling coveralls")
rc = call('coveralls')
raise SystemExit(rc)
|
Add a check that coveralls is actually called
|
Add a check that coveralls is actually called
|
Python
|
mit
|
browniebroke/deezer-python,browniebroke/deezer-python,pfouque/deezer-python,browniebroke/deezer-python
|
import os
from subprocess import call
if __name__ == '__main__':
if 'TRAVIS' in os.environ:
+ print("Calling coveralls")
rc = call('coveralls')
raise SystemExit(rc)
|
Add a check that coveralls is actually called
|
## Code Before:
import os
from subprocess import call
if __name__ == '__main__':
if 'TRAVIS' in os.environ:
rc = call('coveralls')
raise SystemExit(rc)
## Instruction:
Add a check that coveralls is actually called
## Code After:
import os
from subprocess import call
if __name__ == '__main__':
if 'TRAVIS' in os.environ:
print("Calling coveralls")
rc = call('coveralls')
raise SystemExit(rc)
|
import os
from subprocess import call
if __name__ == '__main__':
if 'TRAVIS' in os.environ:
+ print("Calling coveralls")
rc = call('coveralls')
raise SystemExit(rc)
|
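The new print only ever appears on CI, since the guard checks for the TRAVIS variable; to exercise the same path locally, fake the variable and substitute a harmless command for coveralls (assumes a POSIX echo):

import os
from subprocess import call

os.environ.setdefault('TRAVIS', 'true')  # simulate the CI environment
if 'TRAVIS' in os.environ:
    print("Calling coveralls")
    rc = call(['echo', 'coveralls would run here'])
    raise SystemExit(rc)  # exits 0 when the stand-in command succeeds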
8ddbd0b39687f46637041848ab7190bcefd57b68
|
pyramid_mongodb/__init__.py
|
pyramid_mongodb/__init__.py
|
import pymongo
from gridfs import GridFS
def initialize_mongo_db( config, settings ):
if ( 'mongodb.use' in settings ) and ( settings['mongodb.use'] == 'true' ):
conn = pymongo.Connection( settings['mongodb.uri'] )
config.registry.settings['!mongodb.conn'] = conn
config.add_subscriber(add_mongo_db, 'pyramid.events.NewRequest')
def add_mongo_db(event):
settings = event.request.registry.settings
db = settings['!mongodb.conn'][settings['mongodb.name']]
event.request.mongodb = db
event.request.gridfs = GridFS(db)
|
import pymongo
from gridfs import GridFS
def initialize_mongo_db(config, settings):
if ('mongodb.use' in settings) and (settings['mongodb.use'] == 'true'):
conn = pymongo.Connection(settings['mongodb.uri'])
config.registry.settings['!mongodb.conn'] = conn
config.set_request_property(add_mongo_db, 'mongodb', reify=True)
def add_mongo_db(request):
settings = request.registry.settings
db = settings['!mongodb.conn'][settings['mongodb.name']]
request.mongodb = db
request.gridfs = GridFS(db)
|
Use set_request_property instead of subscriber to improve performance
|
Use set_request_property instead of subscriber to improve performance
|
Python
|
mit
|
niallo/pyramid_mongodb
|
import pymongo
from gridfs import GridFS
- def initialize_mongo_db( config, settings ):
+ def initialize_mongo_db(config, settings):
- if ( 'mongodb.use' in settings ) and ( settings['mongodb.use'] == 'true' ):
+ if ('mongodb.use' in settings) and (settings['mongodb.use'] == 'true'):
- conn = pymongo.Connection( settings['mongodb.uri'] )
+ conn = pymongo.Connection(settings['mongodb.uri'])
config.registry.settings['!mongodb.conn'] = conn
- config.add_subscriber(add_mongo_db, 'pyramid.events.NewRequest')
+ config.set_request_property(add_mongo_db, 'mongodb', reify=True)
- def add_mongo_db(event):
+ def add_mongo_db(request):
- settings = event.request.registry.settings
+ settings = request.registry.settings
db = settings['!mongodb.conn'][settings['mongodb.name']]
- event.request.mongodb = db
+ request.mongodb = db
- event.request.gridfs = GridFS(db)
+ request.gridfs = GridFS(db)
|
Use set_request_property instead of subscriber to improve performance
|
## Code Before:
import pymongo
from gridfs import GridFS
def initialize_mongo_db( config, settings ):
if ( 'mongodb.use' in settings ) and ( settings['mongodb.use'] == 'true' ):
conn = pymongo.Connection( settings['mongodb.uri'] )
config.registry.settings['!mongodb.conn'] = conn
config.add_subscriber(add_mongo_db, 'pyramid.events.NewRequest')
def add_mongo_db(event):
settings = event.request.registry.settings
db = settings['!mongodb.conn'][settings['mongodb.name']]
event.request.mongodb = db
event.request.gridfs = GridFS(db)
## Instruction:
Use set_request_property instead of subscriber to improve performance
## Code After:
import pymongo
from gridfs import GridFS
def initialize_mongo_db(config, settings):
if ('mongodb.use' in settings) and (settings['mongodb.use'] == 'true'):
conn = pymongo.Connection(settings['mongodb.uri'])
config.registry.settings['!mongodb.conn'] = conn
config.set_request_property(add_mongo_db, 'mongodb', reify=True)
def add_mongo_db(request):
settings = request.registry.settings
db = settings['!mongodb.conn'][settings['mongodb.name']]
request.mongodb = db
request.gridfs = GridFS(db)
|
import pymongo
from gridfs import GridFS
- def initialize_mongo_db( config, settings ):
? - -
+ def initialize_mongo_db(config, settings):
- if ( 'mongodb.use' in settings ) and ( settings['mongodb.use'] == 'true' ):
? - - - -
+ if ('mongodb.use' in settings) and (settings['mongodb.use'] == 'true'):
- conn = pymongo.Connection( settings['mongodb.uri'] )
? - -
+ conn = pymongo.Connection(settings['mongodb.uri'])
config.registry.settings['!mongodb.conn'] = conn
- config.add_subscriber(add_mongo_db, 'pyramid.events.NewRequest')
+ config.set_request_property(add_mongo_db, 'mongodb', reify=True)
- def add_mongo_db(event):
? ^ ^
+ def add_mongo_db(request):
? + ^^ ^
- settings = event.request.registry.settings
? ------
+ settings = request.registry.settings
db = settings['!mongodb.conn'][settings['mongodb.name']]
- event.request.mongodb = db
? ------
+ request.mongodb = db
- event.request.gridfs = GridFS(db)
? ------
+ request.gridfs = GridFS(db)
|
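Why the swap helps: the NewRequest subscriber ran for every request, while a property registered with reify=True is computed on first access and cached for the rest of the request. A dependency-free sketch of a reify-style descriptor along the lines of the one Pyramid uses:

class reify:
    def __init__(self, func):
        self.func = func

    def __get__(self, obj, objtype=None):
        if obj is None:
            return self
        value = self.func(obj)
        # cache on the instance so later lookups bypass the descriptor
        obj.__dict__[self.func.__name__] = value
        return value

class Request:
    @reify
    def mongodb(self):
        print("opening connection once")
        return {"db": "demo"}

r = Request()
r.mongodb
r.mongodb  # "opening connection once" is printed a single time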
a35b6e46bd9d443f07391f37f5e0e384e37608bb
|
nbgrader/tests/test_nbgrader_feedback.py
|
nbgrader/tests/test_nbgrader_feedback.py
|
from .base import TestBase
from nbgrader.api import Gradebook
import os
class TestNbgraderFeedback(TestBase):
def _setup_db(self):
dbpath = self._init_db()
gb = Gradebook(dbpath)
gb.add_assignment("Problem Set 1")
gb.add_student("foo")
gb.add_student("bar")
return dbpath
def test_help(self):
"""Does the help display without error?"""
with self._temp_cwd():
self._run_command("nbgrader feedback --help-all")
def test_single_file(self):
"""Can feedback be generated for an unchanged assignment?"""
with self._temp_cwd(["files/submitted-unchanged.ipynb"]):
dbpath = self._setup_db()
self._run_command(
'nbgrader autograde submitted-unchanged.ipynb '
'--db="{}" '
'--assignment="Problem Set 1" '
'--AssignmentExporter.notebook_id=teacher '
'--student=foo'.format(dbpath))
self._run_command(
'nbgrader feedback submitted-unchanged.nbconvert.ipynb '
'--db="{}" '
'--assignment="Problem Set 1" '
'--AssignmentExporter.notebook_id=teacher '
'--student=foo'.format(dbpath))
assert os.path.exists('submitted-unchanged.nbconvert.nbconvert.html')
|
from .base import TestBase
from nbgrader.api import Gradebook
import os
import shutil
class TestNbgraderFeedback(TestBase):
def _setup_db(self):
dbpath = self._init_db()
gb = Gradebook(dbpath)
gb.add_assignment("ps1")
gb.add_student("foo")
return dbpath
def test_help(self):
"""Does the help display without error?"""
with self._temp_cwd():
self._run_command("nbgrader feedback --help-all")
def test_single_file(self):
"""Can feedback be generated for an unchanged assignment?"""
with self._temp_cwd(["files/submitted-unchanged.ipynb"]):
dbpath = self._setup_db()
os.makedirs('source/ps1')
shutil.copy('submitted-unchanged.ipynb', 'source/ps1/p1.ipynb')
self._run_command('nbgrader assign ps1 --db="{}" '.format(dbpath))
os.makedirs('submitted/foo/ps1')
shutil.move('submitted-unchanged.ipynb', 'submitted/foo/ps1/p1.ipynb')
self._run_command('nbgrader autograde ps1 --db="{}" '.format(dbpath))
self._run_command('nbgrader feedback ps1 --db="{}" '.format(dbpath))
assert os.path.exists('feedback/foo/ps1/p1.html')
|
Update tests for nbgrader feedback
|
Update tests for nbgrader feedback
|
Python
|
bsd-3-clause
|
jhamrick/nbgrader,alope107/nbgrader,ellisonbg/nbgrader,EdwardJKim/nbgrader,modulexcite/nbgrader,EdwardJKim/nbgrader,ellisonbg/nbgrader,jdfreder/nbgrader,MatKallada/nbgrader,jupyter/nbgrader,MatKallada/nbgrader,jupyter/nbgrader,dementrock/nbgrader,jhamrick/nbgrader,EdwardJKim/nbgrader,jupyter/nbgrader,jupyter/nbgrader,modulexcite/nbgrader,jhamrick/nbgrader,alope107/nbgrader,jdfreder/nbgrader,EdwardJKim/nbgrader,jupyter/nbgrader,dementrock/nbgrader,ellisonbg/nbgrader,jhamrick/nbgrader,ellisonbg/nbgrader
|
from .base import TestBase
from nbgrader.api import Gradebook
import os
+ import shutil
class TestNbgraderFeedback(TestBase):
def _setup_db(self):
dbpath = self._init_db()
gb = Gradebook(dbpath)
- gb.add_assignment("Problem Set 1")
+ gb.add_assignment("ps1")
gb.add_student("foo")
- gb.add_student("bar")
return dbpath
def test_help(self):
"""Does the help display without error?"""
with self._temp_cwd():
self._run_command("nbgrader feedback --help-all")
def test_single_file(self):
"""Can feedback be generated for an unchanged assignment?"""
with self._temp_cwd(["files/submitted-unchanged.ipynb"]):
dbpath = self._setup_db()
- self._run_command(
- 'nbgrader autograde submitted-unchanged.ipynb '
- '--db="{}" '
- '--assignment="Problem Set 1" '
- '--AssignmentExporter.notebook_id=teacher '
- '--student=foo'.format(dbpath))
+ os.makedirs('source/ps1')
+ shutil.copy('submitted-unchanged.ipynb', 'source/ps1/p1.ipynb')
+ self._run_command('nbgrader assign ps1 --db="{}" '.format(dbpath))
- self._run_command(
- 'nbgrader feedback submitted-unchanged.nbconvert.ipynb '
- '--db="{}" '
- '--assignment="Problem Set 1" '
- '--AssignmentExporter.notebook_id=teacher '
- '--student=foo'.format(dbpath))
- assert os.path.exists('submitted-unchanged.nbconvert.nbconvert.html')
+ os.makedirs('submitted/foo/ps1')
+ shutil.move('submitted-unchanged.ipynb', 'submitted/foo/ps1/p1.ipynb')
+ self._run_command('nbgrader autograde ps1 --db="{}" '.format(dbpath))
+ self._run_command('nbgrader feedback ps1 --db="{}" '.format(dbpath))
+ assert os.path.exists('feedback/foo/ps1/p1.html')
+
|
Update tests for nbgrader feedback
|
## Code Before:
from .base import TestBase
from nbgrader.api import Gradebook
import os
class TestNbgraderFeedback(TestBase):
def _setup_db(self):
dbpath = self._init_db()
gb = Gradebook(dbpath)
gb.add_assignment("Problem Set 1")
gb.add_student("foo")
gb.add_student("bar")
return dbpath
def test_help(self):
"""Does the help display without error?"""
with self._temp_cwd():
self._run_command("nbgrader feedback --help-all")
def test_single_file(self):
"""Can feedback be generated for an unchanged assignment?"""
with self._temp_cwd(["files/submitted-unchanged.ipynb"]):
dbpath = self._setup_db()
self._run_command(
'nbgrader autograde submitted-unchanged.ipynb '
'--db="{}" '
'--assignment="Problem Set 1" '
'--AssignmentExporter.notebook_id=teacher '
'--student=foo'.format(dbpath))
self._run_command(
'nbgrader feedback submitted-unchanged.nbconvert.ipynb '
'--db="{}" '
'--assignment="Problem Set 1" '
'--AssignmentExporter.notebook_id=teacher '
'--student=foo'.format(dbpath))
assert os.path.exists('submitted-unchanged.nbconvert.nbconvert.html')
## Instruction:
Update tests for nbgrader feedback
## Code After:
from .base import TestBase
from nbgrader.api import Gradebook
import os
import shutil
class TestNbgraderFeedback(TestBase):
def _setup_db(self):
dbpath = self._init_db()
gb = Gradebook(dbpath)
gb.add_assignment("ps1")
gb.add_student("foo")
return dbpath
def test_help(self):
"""Does the help display without error?"""
with self._temp_cwd():
self._run_command("nbgrader feedback --help-all")
def test_single_file(self):
"""Can feedback be generated for an unchanged assignment?"""
with self._temp_cwd(["files/submitted-unchanged.ipynb"]):
dbpath = self._setup_db()
os.makedirs('source/ps1')
shutil.copy('submitted-unchanged.ipynb', 'source/ps1/p1.ipynb')
self._run_command('nbgrader assign ps1 --db="{}" '.format(dbpath))
os.makedirs('submitted/foo/ps1')
shutil.move('submitted-unchanged.ipynb', 'submitted/foo/ps1/p1.ipynb')
self._run_command('nbgrader autograde ps1 --db="{}" '.format(dbpath))
self._run_command('nbgrader feedback ps1 --db="{}" '.format(dbpath))
assert os.path.exists('feedback/foo/ps1/p1.html')
|
from .base import TestBase
from nbgrader.api import Gradebook
import os
+ import shutil
class TestNbgraderFeedback(TestBase):
def _setup_db(self):
dbpath = self._init_db()
gb = Gradebook(dbpath)
- gb.add_assignment("Problem Set 1")
? ^^^^^^^^^^^^
+ gb.add_assignment("ps1")
? ^^
gb.add_student("foo")
- gb.add_student("bar")
return dbpath
def test_help(self):
"""Does the help display without error?"""
with self._temp_cwd():
self._run_command("nbgrader feedback --help-all")
def test_single_file(self):
"""Can feedback be generated for an unchanged assignment?"""
with self._temp_cwd(["files/submitted-unchanged.ipynb"]):
dbpath = self._setup_db()
- self._run_command(
- 'nbgrader autograde submitted-unchanged.ipynb '
- '--db="{}" '
- '--assignment="Problem Set 1" '
- '--AssignmentExporter.notebook_id=teacher '
- '--student=foo'.format(dbpath))
+ os.makedirs('source/ps1')
+ shutil.copy('submitted-unchanged.ipynb', 'source/ps1/p1.ipynb')
+ self._run_command('nbgrader assign ps1 --db="{}" '.format(dbpath))
- self._run_command(
- 'nbgrader feedback submitted-unchanged.nbconvert.ipynb '
- '--db="{}" '
- '--assignment="Problem Set 1" '
- '--AssignmentExporter.notebook_id=teacher '
- '--student=foo'.format(dbpath))
- assert os.path.exists('submitted-unchanged.nbconvert.nbconvert.html')
+ os.makedirs('submitted/foo/ps1')
+ shutil.move('submitted-unchanged.ipynb', 'submitted/foo/ps1/p1.ipynb')
+ self._run_command('nbgrader autograde ps1 --db="{}" '.format(dbpath))
+ self._run_command('nbgrader feedback ps1 --db="{}" '.format(dbpath))
+
+ assert os.path.exists('feedback/foo/ps1/p1.html')
|
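The rewritten test leans on nbgrader's directory convention, in which source/<assignment>/, submitted/<student>/<assignment>/ and feedback/<student>/<assignment>/ mirror one another. A sketch of just the layout, with the grading commands stubbed out by hand-written files:

import os
import shutil
import tempfile

root = tempfile.mkdtemp()
os.makedirs(os.path.join(root, 'source', 'ps1'))
os.makedirs(os.path.join(root, 'submitted', 'foo', 'ps1'))
open(os.path.join(root, 'submitted', 'foo', 'ps1', 'p1.ipynb'), 'w').close()

# 'nbgrader feedback' would write the mirrored path for student 'foo':
feedback = os.path.join(root, 'feedback', 'foo', 'ps1')
os.makedirs(feedback)
open(os.path.join(feedback, 'p1.html'), 'w').close()

print(os.path.exists(os.path.join(feedback, 'p1.html')))  # True
shutil.rmtree(root)  # clean up the scratch tree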
19fd2795e1cd909bb969a4c4e514d8cb1fd884f5
|
plugins/XmlMaterialProfile/__init__.py
|
plugins/XmlMaterialProfile/__init__.py
|
from . import XmlMaterialProfile
from UM.MimeTypeDatabase import MimeType, MimeTypeDatabase
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
return {
"plugin": {
"name": catalog.i18nc("@label", "Material Profiles"),
"author": "Ultimaker",
"version": "1.0",
"description": catalog.i18nc("@info:whatsthis", "Provides capabilities to read and write XML-based material profiles."),
"api": 3
},
"settings_container": {
"mimetype": "application/x-ultimaker-material-profile"
}
}
def register(app):
mime_type = MimeType(
name = "application/x-ultimaker-material-profile",
comment = "Ultimaker Material Profile",
suffixes = [ "xml.fdm_material" ]
)
MimeTypeDatabase.addMimeType(mime_type)
return { "settings_container": XmlMaterialProfile.XmlMaterialProfile("default_xml_material_profile") }
|
from . import XmlMaterialProfile
from UM.MimeTypeDatabase import MimeType, MimeTypeDatabase
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
return {
"plugin": {
"name": catalog.i18nc("@label", "Material Profiles"),
"author": "Ultimaker",
"version": "1.0",
"description": catalog.i18nc("@info:whatsthis", "Provides capabilities to read and write XML-based material profiles."),
"api": 3
},
"settings_container": {
"type": "material",
"mimetype": "application/x-ultimaker-material-profile"
}
}
def register(app):
mime_type = MimeType(
name = "application/x-ultimaker-material-profile",
comment = "Ultimaker Material Profile",
suffixes = [ "xml.fdm_material" ]
)
MimeTypeDatabase.addMimeType(mime_type)
return { "settings_container": XmlMaterialProfile.XmlMaterialProfile("default_xml_material_profile") }
|
Mark XmlMaterialProfile as type "material" so the import/export code can find it
|
Mark XmlMaterialProfile as type "material" so the import/export code can find it
Contributes to CURA-341
|
Python
|
agpl-3.0
|
senttech/Cura,fieldOfView/Cura,totalretribution/Cura,hmflash/Cura,Curahelper/Cura,totalretribution/Cura,fieldOfView/Cura,hmflash/Cura,ynotstartups/Wanhao,ynotstartups/Wanhao,senttech/Cura,Curahelper/Cura
|
from . import XmlMaterialProfile
from UM.MimeTypeDatabase import MimeType, MimeTypeDatabase
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
return {
"plugin": {
"name": catalog.i18nc("@label", "Material Profiles"),
"author": "Ultimaker",
"version": "1.0",
"description": catalog.i18nc("@info:whatsthis", "Provides capabilities to read and write XML-based material profiles."),
"api": 3
},
"settings_container": {
+ "type": "material",
"mimetype": "application/x-ultimaker-material-profile"
}
}
def register(app):
mime_type = MimeType(
name = "application/x-ultimaker-material-profile",
comment = "Ultimaker Material Profile",
suffixes = [ "xml.fdm_material" ]
)
MimeTypeDatabase.addMimeType(mime_type)
return { "settings_container": XmlMaterialProfile.XmlMaterialProfile("default_xml_material_profile") }
|
Mark XmlMaterialProfile as type "material" so the import/export code can find it
|
## Code Before:
from . import XmlMaterialProfile
from UM.MimeTypeDatabase import MimeType, MimeTypeDatabase
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
return {
"plugin": {
"name": catalog.i18nc("@label", "Material Profiles"),
"author": "Ultimaker",
"version": "1.0",
"description": catalog.i18nc("@info:whatsthis", "Provides capabilities to read and write XML-based material profiles."),
"api": 3
},
"settings_container": {
"mimetype": "application/x-ultimaker-material-profile"
}
}
def register(app):
mime_type = MimeType(
name = "application/x-ultimaker-material-profile",
comment = "Ultimaker Material Profile",
suffixes = [ "xml.fdm_material" ]
)
MimeTypeDatabase.addMimeType(mime_type)
return { "settings_container": XmlMaterialProfile.XmlMaterialProfile("default_xml_material_profile") }
## Instruction:
Mark XmlMaterialProfile as type "material" so the import/export code can find it
## Code After:
from . import XmlMaterialProfile
from UM.MimeTypeDatabase import MimeType, MimeTypeDatabase
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
return {
"plugin": {
"name": catalog.i18nc("@label", "Material Profiles"),
"author": "Ultimaker",
"version": "1.0",
"description": catalog.i18nc("@info:whatsthis", "Provides capabilities to read and write XML-based material profiles."),
"api": 3
},
"settings_container": {
"type": "material",
"mimetype": "application/x-ultimaker-material-profile"
}
}
def register(app):
mime_type = MimeType(
name = "application/x-ultimaker-material-profile",
comment = "Ultimaker Material Profile",
suffixes = [ "xml.fdm_material" ]
)
MimeTypeDatabase.addMimeType(mime_type)
return { "settings_container": XmlMaterialProfile.XmlMaterialProfile("default_xml_material_profile") }
|
from . import XmlMaterialProfile
from UM.MimeTypeDatabase import MimeType, MimeTypeDatabase
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
return {
"plugin": {
"name": catalog.i18nc("@label", "Material Profiles"),
"author": "Ultimaker",
"version": "1.0",
"description": catalog.i18nc("@info:whatsthis", "Provides capabilities to read and write XML-based material profiles."),
"api": 3
},
"settings_container": {
+ "type": "material",
"mimetype": "application/x-ultimaker-material-profile"
}
}
def register(app):
mime_type = MimeType(
name = "application/x-ultimaker-material-profile",
comment = "Ultimaker Material Profile",
suffixes = [ "xml.fdm_material" ]
)
MimeTypeDatabase.addMimeType(mime_type)
return { "settings_container": XmlMaterialProfile.XmlMaterialProfile("default_xml_material_profile") }
|
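What the added "type" key enables: registry code can select material containers from plugin metadata alone, without instantiating anything. A sketch over a made-up plugin table:

plugins = {
    "XmlMaterialProfile": {
        "settings_container": {
            "type": "material",
            "mimetype": "application/x-ultimaker-material-profile",
        }
    },
    "SomeOtherPlugin": {
        "settings_container": {"mimetype": "application/json"}
    },
}

materials = [
    name
    for name, meta in plugins.items()
    if meta.get("settings_container", {}).get("type") == "material"
]
print(materials)  # ['XmlMaterialProfile']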
0e376d987dd8d513354a840da6bee6d5a2752f89
|
django_countries/widgets.py
|
django_countries/widgets.py
|
from django.conf import settings
from django.forms import widgets
from django.utils.safestring import mark_safe
COUNTRY_CHANGE_HANDLER = """
this.nextSibling.src = this.nextSibling.src.replace(/[a-z_]*\.gif/, (this.value.toLowerCase() || '__') + '.gif');
"""
FLAG_IMAGE = """<img style="margin: 6px 4px; position: absolute;" src="%s" id="%%s-flag">"""
class CountrySelectWidget(widgets.Select):
def render(self, name, value, attrs=None):
attrs = attrs or {}
attrs['onchange'] = COUNTRY_CHANGE_HANDLER
data = super(CountrySelectWidget, self).render(name, value, attrs)
data += mark_safe((FLAG_IMAGE % settings.COUNTRIES_FLAG_URL) % (
settings.STATIC_URL,
unicode(value).lower() or '__',
attrs['id']
))
return data
|
from django.conf import settings
from django.forms import widgets
from django.utils.safestring import mark_safe
COUNTRY_CHANGE_HANDLER = """
this.nextSibling.src = this.nextSibling.src.replace(/[a-z_]{2}(\.[a-zA-Z]*)$/, (this.value.toLowerCase() || '__') + '$1');
"""
FLAG_IMAGE = """<img style="margin: 6px 4px; position: absolute;" src="%s" id="%%s-flag">"""
class CountrySelectWidget(widgets.Select):
def render(self, name, value, attrs=None):
attrs = attrs or {}
attrs['onchange'] = COUNTRY_CHANGE_HANDLER
data = super(CountrySelectWidget, self).render(name, value, attrs)
data += mark_safe((FLAG_IMAGE % settings.COUNTRIES_FLAG_URL) % (
settings.STATIC_URL,
unicode(value).lower() or '__',
attrs['id']
))
return data
|
Make the regular expression not require a gif image.
|
Make the regular expression not require a gif image.
|
Python
|
mit
|
SmileyChris/django-countries,schinckel/django-countries,rahimnathwani/django-countries,jrfernandes/django-countries,velfimov/django-countries,fladi/django-countries,pimlie/django-countries
|
from django.conf import settings
from django.forms import widgets
from django.utils.safestring import mark_safe
COUNTRY_CHANGE_HANDLER = """
- this.nextSibling.src = this.nextSibling.src.replace(/[a-z_]*\.gif/, (this.value.toLowerCase() || '__') + '.gif');
+ this.nextSibling.src = this.nextSibling.src.replace(/[a-z_]{2}(\.[a-zA-Z]*)$/, (this.value.toLowerCase() || '__') + '$1');
"""
FLAG_IMAGE = """<img style="margin: 6px 4px; position: absolute;" src="%s" id="%%s-flag">"""
class CountrySelectWidget(widgets.Select):
def render(self, name, value, attrs=None):
attrs = attrs or {}
attrs['onchange'] = COUNTRY_CHANGE_HANDLER
data = super(CountrySelectWidget, self).render(name, value, attrs)
data += mark_safe((FLAG_IMAGE % settings.COUNTRIES_FLAG_URL) % (
settings.STATIC_URL,
unicode(value).lower() or '__',
attrs['id']
))
return data
|
Make the regular expression not require a gif image.
|
## Code Before:
from django.conf import settings
from django.forms import widgets
from django.utils.safestring import mark_safe
COUNTRY_CHANGE_HANDLER = """
this.nextSibling.src = this.nextSibling.src.replace(/[a-z_]*\.gif/, (this.value.toLowerCase() || '__') + '.gif');
"""
FLAG_IMAGE = """<img style="margin: 6px 4px; position: absolute;" src="%s" id="%%s-flag">"""
class CountrySelectWidget(widgets.Select):
def render(self, name, value, attrs=None):
attrs = attrs or {}
attrs['onchange'] = COUNTRY_CHANGE_HANDLER
data = super(CountrySelectWidget, self).render(name, value, attrs)
data += mark_safe((FLAG_IMAGE % settings.COUNTRIES_FLAG_URL) % (
settings.STATIC_URL,
unicode(value).lower() or '__',
attrs['id']
))
return data
## Instruction:
Make the regular expression not require a gif image.
## Code After:
from django.conf import settings
from django.forms import widgets
from django.utils.safestring import mark_safe
COUNTRY_CHANGE_HANDLER = """
this.nextSibling.src = this.nextSibling.src.replace(/[a-z_]{2}(\.[a-zA-Z]*)$/, (this.value.toLowerCase() || '__') + '$1');
"""
FLAG_IMAGE = """<img style="margin: 6px 4px; position: absolute;" src="%s" id="%%s-flag">"""
class CountrySelectWidget(widgets.Select):
def render(self, name, value, attrs=None):
attrs = attrs or {}
attrs['onchange'] = COUNTRY_CHANGE_HANDLER
data = super(CountrySelectWidget, self).render(name, value, attrs)
data += mark_safe((FLAG_IMAGE % settings.COUNTRIES_FLAG_URL) % (
settings.STATIC_URL,
unicode(value).lower() or '__',
attrs['id']
))
return data
|
from django.conf import settings
from django.forms import widgets
from django.utils.safestring import mark_safe
COUNTRY_CHANGE_HANDLER = """
- this.nextSibling.src = this.nextSibling.src.replace(/[a-z_]*\.gif/, (this.value.toLowerCase() || '__') + '.gif');
? ^ ^^^ ^^^^
+ this.nextSibling.src = this.nextSibling.src.replace(/[a-z_]{2}(\.[a-zA-Z]*)$/, (this.value.toLowerCase() || '__') + '$1');
? ^^^^ ^^^^^^^^^^^ ^^
"""
FLAG_IMAGE = """<img style="margin: 6px 4px; position: absolute;" src="%s" id="%%s-flag">"""
class CountrySelectWidget(widgets.Select):
def render(self, name, value, attrs=None):
attrs = attrs or {}
attrs['onchange'] = COUNTRY_CHANGE_HANDLER
data = super(CountrySelectWidget, self).render(name, value, attrs)
data += mark_safe((FLAG_IMAGE % settings.COUNTRIES_FLAG_URL) % (
settings.STATIC_URL,
unicode(value).lower() or '__',
attrs['id']
))
return data
|
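The replacement runs in the browser, but the pattern itself can be checked from Python: the new expression keys on the trailing two-letter code plus whatever extension follows, rather than demanding a .gif suffix.

import re

pattern = r'[a-z_]{2}(\.[a-zA-Z]*)$'
for src in ('flags/gb.gif', 'flags/gb.png', 'flags/__.svg'):
    # swap in 'fr' while the group keeps the original extension
    print(re.sub(pattern, r'fr\1', src))
# flags/fr.gif
# flags/fr.png
# flags/fr.svg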
ab6ca021430933b38788ad2ae19f27f8ed00ab54
|
parse.py
|
parse.py
|
import sys
import simplejson as json
indentation = 0
lang_def = None
with open('language.json') as lang_def_file:
lang_def = json.loads(lang_def_file.read())
if lang_def is None:
print("error reading json language definition")
exit(1)
repl = lang_def['rules']
sin = sys.argv[1]
for r in repl:
sin = sin.replace(r['lang_rep'], r['il_rep'])
for r in repl:
sin = sin.replace(r['il_rep'], r['python_rep'])
sin = sin.replace('\\n', '\n')
for l in sin.splitlines():
try:
r = eval(l)
if r is not None:
print(r)
except:
try:
exec(l)
except:
print("ERROR OMG ERROR" + str(l))
|
import sys
import simplejson as json
def translate(file='hello_world.py'):
lang_def = None
with open('language.json') as lang_def_file:
lang_def = json.loads(lang_def_file.read())
if lang_def is None:
print('error reading json language definition')
exit(1)
python_code = None
with open(file) as python_file:
python_code = python_file.read()
if python_code is None:
print('error reading python file', file)
exit(1)
repl = lang_def['rules']
for r in repl:
python_code = python_code.replace(r['python_rep'], r['il_rep'])
for r in repl:
python_code = python_code.replace(r['il_rep'], r['lang_rep'])
python_code = python_code.replace('\\n', '\n')
print(python_code)
exit(0)
if len(sys.argv) == 1:
print("fail: requires at least one command line argument")
exit(1)
if sys.argv[1] == 'translate':
if len(sys.argv) > 2:
translate(sys.argv[2])
else:
translate()
print('fail: shouldn\'t reach here')
exit(1)
indentation = 0
lang_def = None
with open('language.json') as lang_def_file:
lang_def = json.loads(lang_def_file.read())
if lang_def is None:
print('error reading json language definition')
exit(1)
repl = lang_def['rules']
sin = sys.argv[1]
for r in repl:
sin = sin.replace(r['lang_rep'], r['il_rep'])
for r in repl:
sin = sin.replace(r['il_rep'], r['python_rep'])
sin = sin.replace('\\n', '\n')
for l in sin.splitlines():
try:
r = eval(l)
if r is not None:
print(r)
except:
try:
exec(l)
except:
print("ERROR OMG ERROR" + str(l))
|
Add translate functionality via command line arguments
|
Add translate functionality via command line arguments
|
Python
|
unlicense
|
philipdexter/build-a-lang
|
import sys
import simplejson as json
+
+ def translate(file='hello_world.py'):
+ lang_def = None
+ with open('language.json') as lang_def_file:
+ lang_def = json.loads(lang_def_file.read())
+ if lang_def is None:
+ print('error reading json language definition')
+ exit(1)
+ python_code = None
+ with open(file) as python_file:
+ python_code = python_file.read()
+ if python_code is None:
+ print('error reading python file', file)
+ exit(1)
+
+ repl = lang_def['rules']
+
+ for r in repl:
+ python_code = python_code.replace(r['python_rep'], r['il_rep'])
+ for r in repl:
+ python_code = python_code.replace(r['il_rep'], r['lang_rep'])
+
+ python_code = python_code.replace('\\n', '\n')
+
+ print(python_code)
+
+ exit(0)
+
+ if len(sys.argv) == 1:
+ print("fail: requires at least one command line argument")
+ exit(1)
+
+ if sys.argv[1] == 'translate':
+ if len(sys.argv) > 2:
+ translate(sys.argv[2])
+ else:
+ translate()
+
+ print('fail: shouldn\'t reach here')
+ exit(1)
indentation = 0
lang_def = None
with open('language.json') as lang_def_file:
lang_def = json.loads(lang_def_file.read())
if lang_def is None:
- print("error reading json language definition")
+ print('error reading json language definition')
exit(1)
repl = lang_def['rules']
sin = sys.argv[1]
for r in repl:
sin = sin.replace(r['lang_rep'], r['il_rep'])
for r in repl:
sin = sin.replace(r['il_rep'], r['python_rep'])
sin = sin.replace('\\n', '\n')
for l in sin.splitlines():
try:
r = eval(l)
if r is not None:
print(r)
except:
try:
exec(l)
except:
print("ERROR OMG ERROR" + str(l))
|
Add translate functionality via command line arguments
|
## Code Before:
import sys
import simplejson as json
indentation = 0
lang_def = None
with open('language.json') as lang_def_file:
lang_def = json.loads(lang_def_file.read())
if lang_def is None:
print("error reading json language definition")
exit(1)
repl = lang_def['rules']
sin = sys.argv[1]
for r in repl:
sin = sin.replace(r['lang_rep'], r['il_rep'])
for r in repl:
sin = sin.replace(r['il_rep'], r['python_rep'])
sin = sin.replace('\\n', '\n')
for l in sin.splitlines():
try:
r = eval(l)
if r is not None:
print(r)
except:
try:
exec(l)
except:
print("ERROR OMG ERROR" + str(l))
## Instruction:
Add translate functionality via command line arguments
## Code After:
import sys
import simplejson as json
def translate(file='hello_world.py'):
lang_def = None
with open('language.json') as lang_def_file:
lang_def = json.loads(lang_def_file.read())
if lang_def is None:
print('error reading json language definition')
exit(1)
python_code = None
with open(file) as python_file:
python_code = python_file.read()
if python_code is None:
print('error reading python file', file)
exit(1)
repl = lang_def['rules']
for r in repl:
python_code = python_code.replace(r['python_rep'], r['il_rep'])
for r in repl:
python_code = python_code.replace(r['il_rep'], r['lang_rep'])
python_code = python_code.replace('\\n', '\n')
print(python_code)
exit(0)
if len(sys.argv) == 1:
print("fail: requires at least one command line argument")
exit(1)
if sys.argv[1] == 'translate':
if len(sys.argv) > 2:
translate(sys.argv[2])
else:
translate()
print('fail: shouldn\'t reach here')
exit(1)
indentation = 0
lang_def = None
with open('language.json') as lang_def_file:
lang_def = json.loads(lang_def_file.read())
if lang_def is None:
print('error reading json language definition')
exit(1)
repl = lang_def['rules']
sin = sys.argv[1]
for r in repl:
sin = sin.replace(r['lang_rep'], r['il_rep'])
for r in repl:
sin = sin.replace(r['il_rep'], r['python_rep'])
sin = sin.replace('\\n', '\n')
for l in sin.splitlines():
try:
r = eval(l)
if r is not None:
print(r)
except:
try:
exec(l)
except:
print("ERROR OMG ERROR" + str(l))
|
import sys
import simplejson as json
+
+ def translate(file='hello_world.py'):
+ lang_def = None
+ with open('language.json') as lang_def_file:
+ lang_def = json.loads(lang_def_file.read())
+ if lang_def is None:
+ print('error reading json language definition')
+ exit(1)
+ python_code = None
+ with open(file) as python_file:
+ python_code = python_file.read()
+ if python_code is None:
+ print('error reading python file', file)
+ exit(1)
+
+ repl = lang_def['rules']
+
+ for r in repl:
+ python_code = python_code.replace(r['python_rep'], r['il_rep'])
+ for r in repl:
+ python_code = python_code.replace(r['il_rep'], r['lang_rep'])
+
+ python_code = python_code.replace('\\n', '\n')
+
+ print(python_code)
+
+ exit(0)
+
+ if len(sys.argv) == 1:
+ print("fail: requires at least one command line argument")
+ exit(1)
+
+ if sys.argv[1] == 'translate':
+ if len(sys.argv) > 2:
+ translate(sys.argv[2])
+ else:
+ translate()
+
+ print('fail: shouldn\'t reach here')
+ exit(1)
indentation = 0
lang_def = None
with open('language.json') as lang_def_file:
lang_def = json.loads(lang_def_file.read())
if lang_def is None:
- print("error reading json language definition")
? ^ ^
+ print('error reading json language definition')
? ^ ^
exit(1)
repl = lang_def['rules']
sin = sys.argv[1]
for r in repl:
sin = sin.replace(r['lang_rep'], r['il_rep'])
for r in repl:
sin = sin.replace(r['il_rep'], r['python_rep'])
sin = sin.replace('\\n', '\n')
for l in sin.splitlines():
try:
r = eval(l)
if r is not None:
print(r)
except:
try:
exec(l)
except:
print("ERROR OMG ERROR" + str(l))
|
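A minimal sketch of the same argv dispatch, runnable without the JSON language file by stubbing translate(); the file names are placeholders:

def translate(path='hello_world.py'):
    print('would translate', path)

def main(argv):
    if len(argv) == 1:
        print('fail: requires at least one command line argument')
        return 1
    if argv[1] == 'translate':
        if len(argv) > 2:
            translate(argv[2])
        else:
            translate()
        return 0
    return 1  # unknown subcommand

main(['parse.py', 'translate', 'prog.py'])  # -> would translate prog.py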
670a72728ea7462972f3578b62cf33c5740187c2
|
locust/rpc/protocol.py
|
locust/rpc/protocol.py
|
import msgpack
class Message(object):
def __init__(self, message_type, data, node_id):
self.type = message_type
self.data = data
self.node_id = node_id
def serialize(self):
return msgpack.dumps((self.type, self.data, self.node_id))
@classmethod
def unserialize(cls, data):
msg = cls(*msgpack.loads(data, raw=False))
return msg
|
import msgpack
class Message(object):
def __init__(self, message_type, data, node_id):
self.type = message_type
self.data = data
self.node_id = node_id
def __repr__(self):
return "<Message %s:%s>" % (self.type, self.node_id)
def serialize(self):
return msgpack.dumps((self.type, self.data, self.node_id))
@classmethod
def unserialize(cls, data):
msg = cls(*msgpack.loads(data, raw=False))
return msg
|
Add Message.__repr__ for better debugging
|
Add Message.__repr__ for better debugging
|
Python
|
mit
|
mbeacom/locust,mbeacom/locust,locustio/locust,mbeacom/locust,locustio/locust,mbeacom/locust,locustio/locust,locustio/locust
|
import msgpack
class Message(object):
def __init__(self, message_type, data, node_id):
self.type = message_type
self.data = data
self.node_id = node_id
+
+ def __repr__(self):
+ return "<Message %s:%s>" % (self.type, self.node_id)
def serialize(self):
return msgpack.dumps((self.type, self.data, self.node_id))
@classmethod
def unserialize(cls, data):
msg = cls(*msgpack.loads(data, raw=False))
return msg
|
Add Message.__repr__ for better debugging
|
## Code Before:
import msgpack
class Message(object):
def __init__(self, message_type, data, node_id):
self.type = message_type
self.data = data
self.node_id = node_id
def serialize(self):
return msgpack.dumps((self.type, self.data, self.node_id))
@classmethod
def unserialize(cls, data):
msg = cls(*msgpack.loads(data, raw=False))
return msg
## Instruction:
Add Message.__repr__ for better debugging
## Code After:
import msgpack
class Message(object):
def __init__(self, message_type, data, node_id):
self.type = message_type
self.data = data
self.node_id = node_id
def __repr__(self):
return "<Message %s:%s>" % (self.type, self.node_id)
def serialize(self):
return msgpack.dumps((self.type, self.data, self.node_id))
@classmethod
def unserialize(cls, data):
msg = cls(*msgpack.loads(data, raw=False))
return msg
|
import msgpack
class Message(object):
def __init__(self, message_type, data, node_id):
self.type = message_type
self.data = data
self.node_id = node_id
+
+ def __repr__(self):
+ return "<Message %s:%s>" % (self.type, self.node_id)
def serialize(self):
return msgpack.dumps((self.type, self.data, self.node_id))
@classmethod
def unserialize(cls, data):
msg = cls(*msgpack.loads(data, raw=False))
return msg
|
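What the new __repr__ buys in practice: log lines and debugger output identify a message by type and node without dumping its payload, and containers use repr() for their elements, so lists of pending messages become readable too. A self-contained re-creation:

class Message:
    def __init__(self, message_type, data, node_id):
        self.type = message_type
        self.data = data
        self.node_id = node_id

    def __repr__(self):
        return "<Message %s:%s>" % (self.type, self.node_id)

print([Message('hatch', {'count': 10}, 'worker-1')])
# [<Message hatch:worker-1>]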
e3edaa6a1a970b266a7411dcadbf86dccb5d8234
|
tests/run.py
|
tests/run.py
|
import os
import sys
import dj_database_url
import django
from colour_runner.django_runner import ColourRunnerMixin
from django.conf import settings
from django.test.runner import DiscoverRunner
BASEDIR = os.path.dirname(os.path.dirname(__file__))
settings.configure(
DATABASES={
'default': dj_database_url.config(
default='sqlite://{}/user_deletion.db'.format(BASEDIR),
),
},
INSTALLED_APPS=('user_deletion',),
MIDDLEWARE_CLASSES=(),
)
if django.VERSION >= (1, 7):
django.setup()
class TestRunner(ColourRunnerMixin, DiscoverRunner):
"""Enable colorised output."""
test_runner = TestRunner(verbosity=1)
failures = test_runner.run_tests(['tests'])
if failures:
sys.exit(1)
|
import os
import sys
import dj_database_url
import django
from colour_runner.django_runner import ColourRunnerMixin
from django.conf import settings
from django.test.runner import DiscoverRunner
BASEDIR = os.path.dirname(os.path.dirname(__file__))
settings.configure(
DATABASES={
'default': dj_database_url.config(
default='sqlite://{}/user_deletion.db'.format(BASEDIR),
),
},
INSTALLED_APPS=('user_deletion',),
MIDDLEWARE_CLASSES=(),
)
django.setup()
class TestRunner(ColourRunnerMixin, DiscoverRunner):
"""Enable colorised output."""
test_runner = TestRunner(verbosity=1)
failures = test_runner.run_tests(['tests'])
if failures:
sys.exit(1)
|
Remove if branch testing for Django >= 1.7
|
Remove if branch testing for Django >= 1.7
|
Python
|
bsd-2-clause
|
incuna/django-user-deletion
|
import os
import sys
import dj_database_url
import django
from colour_runner.django_runner import ColourRunnerMixin
from django.conf import settings
from django.test.runner import DiscoverRunner
BASEDIR = os.path.dirname(os.path.dirname(__file__))
settings.configure(
DATABASES={
'default': dj_database_url.config(
default='sqlite://{}/user_deletion.db'.format(BASEDIR),
),
},
INSTALLED_APPS=('user_deletion',),
MIDDLEWARE_CLASSES=(),
)
- if django.VERSION >= (1, 7):
- django.setup()
+ django.setup()
class TestRunner(ColourRunnerMixin, DiscoverRunner):
"""Enable colorised output."""
test_runner = TestRunner(verbosity=1)
failures = test_runner.run_tests(['tests'])
if failures:
sys.exit(1)
|
Remove if branch testing for Django >= 1.7
|
## Code Before:
import os
import sys
import dj_database_url
import django
from colour_runner.django_runner import ColourRunnerMixin
from django.conf import settings
from django.test.runner import DiscoverRunner
BASEDIR = os.path.dirname(os.path.dirname(__file__))
settings.configure(
DATABASES={
'default': dj_database_url.config(
default='sqlite://{}/user_deletion.db'.format(BASEDIR),
),
},
INSTALLED_APPS=('user_deletion',),
MIDDLEWARE_CLASSES=(),
)
if django.VERSION >= (1, 7):
django.setup()
class TestRunner(ColourRunnerMixin, DiscoverRunner):
"""Enable colorised output."""
test_runner = TestRunner(verbosity=1)
failures = test_runner.run_tests(['tests'])
if failures:
sys.exit(1)
## Instruction:
Remove if branch testing for Django >= 1.7
## Code After:
import os
import sys
import dj_database_url
import django
from colour_runner.django_runner import ColourRunnerMixin
from django.conf import settings
from django.test.runner import DiscoverRunner
BASEDIR = os.path.dirname(os.path.dirname(__file__))
settings.configure(
DATABASES={
'default': dj_database_url.config(
default='sqlite://{}/user_deletion.db'.format(BASEDIR),
),
},
INSTALLED_APPS=('user_deletion',),
MIDDLEWARE_CLASSES=(),
)
django.setup()
class TestRunner(ColourRunnerMixin, DiscoverRunner):
"""Enable colorised output."""
test_runner = TestRunner(verbosity=1)
failures = test_runner.run_tests(['tests'])
if failures:
sys.exit(1)
|
import os
import sys
import dj_database_url
import django
from colour_runner.django_runner import ColourRunnerMixin
from django.conf import settings
from django.test.runner import DiscoverRunner
BASEDIR = os.path.dirname(os.path.dirname(__file__))
settings.configure(
DATABASES={
'default': dj_database_url.config(
default='sqlite://{}/user_deletion.db'.format(BASEDIR),
),
},
INSTALLED_APPS=('user_deletion',),
MIDDLEWARE_CLASSES=(),
)
- if django.VERSION >= (1, 7):
- django.setup()
? ----
+ django.setup()
class TestRunner(ColourRunnerMixin, DiscoverRunner):
"""Enable colorised output."""
test_runner = TestRunner(verbosity=1)
failures = test_runner.run_tests(['tests'])
if failures:
sys.exit(1)
|
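The dropped guard compared version tuples, and once every Django the project supports is 1.7 or newer the comparison is always true, leaving a dead branch. The comparison itself, with a stand-in version tuple:

VERSION = (1, 11, 0)  # stand-in for django.VERSION on a supported release
print(VERSION >= (1, 7))  # True, so django.setup() effectively ran unconditionally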
35bb090dd926d4327fa046ee2da64c4cb5b38a47
|
app/notify_client/email_branding_client.py
|
app/notify_client/email_branding_client.py
|
from app.notify_client import NotifyAdminAPIClient, cache
class EmailBrandingClient(NotifyAdminAPIClient):
@cache.set("email_branding-{branding_id}")
def get_email_branding(self, branding_id):
return self.get(url="/email-branding/{}".format(branding_id))
@cache.set("email_branding")
def get_all_email_branding(self, sort_key=None):
brandings = self.get(url="/email-branding")["email_branding"]
if sort_key and sort_key in brandings[0]:
brandings.sort(key=lambda branding: branding[sort_key].lower())
return brandings
@cache.delete("email_branding")
def create_email_branding(self, logo, name, text, colour, brand_type):
data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type}
return self.post(url="/email-branding", data=data)
@cache.delete("email_branding")
@cache.delete("email_branding-{branding_id}")
def update_email_branding(self, branding_id, logo, name, text, colour, brand_type):
data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type}
return self.post(url="/email-branding/{}".format(branding_id), data=data)
email_branding_client = EmailBrandingClient()
|
from app.notify_client import NotifyAdminAPIClient, cache
class EmailBrandingClient(NotifyAdminAPIClient):
@cache.set("email_branding-{branding_id}")
def get_email_branding(self, branding_id):
return self.get(url="/email-branding/{}".format(branding_id))
@cache.set("email_branding")
def get_all_email_branding(self):
return self.get(url="/email-branding")["email_branding"]
@cache.delete("email_branding")
def create_email_branding(self, logo, name, text, colour, brand_type):
data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type}
return self.post(url="/email-branding", data=data)
@cache.delete("email_branding")
@cache.delete("email_branding-{branding_id}")
def update_email_branding(self, branding_id, logo, name, text, colour, brand_type):
data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type}
return self.post(url="/email-branding/{}".format(branding_id), data=data)
email_branding_client = EmailBrandingClient()
|
Remove old way of sorting
|
Remove old way of sorting
This is redundant since the model layer has built-in sorting.
It’s also not a good separation of concerns for something presentational
(sort order) to be in the API client layer.
|
Python
|
mit
|
alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin
|
from app.notify_client import NotifyAdminAPIClient, cache
class EmailBrandingClient(NotifyAdminAPIClient):
@cache.set("email_branding-{branding_id}")
def get_email_branding(self, branding_id):
return self.get(url="/email-branding/{}".format(branding_id))
@cache.set("email_branding")
- def get_all_email_branding(self, sort_key=None):
+ def get_all_email_branding(self):
- brandings = self.get(url="/email-branding")["email_branding"]
+ return self.get(url="/email-branding")["email_branding"]
- if sort_key and sort_key in brandings[0]:
- brandings.sort(key=lambda branding: branding[sort_key].lower())
- return brandings
@cache.delete("email_branding")
def create_email_branding(self, logo, name, text, colour, brand_type):
data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type}
return self.post(url="/email-branding", data=data)
@cache.delete("email_branding")
@cache.delete("email_branding-{branding_id}")
def update_email_branding(self, branding_id, logo, name, text, colour, brand_type):
data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type}
return self.post(url="/email-branding/{}".format(branding_id), data=data)
email_branding_client = EmailBrandingClient()
|
Remove old way of sorting
|
## Code Before:
from app.notify_client import NotifyAdminAPIClient, cache
class EmailBrandingClient(NotifyAdminAPIClient):
@cache.set("email_branding-{branding_id}")
def get_email_branding(self, branding_id):
return self.get(url="/email-branding/{}".format(branding_id))
@cache.set("email_branding")
def get_all_email_branding(self, sort_key=None):
brandings = self.get(url="/email-branding")["email_branding"]
if sort_key and sort_key in brandings[0]:
brandings.sort(key=lambda branding: branding[sort_key].lower())
return brandings
@cache.delete("email_branding")
def create_email_branding(self, logo, name, text, colour, brand_type):
data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type}
return self.post(url="/email-branding", data=data)
@cache.delete("email_branding")
@cache.delete("email_branding-{branding_id}")
def update_email_branding(self, branding_id, logo, name, text, colour, brand_type):
data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type}
return self.post(url="/email-branding/{}".format(branding_id), data=data)
email_branding_client = EmailBrandingClient()
## Instruction:
Remove old way of sorting
## Code After:
from app.notify_client import NotifyAdminAPIClient, cache
class EmailBrandingClient(NotifyAdminAPIClient):
@cache.set("email_branding-{branding_id}")
def get_email_branding(self, branding_id):
return self.get(url="/email-branding/{}".format(branding_id))
@cache.set("email_branding")
def get_all_email_branding(self):
return self.get(url="/email-branding")["email_branding"]
@cache.delete("email_branding")
def create_email_branding(self, logo, name, text, colour, brand_type):
data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type}
return self.post(url="/email-branding", data=data)
@cache.delete("email_branding")
@cache.delete("email_branding-{branding_id}")
def update_email_branding(self, branding_id, logo, name, text, colour, brand_type):
data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type}
return self.post(url="/email-branding/{}".format(branding_id), data=data)
email_branding_client = EmailBrandingClient()
|
from app.notify_client import NotifyAdminAPIClient, cache
class EmailBrandingClient(NotifyAdminAPIClient):
@cache.set("email_branding-{branding_id}")
def get_email_branding(self, branding_id):
return self.get(url="/email-branding/{}".format(branding_id))
@cache.set("email_branding")
- def get_all_email_branding(self, sort_key=None):
? ---------------
+ def get_all_email_branding(self):
- brandings = self.get(url="/email-branding")["email_branding"]
? - ^ -------
+ return self.get(url="/email-branding")["email_branding"]
? ^^^^
- if sort_key and sort_key in brandings[0]:
- brandings.sort(key=lambda branding: branding[sort_key].lower())
- return brandings
@cache.delete("email_branding")
def create_email_branding(self, logo, name, text, colour, brand_type):
data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type}
return self.post(url="/email-branding", data=data)
@cache.delete("email_branding")
@cache.delete("email_branding-{branding_id}")
def update_email_branding(self, branding_id, logo, name, text, colour, brand_type):
data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type}
return self.post(url="/email-branding/{}".format(branding_id), data=data)
email_branding_client = EmailBrandingClient()
|
cd174416301e03c0beea260925d6227c38444c73
|
shapely/geometry/__init__.py
|
shapely/geometry/__init__.py
|
from geo import box, shape, asShape, mapping
from point import Point, asPoint
from linestring import LineString, asLineString
from polygon import Polygon, asPolygon
from multipoint import MultiPoint, asMultiPoint
from multilinestring import MultiLineString, asMultiLineString
from multipolygon import MultiPolygon, asMultiPolygon
from collection import GeometryCollection
__all__ = [
'box', 'shape', 'asShape', 'Point', 'asPoint', 'LineString', 'asLineString',
'Polygon', 'asPolygon', 'MultiPoint', 'asMultiPoint',
'MultiLineString', 'asMultiLineString', 'MultiPolygon', 'asMultiPolygon',
'GeometryCollection', 'mapping'
]
|
from base import CAP_STYLE, JOIN_STYLE
from geo import box, shape, asShape, mapping
from point import Point, asPoint
from linestring import LineString, asLineString
from polygon import Polygon, asPolygon
from multipoint import MultiPoint, asMultiPoint
from multilinestring import MultiLineString, asMultiLineString
from multipolygon import MultiPolygon, asMultiPolygon
from collection import GeometryCollection
__all__ = [
'box', 'shape', 'asShape', 'Point', 'asPoint', 'LineString', 'asLineString',
'Polygon', 'asPolygon', 'MultiPoint', 'asMultiPoint',
'MultiLineString', 'asMultiLineString', 'MultiPolygon', 'asMultiPolygon',
'GeometryCollection', 'mapping', 'CAP_STYLE', 'JOIN_STYLE'
]
|
Add missing cap and join style imports
|
Add missing cap and join style imports
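As a quick illustration of why the re-export matters, a usage sketch against Shapely's buffer API (values are illustrative):

from shapely.geometry import Point, CAP_STYLE, JOIN_STYLE

# Square end caps and mitred joins instead of the round defaults.
patch = Point(0, 0).buffer(1.0, cap_style=CAP_STYLE.square, join_style=JOIN_STYLE.mitre)

Re-exporting the two enums from shapely.geometry lets callers write exactly this import instead of reaching into shapely.geometry.base.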
|
Python
|
bsd-3-clause
|
jdmcbr/Shapely,jdmcbr/Shapely,mouadino/Shapely,abali96/Shapely,mindw/shapely,mindw/shapely,mouadino/Shapely,abali96/Shapely
|
+ from base import CAP_STYLE, JOIN_STYLE
from geo import box, shape, asShape, mapping
from point import Point, asPoint
from linestring import LineString, asLineString
from polygon import Polygon, asPolygon
from multipoint import MultiPoint, asMultiPoint
from multilinestring import MultiLineString, asMultiLineString
from multipolygon import MultiPolygon, asMultiPolygon
from collection import GeometryCollection
__all__ = [
'box', 'shape', 'asShape', 'Point', 'asPoint', 'LineString', 'asLineString',
'Polygon', 'asPolygon', 'MultiPoint', 'asMultiPoint',
'MultiLineString', 'asMultiLineString', 'MultiPolygon', 'asMultiPolygon',
- 'GeometryCollection', 'mapping'
+ 'GeometryCollection', 'mapping', 'CAP_STYLE', 'JOIN_STYLE'
]
|
Add missing cap and join style imports
|
## Code Before:
from geo import box, shape, asShape, mapping
from point import Point, asPoint
from linestring import LineString, asLineString
from polygon import Polygon, asPolygon
from multipoint import MultiPoint, asMultiPoint
from multilinestring import MultiLineString, asMultiLineString
from multipolygon import MultiPolygon, asMultiPolygon
from collection import GeometryCollection
__all__ = [
'box', 'shape', 'asShape', 'Point', 'asPoint', 'LineString', 'asLineString',
'Polygon', 'asPolygon', 'MultiPoint', 'asMultiPoint',
'MultiLineString', 'asMultiLineString', 'MultiPolygon', 'asMultiPolygon',
'GeometryCollection', 'mapping'
]
## Instruction:
Add missing cap and join style imports
## Code After:
from base import CAP_STYLE, JOIN_STYLE
from geo import box, shape, asShape, mapping
from point import Point, asPoint
from linestring import LineString, asLineString
from polygon import Polygon, asPolygon
from multipoint import MultiPoint, asMultiPoint
from multilinestring import MultiLineString, asMultiLineString
from multipolygon import MultiPolygon, asMultiPolygon
from collection import GeometryCollection
__all__ = [
'box', 'shape', 'asShape', 'Point', 'asPoint', 'LineString', 'asLineString',
'Polygon', 'asPolygon', 'MultiPoint', 'asMultiPoint',
'MultiLineString', 'asMultiLineString', 'MultiPolygon', 'asMultiPolygon',
'GeometryCollection', 'mapping', 'CAP_STYLE', 'JOIN_STYLE'
]
|
+ from base import CAP_STYLE, JOIN_STYLE
from geo import box, shape, asShape, mapping
from point import Point, asPoint
from linestring import LineString, asLineString
from polygon import Polygon, asPolygon
from multipoint import MultiPoint, asMultiPoint
from multilinestring import MultiLineString, asMultiLineString
from multipolygon import MultiPolygon, asMultiPolygon
from collection import GeometryCollection
__all__ = [
'box', 'shape', 'asShape', 'Point', 'asPoint', 'LineString', 'asLineString',
'Polygon', 'asPolygon', 'MultiPoint', 'asMultiPoint',
'MultiLineString', 'asMultiLineString', 'MultiPolygon', 'asMultiPolygon',
- 'GeometryCollection', 'mapping'
+ 'GeometryCollection', 'mapping', 'CAP_STYLE', 'JOIN_STYLE'
]
|
8a9e58d2170e3f06228cbc0257d41f0c969da957
|
tangled/website/resources.py
|
tangled/website/resources.py
|
from tangled.web import Resource, represent
from tangled.site.resources.entry import Entry
class Docs(Entry):
@represent('text/html', template_name='tangled.website:templates/docs.mako')
def GET(self):
static_dirs = self.app.get_all('static_directory', as_dict=True)
links = []
for prefix, dir_app in static_dirs.items():
if prefix[0] == 'docs':
links.append({
'href': '/'.join(prefix),
'text': prefix[1],
})
self.urlvars['id'] = 'docs'
data = super().GET()
data['links'] = sorted(links, key=lambda i: i['text'])
return data
|
from tangled.web import Resource, config
from tangled.site.resources.entry import Entry
class Docs(Entry):
@config('text/html', template_name='tangled.website:templates/docs.mako')
def GET(self):
static_dirs = self.app.get_all('static_directory', as_dict=True)
links = []
for prefix, dir_app in static_dirs.items():
if prefix[0] == 'docs':
links.append({
'href': '/'.join(prefix),
'text': prefix[1],
})
self.urlvars['id'] = 'docs'
data = super().GET()
data['links'] = sorted(links, key=lambda i: i['text'])
return data
|
Replace @represent w/ @config throughout
|
Replace @represent w/ @config throughout
New name, same functionality.
|
Python
|
mit
|
TangledWeb/tangled.website
|
- from tangled.web import Resource, represent
+ from tangled.web import Resource, config
from tangled.site.resources.entry import Entry
class Docs(Entry):
- @represent('text/html', template_name='tangled.website:templates/docs.mako')
+ @config('text/html', template_name='tangled.website:templates/docs.mako')
def GET(self):
static_dirs = self.app.get_all('static_directory', as_dict=True)
links = []
for prefix, dir_app in static_dirs.items():
if prefix[0] == 'docs':
links.append({
'href': '/'.join(prefix),
'text': prefix[1],
})
self.urlvars['id'] = 'docs'
data = super().GET()
data['links'] = sorted(links, key=lambda i: i['text'])
return data
|
Replace @represent w/ @config throughout
|
## Code Before:
from tangled.web import Resource, represent
from tangled.site.resources.entry import Entry
class Docs(Entry):
@represent('text/html', template_name='tangled.website:templates/docs.mako')
def GET(self):
static_dirs = self.app.get_all('static_directory', as_dict=True)
links = []
for prefix, dir_app in static_dirs.items():
if prefix[0] == 'docs':
links.append({
'href': '/'.join(prefix),
'text': prefix[1],
})
self.urlvars['id'] = 'docs'
data = super().GET()
data['links'] = sorted(links, key=lambda i: i['text'])
return data
## Instruction:
Replace @represent w/ @config throughout
## Code After:
from tangled.web import Resource, config
from tangled.site.resources.entry import Entry
class Docs(Entry):
@config('text/html', template_name='tangled.website:templates/docs.mako')
def GET(self):
static_dirs = self.app.get_all('static_directory', as_dict=True)
links = []
for prefix, dir_app in static_dirs.items():
if prefix[0] == 'docs':
links.append({
'href': '/'.join(prefix),
'text': prefix[1],
})
self.urlvars['id'] = 'docs'
data = super().GET()
data['links'] = sorted(links, key=lambda i: i['text'])
return data
|
- from tangled.web import Resource, represent
? ^^^^^^^ ^
+ from tangled.web import Resource, config
? ^^ ^^^
from tangled.site.resources.entry import Entry
class Docs(Entry):
- @represent('text/html', template_name='tangled.website:templates/docs.mako')
? ^^^^^^^ ^
+ @config('text/html', template_name='tangled.website:templates/docs.mako')
? ^^ ^^^
def GET(self):
static_dirs = self.app.get_all('static_directory', as_dict=True)
links = []
for prefix, dir_app in static_dirs.items():
if prefix[0] == 'docs':
links.append({
'href': '/'.join(prefix),
'text': prefix[1],
})
self.urlvars['id'] = 'docs'
data = super().GET()
data['links'] = sorted(links, key=lambda i: i['text'])
return data
|
ccb1759a205a4cdc8f5eb2c28adcf49503221135
|
ecpy/tasks/api.py
|
ecpy/tasks/api.py
|
from __future__ import (division, unicode_literals, print_function,
absolute_import)
import enaml
from .base_tasks import BaseTask, SimpleTask, ComplexTask, RootTask
from .task_interface import (InterfaceableTaskMixin, TaskInterface,
InterfaceableInterfaceMixin, IInterface)
from .manager.declarations import (Tasks, Task, Interfaces, Interface,
TaskConfig)
from .manager.filters import (TaskFilter, SubclassTaskFilter, GroupTaskFilter,
MetadataTaskFilter)
from .manager.configs.base_configs import BaseTaskConfig
with enaml.imports():
from .manager.configs.base_config_views import BaseConfigView
from .base_views import BaseTaskView
__all__ = ['BaseTask', 'SimpleTask', 'ComplexTask', 'RootTask', 'BaseTaskView',
'InterfaceableTaskMixin', 'TaskInterface',
'InterfaceableInterfaceMixin', 'IInterface',
'Tasks', 'Task', 'Interfaces', 'Interface', 'TaskConfig',
'TaskFilter', 'SubclassTaskFilter', 'GroupTaskFilter',
'MetadataTaskFilter', 'BaseTaskConfig', 'BaseConfigView']
|
from __future__ import (division, unicode_literals, print_function,
absolute_import)
import enaml
from .base_tasks import BaseTask, SimpleTask, ComplexTask, RootTask
from .task_interface import (InterfaceableTaskMixin, TaskInterface,
InterfaceableInterfaceMixin, IInterface)
from .manager.declarations import (Tasks, Task, Interfaces, Interface,
TaskConfig)
from .manager.filters import (TaskFilter, SubclassTaskFilter, GroupTaskFilter,
MetadataTaskFilter)
from .manager.configs.base_configs import BaseTaskConfig
from .manager.utils.building import build_task_from_config
with enaml.imports():
from .manager.configs.base_config_views import BaseConfigView
from .base_views import BaseTaskView
__all__ = ['BaseTask', 'SimpleTask', 'ComplexTask', 'RootTask', 'BaseTaskView',
'InterfaceableTaskMixin', 'TaskInterface',
'InterfaceableInterfaceMixin', 'IInterface',
'Tasks', 'Task', 'Interfaces', 'Interface', 'TaskConfig',
'TaskFilter', 'SubclassTaskFilter', 'GroupTaskFilter',
'MetadataTaskFilter', 'BaseTaskConfig', 'BaseConfigView',
'build_task_from_config']
|
Add tasks/build_from_config to the public API.
|
Add tasks/build_from_config to the public API.
|
Python
|
bsd-3-clause
|
Ecpy/ecpy,Ecpy/ecpy
|
from __future__ import (division, unicode_literals, print_function,
absolute_import)
import enaml
from .base_tasks import BaseTask, SimpleTask, ComplexTask, RootTask
from .task_interface import (InterfaceableTaskMixin, TaskInterface,
InterfaceableInterfaceMixin, IInterface)
from .manager.declarations import (Tasks, Task, Interfaces, Interface,
TaskConfig)
from .manager.filters import (TaskFilter, SubclassTaskFilter, GroupTaskFilter,
MetadataTaskFilter)
from .manager.configs.base_configs import BaseTaskConfig
+ from .manager.utils.building import build_task_from_config
+
with enaml.imports():
from .manager.configs.base_config_views import BaseConfigView
from .base_views import BaseTaskView
__all__ = ['BaseTask', 'SimpleTask', 'ComplexTask', 'RootTask', 'BaseTaskView',
'InterfaceableTaskMixin', 'TaskInterface',
'InterfaceableInterfaceMixin', 'IInterface',
'Tasks', 'Task', 'Interfaces', 'Interface', 'TaskConfig',
'TaskFilter', 'SubclassTaskFilter', 'GroupTaskFilter',
- 'MetadataTaskFilter', 'BaseTaskConfig', 'BaseConfigView']
+ 'MetadataTaskFilter', 'BaseTaskConfig', 'BaseConfigView',
+ 'build_task_from_config']
|
Add tasks/build_from_config to the public API.
|
## Code Before:
from __future__ import (division, unicode_literals, print_function,
absolute_import)
import enaml
from .base_tasks import BaseTask, SimpleTask, ComplexTask, RootTask
from .task_interface import (InterfaceableTaskMixin, TaskInterface,
InterfaceableInterfaceMixin, IInterface)
from .manager.declarations import (Tasks, Task, Interfaces, Interface,
TaskConfig)
from .manager.filters import (TaskFilter, SubclassTaskFilter, GroupTaskFilter,
MetadataTaskFilter)
from .manager.configs.base_configs import BaseTaskConfig
with enaml.imports():
from .manager.configs.base_config_views import BaseConfigView
from .base_views import BaseTaskView
__all__ = ['BaseTask', 'SimpleTask', 'ComplexTask', 'RootTask', 'BaseTaskView',
'InterfaceableTaskMixin', 'TaskInterface',
'InterfaceableInterfaceMixin', 'IInterface',
'Tasks', 'Task', 'Interfaces', 'Interface', 'TaskConfig',
'TaskFilter', 'SubclassTaskFilter', 'GroupTaskFilter',
'MetadataTaskFilter', 'BaseTaskConfig', 'BaseConfigView']
## Instruction:
Add tasks/build_from_config to the public API.
## Code After:
from __future__ import (division, unicode_literals, print_function,
absolute_import)
import enaml
from .base_tasks import BaseTask, SimpleTask, ComplexTask, RootTask
from .task_interface import (InterfaceableTaskMixin, TaskInterface,
InterfaceableInterfaceMixin, IInterface)
from .manager.declarations import (Tasks, Task, Interfaces, Interface,
TaskConfig)
from .manager.filters import (TaskFilter, SubclassTaskFilter, GroupTaskFilter,
MetadataTaskFilter)
from .manager.configs.base_configs import BaseTaskConfig
from .manager.utils.building import build_task_from_config
with enaml.imports():
from .manager.configs.base_config_views import BaseConfigView
from .base_views import BaseTaskView
__all__ = ['BaseTask', 'SimpleTask', 'ComplexTask', 'RootTask', 'BaseTaskView',
'InterfaceableTaskMixin', 'TaskInterface',
'InterfaceableInterfaceMixin', 'IInterface',
'Tasks', 'Task', 'Interfaces', 'Interface', 'TaskConfig',
'TaskFilter', 'SubclassTaskFilter', 'GroupTaskFilter',
'MetadataTaskFilter', 'BaseTaskConfig', 'BaseConfigView',
'build_task_from_config']
|
from __future__ import (division, unicode_literals, print_function,
absolute_import)
import enaml
from .base_tasks import BaseTask, SimpleTask, ComplexTask, RootTask
from .task_interface import (InterfaceableTaskMixin, TaskInterface,
InterfaceableInterfaceMixin, IInterface)
from .manager.declarations import (Tasks, Task, Interfaces, Interface,
TaskConfig)
from .manager.filters import (TaskFilter, SubclassTaskFilter, GroupTaskFilter,
MetadataTaskFilter)
from .manager.configs.base_configs import BaseTaskConfig
+ from .manager.utils.building import build_task_from_config
+
with enaml.imports():
from .manager.configs.base_config_views import BaseConfigView
from .base_views import BaseTaskView
__all__ = ['BaseTask', 'SimpleTask', 'ComplexTask', 'RootTask', 'BaseTaskView',
'InterfaceableTaskMixin', 'TaskInterface',
'InterfaceableInterfaceMixin', 'IInterface',
'Tasks', 'Task', 'Interfaces', 'Interface', 'TaskConfig',
'TaskFilter', 'SubclassTaskFilter', 'GroupTaskFilter',
- 'MetadataTaskFilter', 'BaseTaskConfig', 'BaseConfigView']
? ^
+ 'MetadataTaskFilter', 'BaseTaskConfig', 'BaseConfigView',
? ^
+ 'build_task_from_config']
|
6e19ff22ea0e8c78e7faaa2ba58626de383dfee3
|
djangae/contrib/mappers/urls.py
|
djangae/contrib/mappers/urls.py
|
from django.conf.urls import url
from djangae.utils import djangae_webapp
from django.views.decorators.csrf import csrf_exempt
try:
from mapreduce.main import create_handlers_map
wrapped_urls = [url(url_re.replace('.*/', '^', 1), csrf_exempt(djangae_webapp(func))) for url_re, func in create_handlers_map()]
except ImportError as e:
wrapped_urls = []
urlpatterns = wrapped_urls
|
from django.conf.urls import url
from djangae.utils import djangae_webapp
from django.views.decorators.csrf import csrf_exempt
# The Mapreduce status UI uses inline JS, which will fail if we have django-csp
# installed and are not allowing 'unsafe-inline' as a SCRIPT_SRC.
try:
from csp.decorators import csp_update
exempt_from_unsafe_inline = csp_update(SCRIPT_SRC=("'unsafe-inline'",))
except ImportError:
exempt_from_unsafe_inline = lambda func: func
try:
from mapreduce.main import create_handlers_map
wrapped_urls = [
url(
url_re.replace('.*/', '^', 1),
exempt_from_unsafe_inline(csrf_exempt(djangae_webapp(func)))
)
for url_re, func in create_handlers_map()
]
except ImportError as e:
wrapped_urls = []
urlpatterns = wrapped_urls
|
Allow the Mapreduce status UI to function when a CSP is enforced with django-csp.
|
Allow the Mapreduce status UI to function when a CSP is enforced with django-csp.
|
Python
|
bsd-3-clause
|
potatolondon/djangae,grzes/djangae,grzes/djangae,kirberich/djangae,potatolondon/djangae,grzes/djangae,kirberich/djangae,kirberich/djangae
|
from django.conf.urls import url
from djangae.utils import djangae_webapp
from django.views.decorators.csrf import csrf_exempt
+ # The Mapreduce status UI uses inline JS, which will fail if we have django-csp
+ # installed and are not allowing 'unsafe-inline' as a SCRIPT_SRC.
+ try:
+ from csp.decorators import csp_update
+ exempt_from_unsafe_inline = csp_update(SCRIPT_SRC=("'unsafe-inline'",))
+ except ImportError:
+ exempt_from_unsafe_inline = lambda func: func
+
+
try:
from mapreduce.main import create_handlers_map
- wrapped_urls = [url(url_re.replace('.*/', '^', 1), csrf_exempt(djangae_webapp(func))) for url_re, func in create_handlers_map()]
+ wrapped_urls = [
+ url(
+ url_re.replace('.*/', '^', 1),
+ exempt_from_unsafe_inline(csrf_exempt(djangae_webapp(func)))
+ )
+ for url_re, func in create_handlers_map()
+ ]
except ImportError as e:
wrapped_urls = []
urlpatterns = wrapped_urls
|
Allow the Mapreduce status UI to function when a CSP is enforced with django-csp.
|
## Code Before:
from django.conf.urls import url
from djangae.utils import djangae_webapp
from django.views.decorators.csrf import csrf_exempt
try:
from mapreduce.main import create_handlers_map
wrapped_urls = [url(url_re.replace('.*/', '^', 1), csrf_exempt(djangae_webapp(func))) for url_re, func in create_handlers_map()]
except ImportError as e:
wrapped_urls = []
urlpatterns = wrapped_urls
## Instruction:
Allow the Mapreduce status UI to function when a CSP is enforced with django-csp.
## Code After:
from django.conf.urls import url
from djangae.utils import djangae_webapp
from django.views.decorators.csrf import csrf_exempt
# The Mapreduce status UI uses inline JS, which will fail if we have django-csp
# installed and are not allowing 'unsafe-inline' as a SCRIPT_SRC.
try:
from csp.decorators import csp_update
exempt_from_unsafe_inline = csp_update(SCRIPT_SRC=("'unsafe-inline'",))
except ImportError:
exempt_from_unsafe_inline = lambda func: func
try:
from mapreduce.main import create_handlers_map
wrapped_urls = [
url(
url_re.replace('.*/', '^', 1),
exempt_from_unsafe_inline(csrf_exempt(djangae_webapp(func)))
)
for url_re, func in create_handlers_map()
]
except ImportError as e:
wrapped_urls = []
urlpatterns = wrapped_urls
|
from django.conf.urls import url
from djangae.utils import djangae_webapp
from django.views.decorators.csrf import csrf_exempt
+ # The Mapreduce status UI uses inline JS, which will fail if we have django-csp
+ # installed and are not allowing 'unsafe-inline' as a SCRIPT_SRC.
+ try:
+ from csp.decorators import csp_update
+ exempt_from_unsafe_inline = csp_update(SCRIPT_SRC=("'unsafe-inline'",))
+ except ImportError:
+ exempt_from_unsafe_inline = lambda func: func
+
+
try:
from mapreduce.main import create_handlers_map
- wrapped_urls = [url(url_re.replace('.*/', '^', 1), csrf_exempt(djangae_webapp(func))) for url_re, func in create_handlers_map()]
+ wrapped_urls = [
+ url(
+ url_re.replace('.*/', '^', 1),
+ exempt_from_unsafe_inline(csrf_exempt(djangae_webapp(func)))
+ )
+ for url_re, func in create_handlers_map()
+ ]
except ImportError as e:
wrapped_urls = []
urlpatterns = wrapped_urls
|
0281aaa0868d0bfa6ecb7368cff89b4af6b57129
|
tests/functions_tests/test_dropout.py
|
tests/functions_tests/test_dropout.py
|
import unittest
import numpy
import chainer
from chainer import cuda
from chainer import functions
from chainer import testing
if cuda.available:
cuda.init()
class TestDropout(unittest.TestCase):
def setUp(self):
self.x = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32)
def check_type_forward(self, x_data):
x = chainer.Variable(x_data)
try:
functions.dropout(x)
except Exception:
self.fail()
def test_type_forward_cpu(self):
self.check_type_forward(self.x)
def test_type_forward_gpu(self):
self.check_type_forward(cuda.to_gpu(self.x))
testing.run_module(__name__, __file__)
|
import unittest
import numpy
import chainer
from chainer import cuda
from chainer import functions
from chainer import testing
from chainer.testing import attr
if cuda.available:
cuda.init()
class TestDropout(unittest.TestCase):
def setUp(self):
self.x = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32)
def check_type_forward(self, x_data):
x = chainer.Variable(x_data)
try:
functions.dropout(x)
except Exception:
self.fail()
def test_type_forward_cpu(self):
self.check_type_forward(self.x)
@attr.gpu
def test_type_forward_gpu(self):
self.check_type_forward(cuda.to_gpu(self.x))
testing.run_module(__name__, __file__)
|
Add attr.gpu decorator to gpu test of dropout
|
Add attr.gpu decorator to gpu test of dropout
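Conceptually, the marker lets the test runner skip or deselect GPU-only tests; a rough standard-library stand-in for what it buys you (an analogy, not Chainer's actual implementation):

import unittest

def _cuda_available():
    try:
        from chainer import cuda
        return cuda.available
    except ImportError:
        return False

# Roughly what attr.gpu provides: the test is skipped on machines
# without a usable CUDA backend instead of erroring out.
requires_gpu = unittest.skipUnless(_cuda_available(), "needs a CUDA-capable GPU")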
|
Python
|
mit
|
yanweifu/chainer,hvy/chainer,cupy/cupy,ysekky/chainer,woodshop/complex-chainer,niboshi/chainer,tkerola/chainer,kashif/chainer,kikusu/chainer,jnishi/chainer,okuta/chainer,niboshi/chainer,benob/chainer,chainer/chainer,AlpacaDB/chainer,sou81821/chainer,umitanuki/chainer,tscohen/chainer,cupy/cupy,laysakura/chainer,masia02/chainer,jfsantos/chainer,anaruse/chainer,keisuke-umezawa/chainer,truongdq/chainer,chainer/chainer,wkentaro/chainer,ktnyt/chainer,aonotas/chainer,ikasumi/chainer,kikusu/chainer,AlpacaDB/chainer,sinhrks/chainer,sinhrks/chainer,rezoo/chainer,okuta/chainer,jnishi/chainer,1986ks/chainer,muupan/chainer,ytoyama/yans_chainer_hackathon,minhpqn/chainer,wavelets/chainer,muupan/chainer,ktnyt/chainer,cemoody/chainer,ktnyt/chainer,cupy/cupy,kuwa32/chainer,ktnyt/chainer,hvy/chainer,jnishi/chainer,niboshi/chainer,Kaisuke5/chainer,tigerneil/chainer,ronekko/chainer,cupy/cupy,truongdq/chainer,pfnet/chainer,t-abe/chainer,hvy/chainer,niboshi/chainer,chainer/chainer,okuta/chainer,t-abe/chainer,keisuke-umezawa/chainer,woodshop/chainer,jnishi/chainer,wkentaro/chainer,hidenori-t/chainer,elviswf/chainer,chainer/chainer,keisuke-umezawa/chainer,wkentaro/chainer,hvy/chainer,benob/chainer,delta2323/chainer,kiyukuta/chainer,okuta/chainer,wkentaro/chainer,keisuke-umezawa/chainer
|
import unittest
import numpy
import chainer
from chainer import cuda
from chainer import functions
from chainer import testing
+ from chainer.testing import attr
if cuda.available:
cuda.init()
class TestDropout(unittest.TestCase):
def setUp(self):
self.x = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32)
def check_type_forward(self, x_data):
x = chainer.Variable(x_data)
try:
functions.dropout(x)
except Exception:
self.fail()
def test_type_forward_cpu(self):
self.check_type_forward(self.x)
+ @attr.gpu
def test_type_forward_gpu(self):
self.check_type_forward(cuda.to_gpu(self.x))
testing.run_module(__name__, __file__)
|
Add attr.gpu decorator to gpu test of dropout
|
## Code Before:
import unittest
import numpy
import chainer
from chainer import cuda
from chainer import functions
from chainer import testing
if cuda.available:
cuda.init()
class TestDropout(unittest.TestCase):
def setUp(self):
self.x = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32)
def check_type_forward(self, x_data):
x = chainer.Variable(x_data)
try:
functions.dropout(x)
except Exception:
self.fail()
def test_type_forward_cpu(self):
self.check_type_forward(self.x)
def test_type_forward_gpu(self):
self.check_type_forward(cuda.to_gpu(self.x))
testing.run_module(__name__, __file__)
## Instruction:
Add attr.gpu decorator to gpu test of dropout
## Code After:
import unittest
import numpy
import chainer
from chainer import cuda
from chainer import functions
from chainer import testing
from chainer.testing import attr
if cuda.available:
cuda.init()
class TestDropout(unittest.TestCase):
def setUp(self):
self.x = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32)
def check_type_forward(self, x_data):
x = chainer.Variable(x_data)
try:
functions.dropout(x)
except Exception:
self.fail()
def test_type_forward_cpu(self):
self.check_type_forward(self.x)
@attr.gpu
def test_type_forward_gpu(self):
self.check_type_forward(cuda.to_gpu(self.x))
testing.run_module(__name__, __file__)
|
import unittest
import numpy
import chainer
from chainer import cuda
from chainer import functions
from chainer import testing
+ from chainer.testing import attr
if cuda.available:
cuda.init()
class TestDropout(unittest.TestCase):
def setUp(self):
self.x = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32)
def check_type_forward(self, x_data):
x = chainer.Variable(x_data)
try:
functions.dropout(x)
except Exception:
self.fail()
def test_type_forward_cpu(self):
self.check_type_forward(self.x)
+ @attr.gpu
def test_type_forward_gpu(self):
self.check_type_forward(cuda.to_gpu(self.x))
testing.run_module(__name__, __file__)
|
c254bf20bc8b4b7c73e3361d3666fb3733dbc09f
|
pycroscopy/processing/__init__.py
|
pycroscopy/processing/__init__.py
|
import fft
import gmode_utils
import cluster
import proc_utils
import decomposition
def no_impl(*args,**kwargs):
raise NotImplementedError("You need to install Multiprocess package (pip,github) to do a parallel Computation.\n"
"Switching to the serial version. ")
# from .feature_extraction import FeatureExtractorParallel, FeatureExtractorSerial
# from .geometric_transformation import geoTransformerParallel, geoTransformerSerial
#
# FeatureExtractor = FeatureExtractorSerial
# geoTransformer = geoTransformerSerial
#
# try:
# import multiprocess
# except ImportError:
# FeatureExtractorParallel = no_impl
# geoTransformerParallel = no_impl
# else:
# FeatureExtractor = FeatureExtractorParallel
# geoTransformer = geoTransformerParallel
|
import fft
import gmode_utils
import cluster
import proc_utils
import decomposition
def no_impl(*args,**kwargs):
raise NotImplementedError("You need to install Multiprocess package (pip,github) to do a parallel Computation.\n"
"Switching to the serial version. ")
from .feature_extraction import FeatureExtractorParallel, FeatureExtractorSerial
from .geometric_transformation import geoTransformerParallel, geoTransformerSerial
FeatureExtractor = FeatureExtractorSerial
geoTransformer = geoTransformerSerial
try:
import multiprocess
except ImportError:
FeatureExtractorParallel = no_impl
geoTransformerParallel = no_impl
else:
FeatureExtractor = FeatureExtractorParallel
geoTransformer = geoTransformerParallel
|
Revert "Commented out unimplemented imports"
|
Revert "Commented out unimplemented imports"
This reverts commit f6b76db8f963d28c0a9f2875139d5e286e3bd01b.
|
Python
|
mit
|
pycroscopy/pycroscopy,anugrah-saxena/pycroscopy
|
import fft
import gmode_utils
import cluster
import proc_utils
import decomposition
def no_impl(*args,**kwargs):
raise NotImplementedError("You need to install Multiprocess package (pip,github) to do a parallel Computation.\n"
"Switching to the serial version. ")
- # from .feature_extraction import FeatureExtractorParallel, FeatureExtractorSerial
+ from .feature_extraction import FeatureExtractorParallel, FeatureExtractorSerial
- # from .geometric_transformation import geoTransformerParallel, geoTransformerSerial
+ from .geometric_transformation import geoTransformerParallel, geoTransformerSerial
- #
- # FeatureExtractor = FeatureExtractorSerial
- # geoTransformer = geoTransformerSerial
- #
- # try:
- # import multiprocess
- # except ImportError:
- # FeatureExtractorParallel = no_impl
- # geoTransformerParallel = no_impl
- # else:
- # FeatureExtractor = FeatureExtractorParallel
- # geoTransformer = geoTransformerParallel
+ FeatureExtractor = FeatureExtractorSerial
+ geoTransformer = geoTransformerSerial
+
+ try:
+ import multiprocess
+ except ImportError:
+ FeatureExtractorParallel = no_impl
+ geoTransformerParallel = no_impl
+ else:
+ FeatureExtractor = FeatureExtractorParallel
+ geoTransformer = geoTransformerParallel
+
|
Revert "Commented out unimplemented imports"
|
## Code Before:
import fft
import gmode_utils
import cluster
import proc_utils
import decomposition
def no_impl(*args,**kwargs):
raise NotImplementedError("You need to install Multiprocess package (pip,github) to do a parallel Computation.\n"
"Switching to the serial version. ")
# from .feature_extraction import FeatureExtractorParallel, FeatureExtractorSerial
# from .geometric_transformation import geoTransformerParallel, geoTransformerSerial
#
# FeatureExtractor = FeatureExtractorSerial
# geoTransformer = geoTransformerSerial
#
# try:
# import multiprocess
# except ImportError:
# FeatureExtractorParallel = no_impl
# geoTransformerParallel = no_impl
# else:
# FeatureExtractor = FeatureExtractorParallel
# geoTransformer = geoTransformerParallel
## Instruction:
Revert "Commented out unimplemented imports"
## Code After:
import fft
import gmode_utils
import cluster
import proc_utils
import decomposition
def no_impl(*args,**kwargs):
raise NotImplementedError("You need to install Multiprocess package (pip,github) to do a parallel Computation.\n"
"Switching to the serial version. ")
from .feature_extraction import FeatureExtractorParallel, FeatureExtractorSerial
from .geometric_transformation import geoTransformerParallel, geoTransformerSerial
FeatureExtractor = FeatureExtractorSerial
geoTransformer = geoTransformerSerial
try:
import multiprocess
except ImportError:
FeatureExtractorParallel = no_impl
geoTransformerParallel = no_impl
else:
FeatureExtractor = FeatureExtractorParallel
geoTransformer = geoTransformerParallel
|
import fft
import gmode_utils
import cluster
import proc_utils
import decomposition
def no_impl(*args,**kwargs):
raise NotImplementedError("You need to install Multiprocess package (pip,github) to do a parallel Computation.\n"
"Switching to the serial version. ")
- # from .feature_extraction import FeatureExtractorParallel, FeatureExtractorSerial
? --
+ from .feature_extraction import FeatureExtractorParallel, FeatureExtractorSerial
- # from .geometric_transformation import geoTransformerParallel, geoTransformerSerial
? --
+ from .geometric_transformation import geoTransformerParallel, geoTransformerSerial
- #
+
- # FeatureExtractor = FeatureExtractorSerial
? --
+ FeatureExtractor = FeatureExtractorSerial
- # geoTransformer = geoTransformerSerial
? --
+ geoTransformer = geoTransformerSerial
- #
+
- # try:
? --
+ try:
- # import multiprocess
? --
+ import multiprocess
- # except ImportError:
? --
+ except ImportError:
- # FeatureExtractorParallel = no_impl
? --
+ FeatureExtractorParallel = no_impl
- # geoTransformerParallel = no_impl
? --
+ geoTransformerParallel = no_impl
- # else:
? --
+ else:
- # FeatureExtractor = FeatureExtractorParallel
? --
+ FeatureExtractor = FeatureExtractorParallel
- # geoTransformer = geoTransformerParallel
? --
+ geoTransformer = geoTransformerParallel
|
f4d66a5820582c995f1d31fe6a2442fc42d71077
|
saulify/scrapers/newspaper.py
|
saulify/scrapers/newspaper.py
|
from __future__ import absolute_import
from flask import Markup
from newspaper import Article
from xml.etree import ElementTree
import markdown2
import html2text
def clean_content(url_to_clean):
article = Article(url_to_clean)
article.download()
article.parse()
html_string = ElementTree.tostring(article.clean_top_node)
markdown = html2text.HTML2Text().handle(html_string)
article_html = Markup(markdown2.markdown(markdown))
return {
'html': article_html,
'authors': str(', '.join(article.authors)),
'title': article.title,
'plaintext': markdown.replace('\n', ' '),
'markdown': markdown
}
|
from __future__ import absolute_import
from flask import Markup
from newspaper import Article
from xml.etree import ElementTree
import markdown2
import html2text
def clean_content(url_to_clean):
""" Parse an article at a given url using newspaper.
Args:
url (str): Url where the article is found.
Returns:
Dictionary providing cleaned article and extracted content
(see `construct_result`).
"""
article = Article(url_to_clean)
article.download()
article.parse()
return construct_result(article)
def clean_source(url, source):
""" Parse a pre-downloaded article using newspaper.
Args:
url (str): The url where the article was sourced (necessary for the
newspaper API).
source (str): Html source of the article page.
Returns:
Dictionary providing cleaned article and extracted content
(see `construct_result`).
"""
article = Article(url)
article.set_html(source)
article.parse()
return construct_result(article)
def construct_result(article):
""" Construct article extraction result dictionary in standard format.
Args:
article (Article): A parsed `newspaper` `Article` object.
Returns:
Dictionary providing cleaned article and extracted content;
author, title, markdown, plaintext, html.
"""
html_string = ElementTree.tostring(article.clean_top_node)
markdown = html2text.HTML2Text().handle(html_string)
article_html = Markup(markdown2.markdown(markdown))
return {
'html': article_html,
'authors': str(', '.join(article.authors)),
'title': article.title,
'plaintext': markdown.replace('\n', ' '),
'markdown': markdown
}
|
Split `clean_content` into component functions
|
Split `clean_content` into component functions
Provides ability to use newspaper to parse articles whose source has
already been downloaded.
|
Python
|
agpl-3.0
|
asm-products/saulify-web,asm-products/saulify-web,asm-products/saulify-web
|
from __future__ import absolute_import
from flask import Markup
from newspaper import Article
from xml.etree import ElementTree
import markdown2
import html2text
def clean_content(url_to_clean):
+ """ Parse an article at a given url using newspaper.
+
+ Args:
+ url (str): Url where the article is found.
+
+ Returns:
+ Dictionary providing cleaned article and extracted content
+ (see `construct_result`).
+ """
article = Article(url_to_clean)
article.download()
article.parse()
+
+ return construct_result(article)
+
+
+ def clean_source(url, source):
+ """ Parse a pre-downloaded article using newspaper.
+
+ Args:
+ url (str): The url where the article was sourced (necessary for the
+ newspaper API).
+
+ source (str): Html source of the article page.
+
+ Returns:
+ Dictionary providing cleaned article and extracted content
+ (see `construct_result`).
+ """
+ article = Article(url)
+ article.set_html(source)
+ article.parse()
+
+ return construct_result(article)
+
+
+ def construct_result(article):
+ """ Construct article extraction result dictionary in standard format.
+
+ Args:
+ article (Article): A parsed `newspaper` `Article` object.
+
+ Returns:
+ Dictionary providing cleaned article and extracted content;
+ author, title, markdown, plaintext, html.
+ """
html_string = ElementTree.tostring(article.clean_top_node)
markdown = html2text.HTML2Text().handle(html_string)
article_html = Markup(markdown2.markdown(markdown))
return {
'html': article_html,
'authors': str(', '.join(article.authors)),
'title': article.title,
'plaintext': markdown.replace('\n', ' '),
'markdown': markdown
}
|
Split `clean_content` into component functions
|
## Code Before:
from __future__ import absolute_import
from flask import Markup
from newspaper import Article
from xml.etree import ElementTree
import markdown2
import html2text
def clean_content(url_to_clean):
article = Article(url_to_clean)
article.download()
article.parse()
html_string = ElementTree.tostring(article.clean_top_node)
markdown = html2text.HTML2Text().handle(html_string)
article_html = Markup(markdown2.markdown(markdown))
return {
'html': article_html,
'authors': str(', '.join(article.authors)),
'title': article.title,
'plaintext': markdown.replace('\n', ' '),
'markdown': markdown
}
## Instruction:
Split `clean_content` into component functions
## Code After:
from __future__ import absolute_import
from flask import Markup
from newspaper import Article
from xml.etree import ElementTree
import markdown2
import html2text
def clean_content(url_to_clean):
""" Parse an article at a given url using newspaper.
Args:
url (str): Url where the article is found.
Returns:
Dictionary providing cleaned article and extracted content
(see `construct_result`).
"""
article = Article(url_to_clean)
article.download()
article.parse()
return construct_result(article)
def clean_source(url, source):
""" Parse a pre-downloaded article using newspaper.
Args:
url (str): The url where the article was sourced (necessary for the
newspaper API).
source (str): Html source of the article page.
Returns:
Dictionary providing cleaned article and extracted content
(see `construct_result`).
"""
article = Article(url)
article.set_html(source)
article.parse()
return construct_result(article)
def construct_result(article):
""" Construct article extraction result dictionary in standard format.
Args:
article (Article): A parsed `newspaper` `Article` object.
Returns:
Dictionary providing cleaned article and extracted content;
author, title, markdown, plaintext, html.
"""
html_string = ElementTree.tostring(article.clean_top_node)
markdown = html2text.HTML2Text().handle(html_string)
article_html = Markup(markdown2.markdown(markdown))
return {
'html': article_html,
'authors': str(', '.join(article.authors)),
'title': article.title,
'plaintext': markdown.replace('\n', ' '),
'markdown': markdown
}
|
from __future__ import absolute_import
from flask import Markup
from newspaper import Article
from xml.etree import ElementTree
import markdown2
import html2text
def clean_content(url_to_clean):
+ """ Parse an article at a given url using newspaper.
+
+ Args:
+ url (str): Url where the article is found.
+
+ Returns:
+ Dictionary providing cleaned article and extracted content
+ (see `construct_result`).
+ """
article = Article(url_to_clean)
article.download()
article.parse()
+
+ return construct_result(article)
+
+
+ def clean_source(url, source):
+ """ Parse a pre-downloaded article using newspaper.
+
+ Args:
+ url (str): The url where the article was sourced (necessary for the
+ newspaper API).
+
+ source (str): Html source of the article page.
+
+ Returns:
+ Dictionary providing cleaned article and extracted content
+ (see `construct_result`).
+ """
+ article = Article(url)
+ article.set_html(source)
+ article.parse()
+
+ return construct_result(article)
+
+
+ def construct_result(article):
+ """ Construct article extraction result dictionary in standard format.
+
+ Args:
+ article (Article): A parsed `newspaper` `Article` object.
+
+ Returns:
+ Dictionary providing cleaned article and extracted content;
+ author, title, markdown, plaintext, html.
+ """
html_string = ElementTree.tostring(article.clean_top_node)
markdown = html2text.HTML2Text().handle(html_string)
article_html = Markup(markdown2.markdown(markdown))
return {
'html': article_html,
'authors': str(', '.join(article.authors)),
'title': article.title,
'plaintext': markdown.replace('\n', ' '),
'markdown': markdown
}
|
8eaa0f2fef26cc90e3aea5dea1253b7980400375
|
latest_tweets/templatetags/latest_tweets_tags.py
|
latest_tweets/templatetags/latest_tweets_tags.py
|
from django import template
from latest_tweets.models import Tweet
register = template.Library()
@register.assignment_tag
def get_latest_tweets(*args, **kwargs):
limit = kwargs.pop('limit', None)
include_replies = kwargs.pop('include_replies', False)
tweets = Tweet.objects.all()
# By default we exclude replies
if not include_replies:
tweets = tweets.exclude(is_reply=True)
if args:
tweets = tweets.filter(user__in=args)
if limit is not None:
tweets = tweets[:limit]
return tweets
|
from django import template
from latest_tweets.models import Tweet
register = template.Library()
@register.assignment_tag
def get_latest_tweets(*args, **kwargs):
limit = kwargs.pop('limit', None)
include_replies = kwargs.pop('include_replies', False)
liked_by = kwargs.pop('liked_by', None)
tweets = Tweet.objects.all()
# By default we exclude replies
if not include_replies:
tweets = tweets.exclude(is_reply=True)
if liked_by:
tweets = tweets.filter(like__user=liked_by)
if args:
tweets = tweets.filter(user__in=args)
if limit is not None:
tweets = tweets[:limit]
return tweets
|
Add tag support for getting liked tweets
|
Add tag support for getting liked tweets
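If it helps, a hedged sketch of exercising the new keyword by calling the tag function directly; some_user is a placeholder for a real User instance (templates reach the same code through the assignment tag):

from latest_tweets.templatetags.latest_tweets_tags import get_latest_tweets

# Five most recent non-reply tweets that some_user has liked.
liked = get_latest_tweets(limit=5, liked_by=some_user)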
|
Python
|
bsd-3-clause
|
blancltd/django-latest-tweets
|
from django import template
from latest_tweets.models import Tweet
register = template.Library()
@register.assignment_tag
def get_latest_tweets(*args, **kwargs):
limit = kwargs.pop('limit', None)
include_replies = kwargs.pop('include_replies', False)
+ liked_by = kwargs.pop('liked_by', None)
tweets = Tweet.objects.all()
# By default we exclude replies
if not include_replies:
tweets = tweets.exclude(is_reply=True)
+
+ if liked_by:
+ tweets = tweets.filter(like__user=liked_by)
if args:
tweets = tweets.filter(user__in=args)
if limit is not None:
tweets = tweets[:limit]
return tweets
|
Add tag support for getting liked tweets
|
## Code Before:
from django import template
from latest_tweets.models import Tweet
register = template.Library()
@register.assignment_tag
def get_latest_tweets(*args, **kwargs):
limit = kwargs.pop('limit', None)
include_replies = kwargs.pop('include_replies', False)
tweets = Tweet.objects.all()
# By default we exclude replies
if not include_replies:
tweets = tweets.exclude(is_reply=True)
if args:
tweets = tweets.filter(user__in=args)
if limit is not None:
tweets = tweets[:limit]
return tweets
## Instruction:
Add tag support for getting liked tweets
## Code After:
from django import template
from latest_tweets.models import Tweet
register = template.Library()
@register.assignment_tag
def get_latest_tweets(*args, **kwargs):
limit = kwargs.pop('limit', None)
include_replies = kwargs.pop('include_replies', False)
liked_by = kwargs.pop('liked_by', None)
tweets = Tweet.objects.all()
# By default we exclude replies
if not include_replies:
tweets = tweets.exclude(is_reply=True)
if liked_by:
tweets = tweets.filter(like__user=liked_by)
if args:
tweets = tweets.filter(user__in=args)
if limit is not None:
tweets = tweets[:limit]
return tweets
|
from django import template
from latest_tweets.models import Tweet
register = template.Library()
@register.assignment_tag
def get_latest_tweets(*args, **kwargs):
limit = kwargs.pop('limit', None)
include_replies = kwargs.pop('include_replies', False)
+ liked_by = kwargs.pop('liked_by', None)
tweets = Tweet.objects.all()
# By default we exclude replies
if not include_replies:
tweets = tweets.exclude(is_reply=True)
+
+ if liked_by:
+ tweets = tweets.filter(like__user=liked_by)
if args:
tweets = tweets.filter(user__in=args)
if limit is not None:
tweets = tweets[:limit]
return tweets
|
d46d908f5cfafcb6962207c45f923d3afb7f35a7
|
pyrobus/__init__.py
|
pyrobus/__init__.py
|
from .robot import Robot
from .modules import *
|
import logging
from .robot import Robot
from .modules import *
nh = logging.NullHandler()
logging.getLogger(__name__).addHandler(nh)
|
Add null handler as default for logging.
|
Add null handler as default for logging.
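With a NullHandler installed, the library stays silent until the application opts in; a minimal sketch of doing so:

import logging

# The application decides where records go; pyrobus's NullHandler only
# suppresses the "no handlers could be found" warning by default.
logging.basicConfig(level=logging.INFO)
logging.getLogger("pyrobus").setLevel(logging.DEBUG)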
|
Python
|
mit
|
pollen/pyrobus
|
+ import logging
+
from .robot import Robot
from .modules import *
+
+ nh = logging.NullHandler()
+ logging.getLogger(__name__).addHandler(nh)
+
|
Add null handler as default for logging.
|
## Code Before:
from .robot import Robot
from .modules import *
## Instruction:
Add null handler as default for logging.
## Code After:
import logging
from .robot import Robot
from .modules import *
nh = logging.NullHandler()
logging.getLogger(__name__).addHandler(nh)
|
+ import logging
+
from .robot import Robot
from .modules import *
+
+
+ nh = logging.NullHandler()
+ logging.getLogger(__name__).addHandler(nh)
|
69890f36b1853b3845ff29ec15ccde11f7ac86f2
|
zerver/migrations/0306_custom_profile_field_date_format.py
|
zerver/migrations/0306_custom_profile_field_date_format.py
|
from django.db import migrations
class Migration(migrations.Migration):
"""
We previously accepted invalid ISO 8601 dates like 1909-3-5 for
date values of custom profile fields. Correct them by adding the
missing leading zeros: 1909-03-05.
"""
dependencies = [
("zerver", "0305_realm_deactivated_redirect"),
]
operations = [
migrations.RunSQL(
sql="""\
UPDATE zerver_customprofilefieldvalue
SET value = to_char(to_date(value, 'YYYY-MM-DD'), 'YYYY-MM-DD')
FROM zerver_customprofilefield AS f
WHERE f.id = field_id
AND f.field_type = 4
AND value <> to_char(to_date(value, 'YYYY-MM-DD'), 'YYYY-MM-DD');
""",
reverse_sql="",
),
]
|
from django.db import migrations
class Migration(migrations.Migration):
"""
We previously accepted invalid ISO 8601 dates like 1909-3-5 for
date values of custom profile fields. Correct them by adding the
missing leading zeros: 1909-03-05.
"""
dependencies = [
("zerver", "0305_realm_deactivated_redirect"),
]
operations = [
migrations.RunSQL(
sql="""\
UPDATE zerver_customprofilefieldvalue
SET value = to_char(to_date(value, 'YYYY-MM-DD'), 'YYYY-MM-DD')
FROM zerver_customprofilefield AS f
WHERE f.id = field_id
AND f.field_type = 4
AND CASE
WHEN f.field_type = 4
THEN value <> to_char(to_date(value, 'YYYY-MM-DD'), 'YYYY-MM-DD')
END;
""",
reverse_sql="",
),
]
|
Enforce evaluation order in 0306 WHERE clause.
|
migrations: Enforce evaluation order in 0306 WHERE clause.
Depending on PostgreSQL’s query plan, it was possible for the value
condition to be evaluated before the field_type condition was checked,
leading to errors like
psycopg2.errors.InvalidDatetimeFormat: invalid value "stri" for "YYYY"
DETAIL: Value must be an integer.
Signed-off-by: Anders Kaseorg <[email protected]>
|
Python
|
apache-2.0
|
zulip/zulip,eeshangarg/zulip,hackerkid/zulip,andersk/zulip,kou/zulip,rht/zulip,kou/zulip,rht/zulip,andersk/zulip,punchagan/zulip,hackerkid/zulip,kou/zulip,andersk/zulip,hackerkid/zulip,punchagan/zulip,punchagan/zulip,kou/zulip,andersk/zulip,punchagan/zulip,rht/zulip,hackerkid/zulip,andersk/zulip,rht/zulip,rht/zulip,eeshangarg/zulip,zulip/zulip,zulip/zulip,hackerkid/zulip,punchagan/zulip,eeshangarg/zulip,zulip/zulip,eeshangarg/zulip,kou/zulip,kou/zulip,zulip/zulip,rht/zulip,zulip/zulip,andersk/zulip,hackerkid/zulip,zulip/zulip,punchagan/zulip,eeshangarg/zulip,rht/zulip,kou/zulip,eeshangarg/zulip,eeshangarg/zulip,andersk/zulip,punchagan/zulip,hackerkid/zulip
|
from django.db import migrations
class Migration(migrations.Migration):
"""
We previously accepted invalid ISO 8601 dates like 1909-3-5 for
date values of custom profile fields. Correct them by adding the
missing leading zeros: 1909-03-05.
"""
dependencies = [
("zerver", "0305_realm_deactivated_redirect"),
]
operations = [
migrations.RunSQL(
sql="""\
UPDATE zerver_customprofilefieldvalue
SET value = to_char(to_date(value, 'YYYY-MM-DD'), 'YYYY-MM-DD')
FROM zerver_customprofilefield AS f
WHERE f.id = field_id
AND f.field_type = 4
+ AND CASE
+ WHEN f.field_type = 4
- AND value <> to_char(to_date(value, 'YYYY-MM-DD'), 'YYYY-MM-DD');
+ THEN value <> to_char(to_date(value, 'YYYY-MM-DD'), 'YYYY-MM-DD')
+ END;
""",
reverse_sql="",
),
]
|
Enforce evaluation order in 0306 WHERE clause.
|
## Code Before:
from django.db import migrations
class Migration(migrations.Migration):
"""
We previously accepted invalid ISO 8601 dates like 1909-3-5 for
date values of custom profile fields. Correct them by adding the
missing leading zeros: 1909-03-05.
"""
dependencies = [
("zerver", "0305_realm_deactivated_redirect"),
]
operations = [
migrations.RunSQL(
sql="""\
UPDATE zerver_customprofilefieldvalue
SET value = to_char(to_date(value, 'YYYY-MM-DD'), 'YYYY-MM-DD')
FROM zerver_customprofilefield AS f
WHERE f.id = field_id
AND f.field_type = 4
AND value <> to_char(to_date(value, 'YYYY-MM-DD'), 'YYYY-MM-DD');
""",
reverse_sql="",
),
]
## Instruction:
Enforce evaluation order in 0306 WHERE clause.
## Code After:
from django.db import migrations
class Migration(migrations.Migration):
"""
We previously accepted invalid ISO 8601 dates like 1909-3-5 for
date values of custom profile fields. Correct them by adding the
missing leading zeros: 1909-03-05.
"""
dependencies = [
("zerver", "0305_realm_deactivated_redirect"),
]
operations = [
migrations.RunSQL(
sql="""\
UPDATE zerver_customprofilefieldvalue
SET value = to_char(to_date(value, 'YYYY-MM-DD'), 'YYYY-MM-DD')
FROM zerver_customprofilefield AS f
WHERE f.id = field_id
AND f.field_type = 4
AND CASE
WHEN f.field_type = 4
THEN value <> to_char(to_date(value, 'YYYY-MM-DD'), 'YYYY-MM-DD')
END;
""",
reverse_sql="",
),
]
|
from django.db import migrations
class Migration(migrations.Migration):
"""
We previously accepted invalid ISO 8601 dates like 1909-3-5 for
date values of custom profile fields. Correct them by adding the
missing leading zeros: 1909-03-05.
"""
dependencies = [
("zerver", "0305_realm_deactivated_redirect"),
]
operations = [
migrations.RunSQL(
sql="""\
UPDATE zerver_customprofilefieldvalue
SET value = to_char(to_date(value, 'YYYY-MM-DD'), 'YYYY-MM-DD')
FROM zerver_customprofilefield AS f
WHERE f.id = field_id
AND f.field_type = 4
+ AND CASE
+ WHEN f.field_type = 4
- AND value <> to_char(to_date(value, 'YYYY-MM-DD'), 'YYYY-MM-DD');
? ^ - -
+ THEN value <> to_char(to_date(value, 'YYYY-MM-DD'), 'YYYY-MM-DD')
? ^^^^^^^^^^^
+ END;
""",
reverse_sql="",
),
]
|
12cfaa0bf758a78d854e917f357ac2913d4e73c6
|
tools/win32build/doall.py
|
tools/win32build/doall.py
|
import subprocess
import os
PYVER = "2.5"
# Bootstrap
subprocess.check_call(['python', 'prepare_bootstrap.py'])
# Build binaries
subprocess.check_call(['python', 'build.py', '-p', PYVER], cwd = 'bootstrap-%s' % PYVER)
# Build installer using nsis
subprocess.check_call(['makensis', 'numpy-superinstaller.nsi'], cwd = 'bootstrap-%s' % PYVER)
|
import subprocess
import os
PYVER = "2.5"
# Bootstrap
subprocess.check_call(['python', 'prepare_bootstrap.py', '-p', PYVER])
# Build binaries
subprocess.check_call(['python', 'build.py', '-p', PYVER], cwd = 'bootstrap-%s' % PYVER)
# Build installer using nsis
subprocess.check_call(['makensis', 'numpy-superinstaller.nsi'], cwd = 'bootstrap-%s' % PYVER)
|
Handle python version in prepare_bootstrap script.
|
Handle python version in prepare_bootstrap script.
|
Python
|
bsd-3-clause
|
BabeNovelty/numpy,matthew-brett/numpy,empeeu/numpy,hainm/numpy,GrimDerp/numpy,tdsmith/numpy,mhvk/numpy,rmcgibbo/numpy,mindw/numpy,ogrisel/numpy,stefanv/numpy,trankmichael/numpy,brandon-rhodes/numpy,GaZ3ll3/numpy,GrimDerp/numpy,ajdawson/numpy,jschueller/numpy,tdsmith/numpy,endolith/numpy,sonnyhu/numpy,rgommers/numpy,cowlicks/numpy,madphysicist/numpy,cjermain/numpy,sigma-random/numpy,dimasad/numpy,sigma-random/numpy,tacaswell/numpy,pizzathief/numpy,ddasilva/numpy,rherault-insa/numpy,jankoslavic/numpy,bmorris3/numpy,kirillzhuravlev/numpy,CMartelLML/numpy,ddasilva/numpy,SunghanKim/numpy,bertrand-l/numpy,rgommers/numpy,yiakwy/numpy,dato-code/numpy,dimasad/numpy,pelson/numpy,embray/numpy,WarrenWeckesser/numpy,KaelChen/numpy,ESSS/numpy,pizzathief/numpy,WillieMaddox/numpy,ajdawson/numpy,tdsmith/numpy,rudimeier/numpy,pbrod/numpy,AustereCuriosity/numpy,CMartelLML/numpy,dato-code/numpy,empeeu/numpy,hainm/numpy,Anwesh43/numpy,ChristopherHogan/numpy,endolith/numpy,KaelChen/numpy,ChristopherHogan/numpy,cjermain/numpy,chatcannon/numpy,ajdawson/numpy,ssanderson/numpy,jakirkham/numpy,mingwpy/numpy,ogrisel/numpy,Eric89GXL/numpy,grlee77/numpy,ChristopherHogan/numpy,drasmuss/numpy,astrofrog/numpy,dwf/numpy,shoyer/numpy,WarrenWeckesser/numpy,mwiebe/numpy,bertrand-l/numpy,dato-code/numpy,joferkington/numpy,solarjoe/numpy,charris/numpy,ESSS/numpy,ogrisel/numpy,bringingheavendown/numpy,mingwpy/numpy,mortada/numpy,endolith/numpy,ChanderG/numpy,astrofrog/numpy,nbeaver/numpy,empeeu/numpy,tynn/numpy,ddasilva/numpy,nguyentu1602/numpy,behzadnouri/numpy,dato-code/numpy,charris/numpy,pbrod/numpy,BMJHayward/numpy,numpy/numpy,charris/numpy,argriffing/numpy,Yusa95/numpy,MSeifert04/numpy,skwbc/numpy,MaPePeR/numpy,Eric89GXL/numpy,ahaldane/numpy,mhvk/numpy,mattip/numpy,argriffing/numpy,MaPePeR/numpy,dimasad/numpy,rajathkumarmp/numpy,mhvk/numpy,naritta/numpy,embray/numpy,sinhrks/numpy,grlee77/numpy,nbeaver/numpy,mattip/numpy,bringingheavendown/numpy,rgommers/numpy,empeeu/numpy,astrofrog/numpy,trankmichael/numpy,stuarteberg/numpy,mindw/numpy,moreati/numpy,has2k1/numpy,nbeaver/numpy,sinhrks/numpy,SunghanKim/numpy,nguyentu1602/numpy,NextThought/pypy-numpy,yiakwy/numpy,sigma-random/numpy,felipebetancur/numpy,dch312/numpy,naritta/numpy,pizzathief/numpy,MaPePeR/numpy,njase/numpy,njase/numpy,gfyoung/numpy,anntzer/numpy,andsor/numpy,felipebetancur/numpy,pbrod/numpy,WillieMaddox/numpy,rherault-insa/numpy,shoyer/numpy,dch312/numpy,BMJHayward/numpy,chatcannon/numpy,ewmoore/numpy,ssanderson/numpy,ContinuumIO/numpy,bmorris3/numpy,gmcastil/numpy,drasmuss/numpy,simongibbons/numpy,jschueller/numpy,madphysicist/numpy,ChanderG/numpy,yiakwy/numpy,Srisai85/numpy,has2k1/numpy,chatcannon/numpy,embray/numpy,ogrisel/numpy,kirillzhuravlev/numpy,brandon-rhodes/numpy,maniteja123/numpy,jankoslavic/numpy,drasmuss/numpy,sigma-random/numpy,jakirkham/numpy,dwillmer/numpy,GrimDerp/numpy,behzadnouri/numpy,groutr/numpy,pyparallel/numpy,madphysicist/numpy,mattip/numpy,shoyer/numpy,dch312/numpy,NextThought/pypy-numpy,andsor/numpy,simongibbons/numpy,ahaldane/numpy,mathdd/numpy,rudimeier/numpy,BMJHayward/numpy,jorisvandenbossche/numpy,kirillzhuravlev/numpy,ahaldane/numpy,githubmlai/numpy,dwillmer/numpy,CMartelLML/numpy,rhythmsosad/numpy,Dapid/numpy,simongibbons/numpy,anntzer/numpy,jorisvandenbossche/numpy,trankmichael/numpy,gmcastil/numpy,andsor/numpy,rajathkumarmp/numpy,seberg/numpy,dimasad/numpy,githubmlai/numpy,seberg/numpy,chiffa/numpy,kirillzhuravlev/numpy,cowlicks/numpy,ekalosak/numpy,cjermain/numpy,ahaldane/numpy,shoyer/numpy,jonathanunderwood/numpy,jakirkham/
numpy,mortada/numpy,KaelChen/numpy,joferkington/numpy,MichaelAquilina/numpy,Linkid/numpy,pyparallel/numpy,numpy/numpy-refactor,felipebetancur/numpy,pelson/numpy,musically-ut/numpy,jorisvandenbossche/numpy,grlee77/numpy,endolith/numpy,stefanv/numpy,pdebuyl/numpy,Linkid/numpy,ViralLeadership/numpy,sonnyhu/numpy,SiccarPoint/numpy,ContinuumIO/numpy,numpy/numpy,SiccarPoint/numpy,dwf/numpy,stuarteberg/numpy,rhythmsosad/numpy,pbrod/numpy,anntzer/numpy,larsmans/numpy,rajathkumarmp/numpy,numpy/numpy,musically-ut/numpy,simongibbons/numpy,ChanderG/numpy,Linkid/numpy,pelson/numpy,joferkington/numpy,jankoslavic/numpy,sonnyhu/numpy,stefanv/numpy,pizzathief/numpy,b-carter/numpy,BMJHayward/numpy,mortada/numpy,ekalosak/numpy,joferkington/numpy,ewmoore/numpy,abalkin/numpy,pdebuyl/numpy,cjermain/numpy,numpy/numpy-refactor,ewmoore/numpy,Linkid/numpy,dwf/numpy,pelson/numpy,NextThought/pypy-numpy,simongibbons/numpy,anntzer/numpy,SunghanKim/numpy,cowlicks/numpy,BabeNovelty/numpy,Eric89GXL/numpy,b-carter/numpy,mattip/numpy,stefanv/numpy,mingwpy/numpy,kiwifb/numpy,argriffing/numpy,embray/numpy,mathdd/numpy,jankoslavic/numpy,larsmans/numpy,BabeNovelty/numpy,skymanaditya1/numpy,musically-ut/numpy,charris/numpy,bmorris3/numpy,nguyentu1602/numpy,skymanaditya1/numpy,mwiebe/numpy,ChanderG/numpy,ahaldane/numpy,MSeifert04/numpy,kiwifb/numpy,ekalosak/numpy,jonathanunderwood/numpy,njase/numpy,MSeifert04/numpy,GaZ3ll3/numpy,pbrod/numpy,mingwpy/numpy,stefanv/numpy,Anwesh43/numpy,skwbc/numpy,rmcgibbo/numpy,Srisai85/numpy,rmcgibbo/numpy,dwillmer/numpy,SiccarPoint/numpy,grlee77/numpy,tynn/numpy,maniteja123/numpy,embray/numpy,rhythmsosad/numpy,ajdawson/numpy,AustereCuriosity/numpy,kiwifb/numpy,naritta/numpy,ESSS/numpy,madphysicist/numpy,tacaswell/numpy,skwbc/numpy,dwf/numpy,dwf/numpy,immerrr/numpy,behzadnouri/numpy,githubmlai/numpy,bertrand-l/numpy,MichaelAquilina/numpy,pizzathief/numpy,GaZ3ll3/numpy,mathdd/numpy,Dapid/numpy,SiccarPoint/numpy,WarrenWeckesser/numpy,mindw/numpy,Anwesh43/numpy,Anwesh43/numpy,solarjoe/numpy,nguyentu1602/numpy,ViralLeadership/numpy,mathdd/numpy,madphysicist/numpy,astrofrog/numpy,groutr/numpy,felipebetancur/numpy,mhvk/numpy,moreati/numpy,MSeifert04/numpy,jonathanunderwood/numpy,skymanaditya1/numpy,trankmichael/numpy,numpy/numpy,maniteja123/numpy,larsmans/numpy,Dapid/numpy,rajathkumarmp/numpy,jakirkham/numpy,numpy/numpy-refactor,dwillmer/numpy,bmorris3/numpy,pelson/numpy,larsmans/numpy,chiffa/numpy,numpy/numpy-refactor,Eric89GXL/numpy,matthew-brett/numpy,brandon-rhodes/numpy,abalkin/numpy,naritta/numpy,BabeNovelty/numpy,MSeifert04/numpy,Srisai85/numpy,immerrr/numpy,skymanaditya1/numpy,pyparallel/numpy,pdebuyl/numpy,grlee77/numpy,utke1/numpy,Yusa95/numpy,mhvk/numpy,hainm/numpy,hainm/numpy,leifdenby/numpy,matthew-brett/numpy,dch312/numpy,rudimeier/numpy,mwiebe/numpy,seberg/numpy,matthew-brett/numpy,SunghanKim/numpy,seberg/numpy,WarrenWeckesser/numpy,leifdenby/numpy,moreati/numpy,jorisvandenbossche/numpy,solarjoe/numpy,WarrenWeckesser/numpy,stuarteberg/numpy,ekalosak/numpy,GrimDerp/numpy,Yusa95/numpy,GaZ3ll3/numpy,mortada/numpy,rgommers/numpy,pdebuyl/numpy,ewmoore/numpy,tacaswell/numpy,rmcgibbo/numpy,Srisai85/numpy,b-carter/numpy,ViralLeadership/numpy,sonnyhu/numpy,tynn/numpy,has2k1/numpy,ogrisel/numpy,NextThought/pypy-numpy,astrofrog/numpy,MaPePeR/numpy,cowlicks/numpy,tdsmith/numpy,gfyoung/numpy,ChristopherHogan/numpy,jschueller/numpy,jakirkham/numpy,numpy/numpy-refactor,sinhrks/numpy,jorisvandenbossche/numpy,KaelChen/numpy,jschueller/numpy,sinhrks/numpy,ewmoore/numpy,stuarteberg/numpy,rudimeier/numpy,Cont
inuumIO/numpy,AustereCuriosity/numpy,matthew-brett/numpy,utke1/numpy,has2k1/numpy,utke1/numpy,mindw/numpy,rherault-insa/numpy,gfyoung/numpy,MichaelAquilina/numpy,ssanderson/numpy,bringingheavendown/numpy,chiffa/numpy,yiakwy/numpy,groutr/numpy,brandon-rhodes/numpy,immerrr/numpy,musically-ut/numpy,andsor/numpy,shoyer/numpy,Yusa95/numpy,leifdenby/numpy,abalkin/numpy,githubmlai/numpy,MichaelAquilina/numpy,rhythmsosad/numpy,immerrr/numpy,CMartelLML/numpy,gmcastil/numpy,WillieMaddox/numpy
|
import subprocess
import os
PYVER = "2.5"
# Bootstrap
- subprocess.check_call(['python', 'prepare_bootstrap.py'])
+ subprocess.check_call(['python', 'prepare_bootstrap.py', '-p', PYVER])
# Build binaries
subprocess.check_call(['python', 'build.py', '-p', PYVER], cwd = 'bootstrap-%s' % PYVER)
# Build installer using nsis
subprocess.check_call(['makensis', 'numpy-superinstaller.nsi'], cwd = 'bootstrap-%s' % PYVER)
|
Handle python version in prepare_bootstrap script.
|
## Code Before:
import subprocess
import os
PYVER = "2.5"
# Bootstrap
subprocess.check_call(['python', 'prepare_bootstrap.py'])
# Build binaries
subprocess.check_call(['python', 'build.py', '-p', PYVER], cwd = 'bootstrap-%s' % PYVER)
# Build installer using nsis
subprocess.check_call(['makensis', 'numpy-superinstaller.nsi'], cwd = 'bootstrap-%s' % PYVER)
## Instruction:
Handle python version in prepare_bootstrap script.
## Code After:
import subprocess
import os
PYVER = "2.5"
# Bootstrap
subprocess.check_call(['python', 'prepare_bootstrap.py', '-p', PYVER])
# Build binaries
subprocess.check_call(['python', 'build.py', '-p', PYVER], cwd = 'bootstrap-%s' % PYVER)
# Build installer using nsis
subprocess.check_call(['makensis', 'numpy-superinstaller.nsi'], cwd = 'bootstrap-%s' % PYVER)
|
import subprocess
import os
PYVER = "2.5"
# Bootstrap
- subprocess.check_call(['python', 'prepare_bootstrap.py'])
+ subprocess.check_call(['python', 'prepare_bootstrap.py', '-p', PYVER])
? +++++++++++++
# Build binaries
subprocess.check_call(['python', 'build.py', '-p', PYVER], cwd = 'bootstrap-%s' % PYVER)
# Build installer using nsis
subprocess.check_call(['makensis', 'numpy-superinstaller.nsi'], cwd = 'bootstrap-%s' % PYVER)
|
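For reference, a minimal sketch of how prepare_bootstrap.py could accept the new -p flag; the option name comes from the call site above, while the script's actual argument handling is an assumption, not part of the record.

# Hypothetical argument parsing for prepare_bootstrap.py (assumed, not from the source).
import argparse

parser = argparse.ArgumentParser(description='Prepare the bootstrap tree for one Python version.')
parser.add_argument('-p', '--pyver', default='2.5',
                    help='target Python version, e.g. 2.5')
args = parser.parse_args()
# The bootstrap directory name mirrors the cwd used by build.py above.
bootstrap_dir = 'bootstrap-%s' % args.pyver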
a8fcd8c56db0ce862c6c0ac79fc58a9e65992f6e
|
onlineweb4/context_processors.py
|
onlineweb4/context_processors.py
|
from django.conf import settings
from apps.feedback.models import FeedbackRelation
def context_settings(request):
context_extras = {}
if hasattr(settings, 'GOOGLE_ANALYTICS_KEY'):
context_extras['GOOGLE_ANALYTICS_KEY'] = settings.GOOGLE_ANALYTICS_KEY
if hasattr(settings, 'HOT_RELOAD'):
context_extras['HOT_RELOAD'] = settings.HOT_RELOAD
return context_extras
def feedback_notifier(request):
context_extras = {}
context_extras['feedback_pending'] = []
if not request.user.is_authenticated():
return context_extras
active_feedbacks = FeedbackRelation.objects.filter(active=True)
for active_feedback in active_feedbacks:
if active_feedback.content_object is None:
continue
        # This method returns both bools and a list for some reason. Python raises a TypeError for the expression: x in bool,
# so we do this to fetch once and test twice
not_answered = active_feedback.not_answered()
if not_answered == False or request.user not in not_answered:
continue
context_extras['feedback_pending'].append(active_feedback)
return context_extras
|
from django.conf import settings
from django.utils import timezone
from apps.feedback.models import FeedbackRelation
def context_settings(request):
context_extras = {}
if hasattr(settings, 'GOOGLE_ANALYTICS_KEY'):
context_extras['GOOGLE_ANALYTICS_KEY'] = settings.GOOGLE_ANALYTICS_KEY
if hasattr(settings, 'HOT_RELOAD'):
context_extras['HOT_RELOAD'] = settings.HOT_RELOAD
return context_extras
def feedback_notifier(request):
context_extras = {}
context_extras['feedback_pending'] = []
if not request.user.is_authenticated():
return context_extras
active_feedbacks = FeedbackRelation.objects.filter(active=True)
for active_feedback in active_feedbacks:
if active_feedback.content_object is None:
continue
        # Making sure we have an end date, and that the event is over
# and that the feedback deadline is not passed (logic reused from apps.feedback.mommy)
end_date = active_feedback.content_end_date()
today_date = timezone.now().date()
if not end_date or end_date.date() >= today_date or (active_feedback.deadline - today_date).days < 0:
continue
        # This method returns both bools and a list for some reason. Python raises a TypeError for the expression: x in bool,
# so we do this to fetch once and test twice
not_answered = active_feedback.not_answered()
if not_answered == False or request.user not in not_answered:
continue
context_extras['feedback_pending'].append(active_feedback)
return context_extras
|
Add more constraints to active feedback schemas
|
Add more constraints to active feedback schemas
|
Python
|
mit
|
dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4
|
+
from django.conf import settings
+ from django.utils import timezone
from apps.feedback.models import FeedbackRelation
def context_settings(request):
context_extras = {}
if hasattr(settings, 'GOOGLE_ANALYTICS_KEY'):
context_extras['GOOGLE_ANALYTICS_KEY'] = settings.GOOGLE_ANALYTICS_KEY
if hasattr(settings, 'HOT_RELOAD'):
context_extras['HOT_RELOAD'] = settings.HOT_RELOAD
return context_extras
def feedback_notifier(request):
context_extras = {}
context_extras['feedback_pending'] = []
if not request.user.is_authenticated():
return context_extras
active_feedbacks = FeedbackRelation.objects.filter(active=True)
for active_feedback in active_feedbacks:
if active_feedback.content_object is None:
continue
+         # Making sure we have an end date, and that the event is over
+ # and that the feedback deadline is not passed (logic reused from apps.feedback.mommy)
+ end_date = active_feedback.content_end_date()
+ today_date = timezone.now().date()
+ if not end_date or end_date.date() >= today_date or (active_feedback.deadline - today_date).days < 0:
+ continue
+
        # This method returns both bools and a list for some reason. Python raises a TypeError for the expression: x in bool,
# so we do this to fetch once and test twice
not_answered = active_feedback.not_answered()
if not_answered == False or request.user not in not_answered:
continue
context_extras['feedback_pending'].append(active_feedback)
return context_extras
|
Add more constraints to active feedback schemas
|
## Code Before:
from django.conf import settings
from apps.feedback.models import FeedbackRelation
def context_settings(request):
context_extras = {}
if hasattr(settings, 'GOOGLE_ANALYTICS_KEY'):
context_extras['GOOGLE_ANALYTICS_KEY'] = settings.GOOGLE_ANALYTICS_KEY
if hasattr(settings, 'HOT_RELOAD'):
context_extras['HOT_RELOAD'] = settings.HOT_RELOAD
return context_extras
def feedback_notifier(request):
context_extras = {}
context_extras['feedback_pending'] = []
if not request.user.is_authenticated():
return context_extras
active_feedbacks = FeedbackRelation.objects.filter(active=True)
for active_feedback in active_feedbacks:
if active_feedback.content_object is None:
continue
        # This method returns both bools and a list for some reason. Python raises a TypeError for the expression: x in bool,
# so we do this to fetch once and test twice
not_answered = active_feedback.not_answered()
if not_answered == False or request.user not in not_answered:
continue
context_extras['feedback_pending'].append(active_feedback)
return context_extras
## Instruction:
Add more constraints to active feedback schemas
## Code After:
from django.conf import settings
from django.utils import timezone
from apps.feedback.models import FeedbackRelation
def context_settings(request):
context_extras = {}
if hasattr(settings, 'GOOGLE_ANALYTICS_KEY'):
context_extras['GOOGLE_ANALYTICS_KEY'] = settings.GOOGLE_ANALYTICS_KEY
if hasattr(settings, 'HOT_RELOAD'):
context_extras['HOT_RELOAD'] = settings.HOT_RELOAD
return context_extras
def feedback_notifier(request):
context_extras = {}
context_extras['feedback_pending'] = []
if not request.user.is_authenticated():
return context_extras
active_feedbacks = FeedbackRelation.objects.filter(active=True)
for active_feedback in active_feedbacks:
if active_feedback.content_object is None:
continue
        # Making sure we have an end date, and that the event is over
# and that the feedback deadline is not passed (logic reused from apps.feedback.mommy)
end_date = active_feedback.content_end_date()
today_date = timezone.now().date()
if not end_date or end_date.date() >= today_date or (active_feedback.deadline - today_date).days < 0:
continue
        # This method returns both bools and a list for some reason. Python raises a TypeError for the expression: x in bool,
# so we do this to fetch once and test twice
not_answered = active_feedback.not_answered()
if not_answered == False or request.user not in not_answered:
continue
context_extras['feedback_pending'].append(active_feedback)
return context_extras
|
+
from django.conf import settings
+ from django.utils import timezone
from apps.feedback.models import FeedbackRelation
def context_settings(request):
context_extras = {}
if hasattr(settings, 'GOOGLE_ANALYTICS_KEY'):
context_extras['GOOGLE_ANALYTICS_KEY'] = settings.GOOGLE_ANALYTICS_KEY
if hasattr(settings, 'HOT_RELOAD'):
context_extras['HOT_RELOAD'] = settings.HOT_RELOAD
return context_extras
def feedback_notifier(request):
context_extras = {}
context_extras['feedback_pending'] = []
if not request.user.is_authenticated():
return context_extras
active_feedbacks = FeedbackRelation.objects.filter(active=True)
for active_feedback in active_feedbacks:
if active_feedback.content_object is None:
continue
+         # Making sure we have an end date, and that the event is over
+ # and that the feedback deadline is not passed (logic reused from apps.feedback.mommy)
+ end_date = active_feedback.content_end_date()
+ today_date = timezone.now().date()
+ if not end_date or end_date.date() >= today_date or (active_feedback.deadline - today_date).days < 0:
+ continue
+
        # This method returns both bools and a list for some reason. Python raises a TypeError for the expression: x in bool,
# so we do this to fetch once and test twice
not_answered = active_feedback.not_answered()
if not_answered == False or request.user not in not_answered:
continue
context_extras['feedback_pending'].append(active_feedback)
return context_extras
|
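For context, a context processor such as feedback_notifier only runs when it is registered in the project settings; a plausible sketch for a Django of this vintage follows, where the surrounding entries and the exact setting contents are assumptions not shown in the record.

# settings.py sketch (assumed registration, names inferred from the module path).
TEMPLATE_CONTEXT_PROCESSORS = (
    'django.contrib.auth.context_processors.auth',
    'onlineweb4.context_processors.context_settings',
    'onlineweb4.context_processors.feedback_notifier',
)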
ce82161dfcc1aa95febe601e331b8ba7044565ff
|
server/rest/twofishes.py
|
server/rest/twofishes.py
|
import requests
from girder.api import access
from girder.api.describe import Description
from girder.api.rest import Resource
class TwoFishes(Resource):
def __init__(self):
self.resourceName = 'minerva_geocoder'
self.route('GET', (), self.geocode)
@access.public
def geocode(self, params):
r = requests.get(params['twofishes'],
params={'query': params['location'],
'responseIncludes': 'WKT_GEOMETRY'})
return r.json()
geocode.description = (
Description('Get geojson for a given location name')
.param('twofishes', 'Twofishes url')
.param('location', 'Location name to get a geojson')
)
|
import requests
from girder.api import access
from girder.api.describe import Description
from girder.api.rest import Resource
class TwoFishes(Resource):
def __init__(self):
self.resourceName = 'minerva_geocoder'
self.route('GET', (), self.geocode)
self.route('GET', ('autocomplete',), self.autocomplete)
@access.public
def geocode(self, params):
r = requests.get(params['twofishes'],
params={'query': params['location'],
'responseIncludes': 'WKT_GEOMETRY'})
return r.json()
geocode.description = (
Description('Get geojson for a given location name')
.param('twofishes', 'Twofishes url')
.param('location', 'Location name to get a geojson')
)
@access.public
def autocomplete(self, params):
r = requests.get(params['twofishes'],
params={'autocomplete': True,
'query': params['location'],
'maxInterpretations': 10,
'autocompleteBias': None})
return [i['feature']['matchedName'] for i in r.json()['interpretations']]
autocomplete.description = (
Description('Autocomplete result for a given location name')
.param('twofishes', 'Twofishes url')
.param('location', 'Location name to autocomplete')
)
|
Add an endpoint which returns autocompleted results
|
Add an endpoint which returns autocompleted results
|
Python
|
apache-2.0
|
Kitware/minerva,Kitware/minerva,Kitware/minerva
|
import requests
from girder.api import access
from girder.api.describe import Description
from girder.api.rest import Resource
+
class TwoFishes(Resource):
def __init__(self):
self.resourceName = 'minerva_geocoder'
self.route('GET', (), self.geocode)
+ self.route('GET', ('autocomplete',), self.autocomplete)
@access.public
def geocode(self, params):
r = requests.get(params['twofishes'],
params={'query': params['location'],
'responseIncludes': 'WKT_GEOMETRY'})
return r.json()
geocode.description = (
Description('Get geojson for a given location name')
.param('twofishes', 'Twofishes url')
.param('location', 'Location name to get a geojson')
)
+ @access.public
+ def autocomplete(self, params):
+ r = requests.get(params['twofishes'],
+ params={'autocomplete': True,
+ 'query': params['location'],
+ 'maxInterpretations': 10,
+ 'autocompleteBias': None})
+
+ return [i['feature']['matchedName'] for i in r.json()['interpretations']]
+
+ autocomplete.description = (
+ Description('Autocomplete result for a given location name')
+ .param('twofishes', 'Twofishes url')
+ .param('location', 'Location name to autocomplete')
+ )
+
|
Add an endpoint which returns autocompleted results
|
## Code Before:
import requests
from girder.api import access
from girder.api.describe import Description
from girder.api.rest import Resource
class TwoFishes(Resource):
def __init__(self):
self.resourceName = 'minerva_geocoder'
self.route('GET', (), self.geocode)
@access.public
def geocode(self, params):
r = requests.get(params['twofishes'],
params={'query': params['location'],
'responseIncludes': 'WKT_GEOMETRY'})
return r.json()
geocode.description = (
Description('Get geojson for a given location name')
.param('twofishes', 'Twofishes url')
.param('location', 'Location name to get a geojson')
)
## Instruction:
Add an endpoint which returns autocompleted results
## Code After:
import requests
from girder.api import access
from girder.api.describe import Description
from girder.api.rest import Resource
class TwoFishes(Resource):
def __init__(self):
self.resourceName = 'minerva_geocoder'
self.route('GET', (), self.geocode)
self.route('GET', ('autocomplete',), self.autocomplete)
@access.public
def geocode(self, params):
r = requests.get(params['twofishes'],
params={'query': params['location'],
'responseIncludes': 'WKT_GEOMETRY'})
return r.json()
geocode.description = (
Description('Get geojson for a given location name')
.param('twofishes', 'Twofishes url')
.param('location', 'Location name to get a geojson')
)
@access.public
def autocomplete(self, params):
r = requests.get(params['twofishes'],
params={'autocomplete': True,
'query': params['location'],
'maxInterpretations': 10,
'autocompleteBias': None})
return [i['feature']['matchedName'] for i in r.json()['interpretations']]
autocomplete.description = (
Description('Autocomplete result for a given location name')
.param('twofishes', 'Twofishes url')
.param('location', 'Location name to autocomplete')
)
|
import requests
from girder.api import access
from girder.api.describe import Description
from girder.api.rest import Resource
+
class TwoFishes(Resource):
def __init__(self):
self.resourceName = 'minerva_geocoder'
self.route('GET', (), self.geocode)
+ self.route('GET', ('autocomplete',), self.autocomplete)
@access.public
def geocode(self, params):
r = requests.get(params['twofishes'],
params={'query': params['location'],
'responseIncludes': 'WKT_GEOMETRY'})
return r.json()
geocode.description = (
Description('Get geojson for a given location name')
.param('twofishes', 'Twofishes url')
.param('location', 'Location name to get a geojson')
)
+
+ @access.public
+ def autocomplete(self, params):
+ r = requests.get(params['twofishes'],
+ params={'autocomplete': True,
+ 'query': params['location'],
+ 'maxInterpretations': 10,
+ 'autocompleteBias': None})
+
+ return [i['feature']['matchedName'] for i in r.json()['interpretations']]
+
+ autocomplete.description = (
+ Description('Autocomplete result for a given location name')
+ .param('twofishes', 'Twofishes url')
+ .param('location', 'Location name to autocomplete')
+ )
|
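As a usage illustration, the new endpoint can be exercised with a plain HTTP client; the host, port, and API prefix below are assumptions, while the query parameters mirror the handler above.

# Hypothetical call against a local Girder instance running the plugin.
import requests

r = requests.get('http://localhost:8080/api/v1/minerva_geocoder/autocomplete',
                 params={'twofishes': 'http://twofishes.example.com/search',
                         'location': 'Bould'})
print(r.json())  # e.g. a list of matched place names such as ['Boulder', ...]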
a55b96a7d64643af6d2adcd6a15fe3348c5d1c41
|
dbaas/workflow/settings.py
|
dbaas/workflow/settings.py
|
DEPLOY_MYSQL_WORKFLOW = (
'util.gen_names',
'util.gen_dbinfra',
'dbaas_cloudstack.create_vm'
)
DEPLOY_VIRTUALMACHINE = (
'workflow.steps.build_databaseinfra.BuildDatabaseInfra',
'workflow.steps.create_virtualmachines.CreateVirtualMachine',
)
DEPLOY_MONGO_WORKFLOW = (
'util.gen_names',
'util.gen_dbinfra',
'dbaas_cloudstack.create_vm'
)
|
DEPLOY_MYSQL_WORKFLOW = (
'util.gen_names',
'util.gen_dbinfra',
'dbaas_cloudstack.create_vm'
)
DEPLOY_VIRTUALMACHINE = (
'workflow.steps.build_databaseinfra.BuildDatabaseInfra',
'workflow.steps.create_virtualmachines.CreateVirtualMachine',
'workflow.steps.create_dns.CreateDns'
)
DEPLOY_MONGO_WORKFLOW = (
'util.gen_names',
'util.gen_dbinfra',
'dbaas_cloudstack.create_vm'
)
|
Add create dns on main workflow
|
Add create dns on main workflow
|
Python
|
bsd-3-clause
|
globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service
|
DEPLOY_MYSQL_WORKFLOW = (
'util.gen_names',
'util.gen_dbinfra',
'dbaas_cloudstack.create_vm'
)
DEPLOY_VIRTUALMACHINE = (
'workflow.steps.build_databaseinfra.BuildDatabaseInfra',
'workflow.steps.create_virtualmachines.CreateVirtualMachine',
+ 'workflow.steps.create_dns.CreateDns'
)
DEPLOY_MONGO_WORKFLOW = (
'util.gen_names',
'util.gen_dbinfra',
'dbaas_cloudstack.create_vm'
)
|
Add create dns on main workflow
|
## Code Before:
DEPLOY_MYSQL_WORKFLOW = (
'util.gen_names',
'util.gen_dbinfra',
'dbaas_cloudstack.create_vm'
)
DEPLOY_VIRTUALMACHINE = (
'workflow.steps.build_databaseinfra.BuildDatabaseInfra',
'workflow.steps.create_virtualmachines.CreateVirtualMachine',
)
DEPLOY_MONGO_WORKFLOW = (
'util.gen_names',
'util.gen_dbinfra',
'dbaas_cloudstack.create_vm'
)
## Instruction:
Add create dns on main workflow
## Code After:
DEPLOY_MYSQL_WORKFLOW = (
'util.gen_names',
'util.gen_dbinfra',
'dbaas_cloudstack.create_vm'
)
DEPLOY_VIRTUALMACHINE = (
'workflow.steps.build_databaseinfra.BuildDatabaseInfra',
'workflow.steps.create_virtualmachines.CreateVirtualMachine',
'workflow.steps.create_dns.CreateDns'
)
DEPLOY_MONGO_WORKFLOW = (
'util.gen_names',
'util.gen_dbinfra',
'dbaas_cloudstack.create_vm'
)
|
DEPLOY_MYSQL_WORKFLOW = (
'util.gen_names',
'util.gen_dbinfra',
'dbaas_cloudstack.create_vm'
)
DEPLOY_VIRTUALMACHINE = (
'workflow.steps.build_databaseinfra.BuildDatabaseInfra',
'workflow.steps.create_virtualmachines.CreateVirtualMachine',
+ 'workflow.steps.create_dns.CreateDns'
)
DEPLOY_MONGO_WORKFLOW = (
'util.gen_names',
'util.gen_dbinfra',
'dbaas_cloudstack.create_vm'
)
|
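The step entries above are dotted import paths; a generic resolver for such strings looks like the sketch below, which illustrates the convention rather than the project's actual loader.

import importlib

def load_step(dotted_path):
    """Resolve a string like 'package.module.Attr' to the named attribute."""
    module_path, _, attr = dotted_path.rpartition('.')
    return getattr(importlib.import_module(module_path), attr)

# e.g. load_step('workflow.steps.create_dns.CreateDns') would return the step class.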
3080c44c23adcb3a09fb94343da872b8b26ce9fc
|
tests/conftest.py
|
tests/conftest.py
|
"""Configuration for test environment"""
import sys
from .fixtures import *
collect_ignore = []
if sys.version_info < (3, 5):
collect_ignore.append("test_async.py")
if sys.version_info < (3, 4):
collect_ignore.append("test_coroutines.py")
|
"""Configuration for test environment"""
import sys
from .fixtures import *
|
Remove no longer necessary test ignore logic
|
Remove no longer necessary test ignore logic
|
Python
|
mit
|
timothycrosley/hug,timothycrosley/hug,timothycrosley/hug
|
"""Configuration for test environment"""
import sys
from .fixtures import *
- collect_ignore = []
-
- if sys.version_info < (3, 5):
- collect_ignore.append("test_async.py")
-
- if sys.version_info < (3, 4):
- collect_ignore.append("test_coroutines.py")
-
|
Remove no longer necessary test ignore logic
|
## Code Before:
"""Configuration for test environment"""
import sys
from .fixtures import *
collect_ignore = []
if sys.version_info < (3, 5):
collect_ignore.append("test_async.py")
if sys.version_info < (3, 4):
collect_ignore.append("test_coroutines.py")
## Instruction:
Remove no longer necessary test ignore logic
## Code After:
"""Configuration for test environment"""
import sys
from .fixtures import *
|
"""Configuration for test environment"""
import sys
from .fixtures import *
-
- collect_ignore = []
-
- if sys.version_info < (3, 5):
- collect_ignore.append("test_async.py")
-
- if sys.version_info < (3, 4):
- collect_ignore.append("test_coroutines.py")
|
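For background, collect_ignore is a list that pytest reads from conftest.py at collection time; the removed guard was only needed while pre-3.5 interpreters were supported. Should version gating ever return, the same idiom still applies (the file name below is hypothetical):

import sys

collect_ignore = []
if sys.version_info < (3, 6):
    collect_ignore.append("test_fstrings.py")  # skip collecting syntax newer than the interpreter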
260daaad18e4889c0e468befd46c38d02bb1316a
|
tests/test_py35/test_client.py
|
tests/test_py35/test_client.py
|
import aiohttp
async def test_async_with_session(loop):
async with aiohttp.ClientSession(loop=loop) as session:
pass
assert session.closed
|
from contextlib import suppress
import aiohttp
from aiohttp import web
async def test_async_with_session(loop):
async with aiohttp.ClientSession(loop=loop) as session:
pass
assert session.closed
async def test_close_resp_on_error_async_with_session(loop, test_server):
async def handler(request):
return web.Response()
app = web.Application(loop=loop)
app.router.add_get('/', handler)
server = await test_server(app)
async with aiohttp.ClientSession(loop=loop) as session:
with suppress(RuntimeError):
async with session.get(server.make_url('/')) as resp:
resp.content.set_exception(RuntimeError())
await resp.read()
assert len(session._connector._conns) == 0
async def test_release_resp_on_normal_exit_from_cm(loop, test_server):
async def handler(request):
return web.Response()
app = web.Application(loop=loop)
app.router.add_get('/', handler)
server = await test_server(app)
async with aiohttp.ClientSession(loop=loop) as session:
async with session.get(server.make_url('/')) as resp:
await resp.read()
assert len(session._connector._conns) == 1
|
Add tests on closing connection by error
|
Add tests on closing connection by error
|
Python
|
apache-2.0
|
AraHaanOrg/aiohttp,rutsky/aiohttp,juliatem/aiohttp,singulared/aiohttp,moden-py/aiohttp,singulared/aiohttp,hellysmile/aiohttp,z2v/aiohttp,alex-eri/aiohttp-1,z2v/aiohttp,moden-py/aiohttp,alex-eri/aiohttp-1,z2v/aiohttp,arthurdarcet/aiohttp,KeepSafe/aiohttp,arthurdarcet/aiohttp,KeepSafe/aiohttp,rutsky/aiohttp,arthurdarcet/aiohttp,hellysmile/aiohttp,singulared/aiohttp,pfreixes/aiohttp,KeepSafe/aiohttp,Eyepea/aiohttp,alex-eri/aiohttp-1,juliatem/aiohttp,moden-py/aiohttp,AraHaanOrg/aiohttp,pfreixes/aiohttp,playpauseandstop/aiohttp,rutsky/aiohttp
|
+ from contextlib import suppress
+
import aiohttp
+ from aiohttp import web
async def test_async_with_session(loop):
async with aiohttp.ClientSession(loop=loop) as session:
pass
assert session.closed
+
+ async def test_close_resp_on_error_async_with_session(loop, test_server):
+ async def handler(request):
+ return web.Response()
+
+ app = web.Application(loop=loop)
+ app.router.add_get('/', handler)
+ server = await test_server(app)
+
+ async with aiohttp.ClientSession(loop=loop) as session:
+ with suppress(RuntimeError):
+ async with session.get(server.make_url('/')) as resp:
+ resp.content.set_exception(RuntimeError())
+ await resp.read()
+
+ assert len(session._connector._conns) == 0
+
+
+ async def test_release_resp_on_normal_exit_from_cm(loop, test_server):
+ async def handler(request):
+ return web.Response()
+
+ app = web.Application(loop=loop)
+ app.router.add_get('/', handler)
+ server = await test_server(app)
+
+ async with aiohttp.ClientSession(loop=loop) as session:
+ async with session.get(server.make_url('/')) as resp:
+ await resp.read()
+
+ assert len(session._connector._conns) == 1
+
|
Add tests on closing connection by error
|
## Code Before:
import aiohttp
async def test_async_with_session(loop):
async with aiohttp.ClientSession(loop=loop) as session:
pass
assert session.closed
## Instruction:
Add tests on closing connection by error
## Code After:
from contextlib import suppress
import aiohttp
from aiohttp import web
async def test_async_with_session(loop):
async with aiohttp.ClientSession(loop=loop) as session:
pass
assert session.closed
async def test_close_resp_on_error_async_with_session(loop, test_server):
async def handler(request):
return web.Response()
app = web.Application(loop=loop)
app.router.add_get('/', handler)
server = await test_server(app)
async with aiohttp.ClientSession(loop=loop) as session:
with suppress(RuntimeError):
async with session.get(server.make_url('/')) as resp:
resp.content.set_exception(RuntimeError())
await resp.read()
assert len(session._connector._conns) == 0
async def test_release_resp_on_normal_exit_from_cm(loop, test_server):
async def handler(request):
return web.Response()
app = web.Application(loop=loop)
app.router.add_get('/', handler)
server = await test_server(app)
async with aiohttp.ClientSession(loop=loop) as session:
async with session.get(server.make_url('/')) as resp:
await resp.read()
assert len(session._connector._conns) == 1
|
+ from contextlib import suppress
+
import aiohttp
+ from aiohttp import web
async def test_async_with_session(loop):
async with aiohttp.ClientSession(loop=loop) as session:
pass
assert session.closed
+
+
+ async def test_close_resp_on_error_async_with_session(loop, test_server):
+ async def handler(request):
+ return web.Response()
+
+ app = web.Application(loop=loop)
+ app.router.add_get('/', handler)
+ server = await test_server(app)
+
+ async with aiohttp.ClientSession(loop=loop) as session:
+ with suppress(RuntimeError):
+ async with session.get(server.make_url('/')) as resp:
+ resp.content.set_exception(RuntimeError())
+ await resp.read()
+
+ assert len(session._connector._conns) == 0
+
+
+ async def test_release_resp_on_normal_exit_from_cm(loop, test_server):
+ async def handler(request):
+ return web.Response()
+
+ app = web.Application(loop=loop)
+ app.router.add_get('/', handler)
+ server = await test_server(app)
+
+ async with aiohttp.ClientSession(loop=loop) as session:
+ async with session.get(server.make_url('/')) as resp:
+ await resp.read()
+
+ assert len(session._connector._conns) == 1
|
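The two new tests pin down connector behaviour: a response whose body errors out closes its connection, while a cleanly consumed one is released back to the pool. A standalone sketch of the second check, assuming a reachable URL and poking the same private attribute the tests use:

import asyncio
import aiohttp

async def check(url):
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            await resp.read()  # fully reading the body lets the connection be released
        # 1 means the connection was kept for reuse rather than closed
        print(len(session._connector._conns))

asyncio.get_event_loop().run_until_complete(check('http://example.com/'))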
be23c953f8f27a8d178022d3ecb44f461100bbc5
|
tests/__init__.py
|
tests/__init__.py
|
"""Tests for running TopoFlow components in CMI."""
import os
def locate_topoflow(cache_dir):
for x in os.listdir(cache_dir):
if x.startswith('topoflow'):
return x
root_dir = '/home/csdms/wmt/topoflow.0'
cache_dir = os.path.join(root_dir, 'cache')
topoflow_dir = locate_topoflow(cache_dir)
example_dir = os.path.join(cache_dir, topoflow_dir,
'topoflow', 'examples', 'Treynor_Iowa')
|
"""Tests for running TopoFlow components in CMI."""
import os
def locate_topoflow(cache_dir):
for x in os.listdir(cache_dir):
if x.startswith('topoflow'):
return x
root_dir = '/home/csdms/wmt/topoflow.0'
cache_dir = os.path.join(root_dir, 'cache')
topoflow_dir = locate_topoflow(cache_dir)
example_dir = os.path.join(cache_dir, topoflow_dir,
'topoflow', 'examples', 'Treynor_Iowa')
# Used by tests for D8 and Erode components.
data_dir = os.path.join(os.path.abspath('..'), 'data')
test_dir = os.path.dirname(__file__)
|
Add path to data directory
|
Add path to data directory
|
Python
|
mit
|
mdpiper/topoflow-cmi-testing
|
"""Tests for running TopoFlow components in CMI."""
import os
def locate_topoflow(cache_dir):
for x in os.listdir(cache_dir):
if x.startswith('topoflow'):
return x
root_dir = '/home/csdms/wmt/topoflow.0'
cache_dir = os.path.join(root_dir, 'cache')
topoflow_dir = locate_topoflow(cache_dir)
example_dir = os.path.join(cache_dir, topoflow_dir,
'topoflow', 'examples', 'Treynor_Iowa')
+ # Used by tests for D8 and Erode components.
+ data_dir = os.path.join(os.path.abspath('..'), 'data')
+ test_dir = os.path.dirname(__file__)
+
|
Add path to data directory
|
## Code Before:
"""Tests for running TopoFlow components in CMI."""
import os
def locate_topoflow(cache_dir):
for x in os.listdir(cache_dir):
if x.startswith('topoflow'):
return x
root_dir = '/home/csdms/wmt/topoflow.0'
cache_dir = os.path.join(root_dir, 'cache')
topoflow_dir = locate_topoflow(cache_dir)
example_dir = os.path.join(cache_dir, topoflow_dir,
'topoflow', 'examples', 'Treynor_Iowa')
## Instruction:
Add path to data directory
## Code After:
"""Tests for running TopoFlow components in CMI."""
import os
def locate_topoflow(cache_dir):
for x in os.listdir(cache_dir):
if x.startswith('topoflow'):
return x
root_dir = '/home/csdms/wmt/topoflow.0'
cache_dir = os.path.join(root_dir, 'cache')
topoflow_dir = locate_topoflow(cache_dir)
example_dir = os.path.join(cache_dir, topoflow_dir,
'topoflow', 'examples', 'Treynor_Iowa')
# Used by tests for D8 and Erode components.
data_dir = os.path.join(os.path.abspath('..'), 'data')
test_dir = os.path.dirname(__file__)
|
"""Tests for running TopoFlow components in CMI."""
import os
def locate_topoflow(cache_dir):
for x in os.listdir(cache_dir):
if x.startswith('topoflow'):
return x
root_dir = '/home/csdms/wmt/topoflow.0'
cache_dir = os.path.join(root_dir, 'cache')
topoflow_dir = locate_topoflow(cache_dir)
example_dir = os.path.join(cache_dir, topoflow_dir,
'topoflow', 'examples', 'Treynor_Iowa')
+
+ # Used by tests for D8 and Erode components.
+ data_dir = os.path.join(os.path.abspath('..'), 'data')
+ test_dir = os.path.dirname(__file__)
|
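One caveat worth noting: data_dir above resolves against the current working directory, so it only points at ../data when the tests are launched from the package root. A CWD-independent variant, offered as a suggestion rather than the commit's choice, anchors on the module file instead:

import os

test_dir = os.path.dirname(os.path.abspath(__file__))
data_dir = os.path.join(os.path.dirname(test_dir), 'data')  # sibling 'data' directory of tests/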
0ce6ea2ca75c3839b0a1e41f0fa32e5a9816f653
|
setup.py
|
setup.py
|
from distutils.core import setup
def read(name):
from os import path
return open(path.join(path.dirname(__file__), name)).read()
setup(
name='django-facebook-auth',
version='3.3.3',
description="Authorisation app for Facebook API.",
long_description=read("README.rst"),
maintainer="Tomasz Wysocki",
maintainer_email="[email protected]",
install_requires=(
'celery',
'django<1.6',
'facepy',
),
packages=[
'facebook_auth',
'facebook_auth.migrations',
],
)
|
from distutils.core import setup
def read(name):
from os import path
return open(path.join(path.dirname(__file__), name)).read()
setup(
name='django-facebook-auth',
version='3.3.4',
description="Authorisation app for Facebook API.",
long_description=read("README.rst"),
maintainer="Tomasz Wysocki",
maintainer_email="[email protected]",
install_requires=(
'celery',
'django<1.6',
'facepy',
),
packages=[
'facebook_auth',
'facebook_auth.migrations',
'facebook_auth.management',
'facebook_auth.management.commands',
],
)
|
Add management commands to package.
|
Add management commands to package.
Change-Id: I6c35981fbe47639e72066ddd802eb4d4d4d2d4a0
Reviewed-on: http://review.pozytywnie.pl:8080/19737
Reviewed-by: Jan <[email protected]>
Tested-by: Jenkins
|
Python
|
mit
|
jgoclawski/django-facebook-auth,jgoclawski/django-facebook-auth,pozytywnie/django-facebook-auth,pozytywnie/django-facebook-auth
|
from distutils.core import setup
def read(name):
from os import path
return open(path.join(path.dirname(__file__), name)).read()
setup(
name='django-facebook-auth',
- version='3.3.3',
+ version='3.3.4',
description="Authorisation app for Facebook API.",
long_description=read("README.rst"),
maintainer="Tomasz Wysocki",
maintainer_email="[email protected]",
install_requires=(
'celery',
'django<1.6',
'facepy',
),
packages=[
'facebook_auth',
'facebook_auth.migrations',
+ 'facebook_auth.management',
+ 'facebook_auth.management.commands',
],
)
|
Add management commands to package.
|
## Code Before:
from distutils.core import setup
def read(name):
from os import path
return open(path.join(path.dirname(__file__), name)).read()
setup(
name='django-facebook-auth',
version='3.3.3',
description="Authorisation app for Facebook API.",
long_description=read("README.rst"),
maintainer="Tomasz Wysocki",
maintainer_email="[email protected]",
install_requires=(
'celery',
'django<1.6',
'facepy',
),
packages=[
'facebook_auth',
'facebook_auth.migrations',
],
)
## Instruction:
Add management commands to package.
## Code After:
from distutils.core import setup
def read(name):
from os import path
return open(path.join(path.dirname(__file__), name)).read()
setup(
name='django-facebook-auth',
version='3.3.4',
description="Authorisation app for Facebook API.",
long_description=read("README.rst"),
maintainer="Tomasz Wysocki",
maintainer_email="[email protected]",
install_requires=(
'celery',
'django<1.6',
'facepy',
),
packages=[
'facebook_auth',
'facebook_auth.migrations',
'facebook_auth.management',
'facebook_auth.management.commands',
],
)
|
from distutils.core import setup
def read(name):
from os import path
return open(path.join(path.dirname(__file__), name)).read()
setup(
name='django-facebook-auth',
- version='3.3.3',
? ^
+ version='3.3.4',
? ^
description="Authorisation app for Facebook API.",
long_description=read("README.rst"),
maintainer="Tomasz Wysocki",
maintainer_email="[email protected]",
install_requires=(
'celery',
'django<1.6',
'facepy',
),
packages=[
'facebook_auth',
'facebook_auth.migrations',
+ 'facebook_auth.management',
+ 'facebook_auth.management.commands',
],
)
|
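Listing every subpackage by hand is exactly the kind of omission this commit patches up; with setuptools the packages can be discovered instead. A sketch under the assumption that moving off distutils is acceptable for this project:

from setuptools import setup, find_packages

setup(
    name='django-facebook-auth',
    version='3.3.4',
    packages=find_packages(include=['facebook_auth', 'facebook_auth.*']),
)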
ae2be1dc39baa8f8cd73e574d384619290b0c707
|
tests/api/views/users/read_test.py
|
tests/api/views/users/read_test.py
|
from tests.data import add_fixtures, users
def test_read_user(db_session, client):
john = users.john()
add_fixtures(db_session, john)
res = client.get('/users/{id}'.format(id=john.id))
assert res.status_code == 200
assert res.json == {
u'id': john.id,
u'firstName': u'John',
u'lastName': u'Doe',
u'name': u'John Doe',
u'club': None,
u'trackingCallsign': None,
u'trackingDelay': 0,
u'followers': 0,
u'following': 0,
}
def test_read_missing_user(client):
res = client.get('/users/1000000000000')
assert res.status_code == 404
def test_read_user_with_invalid_id(client):
res = client.get('/users/abc')
assert res.status_code == 404
|
from skylines.model import Follower
from tests.api import auth_for
from tests.data import add_fixtures, users
def test_read_user(db_session, client):
john = users.john()
add_fixtures(db_session, john)
res = client.get('/users/{id}'.format(id=john.id))
assert res.status_code == 200
assert res.json == {
u'id': john.id,
u'firstName': u'John',
u'lastName': u'Doe',
u'name': u'John Doe',
u'club': None,
u'trackingCallsign': None,
u'trackingDelay': 0,
u'followers': 0,
u'following': 0,
}
def test_following(db_session, client):
john = users.john()
jane = users.jane()
Follower.follow(john, jane)
add_fixtures(db_session, john, jane)
res = client.get('/users/{id}'.format(id=john.id))
assert res.status_code == 200
assert res.json['following'] == 1
res = client.get('/users/{id}'.format(id=jane.id))
assert res.status_code == 200
assert res.json['followers'] == 1
assert 'followed' not in res.json
res = client.get('/users/{id}'.format(id=jane.id), headers=auth_for(john))
assert res.status_code == 200
assert res.json['followers'] == 1
assert res.json['followed'] == True
def test_read_missing_user(client):
res = client.get('/users/1000000000000')
assert res.status_code == 404
def test_read_user_with_invalid_id(client):
res = client.get('/users/abc')
assert res.status_code == 404
|
Add more "GET /users/:id" tests
|
tests/api: Add more "GET /users/:id" tests
|
Python
|
agpl-3.0
|
Turbo87/skylines,skylines-project/skylines,shadowoneau/skylines,Harry-R/skylines,shadowoneau/skylines,RBE-Avionik/skylines,skylines-project/skylines,RBE-Avionik/skylines,Turbo87/skylines,RBE-Avionik/skylines,shadowoneau/skylines,Harry-R/skylines,Turbo87/skylines,skylines-project/skylines,Harry-R/skylines,Harry-R/skylines,shadowoneau/skylines,skylines-project/skylines,RBE-Avionik/skylines,Turbo87/skylines
|
+ from skylines.model import Follower
+ from tests.api import auth_for
from tests.data import add_fixtures, users
def test_read_user(db_session, client):
john = users.john()
add_fixtures(db_session, john)
res = client.get('/users/{id}'.format(id=john.id))
assert res.status_code == 200
assert res.json == {
u'id': john.id,
u'firstName': u'John',
u'lastName': u'Doe',
u'name': u'John Doe',
u'club': None,
u'trackingCallsign': None,
u'trackingDelay': 0,
u'followers': 0,
u'following': 0,
}
+ def test_following(db_session, client):
+ john = users.john()
+ jane = users.jane()
+ Follower.follow(john, jane)
+ add_fixtures(db_session, john, jane)
+
+ res = client.get('/users/{id}'.format(id=john.id))
+ assert res.status_code == 200
+ assert res.json['following'] == 1
+
+ res = client.get('/users/{id}'.format(id=jane.id))
+ assert res.status_code == 200
+ assert res.json['followers'] == 1
+ assert 'followed' not in res.json
+
+ res = client.get('/users/{id}'.format(id=jane.id), headers=auth_for(john))
+ assert res.status_code == 200
+ assert res.json['followers'] == 1
+ assert res.json['followed'] == True
+
+
def test_read_missing_user(client):
res = client.get('/users/1000000000000')
assert res.status_code == 404
def test_read_user_with_invalid_id(client):
res = client.get('/users/abc')
assert res.status_code == 404
|
Add more "GET /users/:id" tests
|
## Code Before:
from tests.data import add_fixtures, users
def test_read_user(db_session, client):
john = users.john()
add_fixtures(db_session, john)
res = client.get('/users/{id}'.format(id=john.id))
assert res.status_code == 200
assert res.json == {
u'id': john.id,
u'firstName': u'John',
u'lastName': u'Doe',
u'name': u'John Doe',
u'club': None,
u'trackingCallsign': None,
u'trackingDelay': 0,
u'followers': 0,
u'following': 0,
}
def test_read_missing_user(client):
res = client.get('/users/1000000000000')
assert res.status_code == 404
def test_read_user_with_invalid_id(client):
res = client.get('/users/abc')
assert res.status_code == 404
## Instruction:
Add more "GET /users/:id" tests
## Code After:
from skylines.model import Follower
from tests.api import auth_for
from tests.data import add_fixtures, users
def test_read_user(db_session, client):
john = users.john()
add_fixtures(db_session, john)
res = client.get('/users/{id}'.format(id=john.id))
assert res.status_code == 200
assert res.json == {
u'id': john.id,
u'firstName': u'John',
u'lastName': u'Doe',
u'name': u'John Doe',
u'club': None,
u'trackingCallsign': None,
u'trackingDelay': 0,
u'followers': 0,
u'following': 0,
}
def test_following(db_session, client):
john = users.john()
jane = users.jane()
Follower.follow(john, jane)
add_fixtures(db_session, john, jane)
res = client.get('/users/{id}'.format(id=john.id))
assert res.status_code == 200
assert res.json['following'] == 1
res = client.get('/users/{id}'.format(id=jane.id))
assert res.status_code == 200
assert res.json['followers'] == 1
assert 'followed' not in res.json
res = client.get('/users/{id}'.format(id=jane.id), headers=auth_for(john))
assert res.status_code == 200
assert res.json['followers'] == 1
assert res.json['followed'] == True
def test_read_missing_user(client):
res = client.get('/users/1000000000000')
assert res.status_code == 404
def test_read_user_with_invalid_id(client):
res = client.get('/users/abc')
assert res.status_code == 404
|
+ from skylines.model import Follower
+ from tests.api import auth_for
from tests.data import add_fixtures, users
def test_read_user(db_session, client):
john = users.john()
add_fixtures(db_session, john)
res = client.get('/users/{id}'.format(id=john.id))
assert res.status_code == 200
assert res.json == {
u'id': john.id,
u'firstName': u'John',
u'lastName': u'Doe',
u'name': u'John Doe',
u'club': None,
u'trackingCallsign': None,
u'trackingDelay': 0,
u'followers': 0,
u'following': 0,
}
+ def test_following(db_session, client):
+ john = users.john()
+ jane = users.jane()
+ Follower.follow(john, jane)
+ add_fixtures(db_session, john, jane)
+
+ res = client.get('/users/{id}'.format(id=john.id))
+ assert res.status_code == 200
+ assert res.json['following'] == 1
+
+ res = client.get('/users/{id}'.format(id=jane.id))
+ assert res.status_code == 200
+ assert res.json['followers'] == 1
+ assert 'followed' not in res.json
+
+ res = client.get('/users/{id}'.format(id=jane.id), headers=auth_for(john))
+ assert res.status_code == 200
+ assert res.json['followers'] == 1
+ assert res.json['followed'] == True
+
+
def test_read_missing_user(client):
res = client.get('/users/1000000000000')
assert res.status_code == 404
def test_read_user_with_invalid_id(client):
res = client.get('/users/abc')
assert res.status_code == 404
|
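The auth_for helper is imported from tests.api but not shown in the record; a plausible shape is a function that builds an Authorization header for a test user, as in the hypothetical sketch below (the real helper may differ):

from base64 import b64encode

def auth_for(user, password='secret'):
    # Hypothetical: HTTP Basic credentials for the given test user.
    token = b64encode('{}:{}'.format(user.name, password).encode()).decode()
    return {'Authorization': 'Basic ' + token}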
eedb22b1be419130ffc4a349c3ec4b83879b44bd
|
client/demo_assignments/hw1_tests.py
|
client/demo_assignments/hw1_tests.py
|
"""Tests for hw1 demo assignment."""
TEST_INFO = {
'assignment': 'hw1',
'imports': ['from hw1 import *'],
}
TESTS = [
# Test square
{
'name': ('Q1', 'q1', '1'),
'suites': [
[
['square(4)', '16'],
['square(-5)', '25'],
],
],
},
# Test double
{
'name': ('Q2', 'q2', '2'),
'suites': [
[
['double(4)', '8'],
['double(-5)', '-10'],
],
],
},
]
|
"""Tests for hw1 demo assignment."""
assignment = {
'name': 'hw1',
'imports': ['from hw1 import *'],
'version': '1.0',
# Specify tests that should not be locked
'no_lock': {
},
'tests': [
# Test square
{
# The first name is the "official" name.
'name': ['Q1', 'q1', '1'],
# No explicit point value -- each test suite counts as 1 point
'suites': [
[
{
'type': 'code', # Code question.
'input': 'square(4)',
'output': ['16'], # List of outputs, even if only one
},
{
'type': 'concept', # Concept question.
'input': """
What type of input does the square function take?
""",
'output': [
# Denote multiple choice with a list, rather than
# a string.
[
'number', # Correct choice comes first.
'string',
'None',
]
],
},
{
# If type is omitted, default type is 'code'.
'input': """
x = -5
square(-5)
""",
# Last line in a multiline input is used as the prompt.
'output': ['25'],
# Additional statuses can be included here.
'status': {
'lock': False,
}
},
],
],
},
# Test double
{
'name': ['Q2', 'q2', '2'],
# Point value specified -- points are partitioned evenly across
# suites.
'points': 4,
'suites': [
[
{
'input': 'double(4)',
'output': ['8'],
}
],
[
{
# Cases with multiple outputs: lines with expected output
# are denoted by '$ '.
'input': """
x = double(4)
$ x
$ double(x)
""",
'output': ['8', '16']
},
{
'input': """
x = double(2)
$ x
$ square(x)
""",
'output': ['4', '16'],
},
],
],
},
],
}
|
Make proposed testing format with demo assignment
|
Make proposed testing format with demo assignment
|
Python
|
apache-2.0
|
jordonwii/ok,jackzhao-mj/ok,jackzhao-mj/ok,Cal-CS-61A-Staff/ok,jordonwii/ok,Cal-CS-61A-Staff/ok,jackzhao-mj/ok,Cal-CS-61A-Staff/ok,jordonwii/ok,Cal-CS-61A-Staff/ok,Cal-CS-61A-Staff/ok,jordonwii/ok,jackzhao-mj/ok
|
"""Tests for hw1 demo assignment."""
- TEST_INFO = {
+ assignment = {
- 'assignment': 'hw1',
+ 'name': 'hw1',
- 'imports': ['from hw1 import *'],
+ 'imports': ['from hw1 import *'],
+ 'version': '1.0',
+
+ # Specify tests that should not be locked
+ 'no_lock': {
+ },
+
+ 'tests': [
+ # Test square
+ {
+ # The first name is the "official" name.
+ 'name': ['Q1', 'q1', '1'],
+ # No explicit point value -- each test suite counts as 1 point
+ 'suites': [
+ [
+ {
+ 'type': 'code', # Code question.
+ 'input': 'square(4)',
+ 'output': ['16'], # List of outputs, even if only one
+ },
+ {
+ 'type': 'concept', # Concept question.
+ 'input': """
+ What type of input does the square function take?
+ """,
+ 'output': [
+ # Denote multiple choice with a list, rather than
+ # a string.
+ [
+ 'number', # Correct choice comes first.
+ 'string',
+ 'None',
+ ]
+ ],
+ },
+ {
+ # If type is omitted, default type is 'code'.
+ 'input': """
+ x = -5
+ square(-5)
+ """,
+ # Last line in a multiline input is used as the prompt.
+ 'output': ['25'],
+ # Additional statuses can be included here.
+ 'status': {
+ 'lock': False,
+ }
+ },
+ ],
+ ],
+ },
+ # Test double
+ {
+ 'name': ['Q2', 'q2', '2'],
+ # Point value specified -- points are partitioned evenly across
+ # suites.
+ 'points': 4,
+ 'suites': [
+ [
+ {
+ 'input': 'double(4)',
+ 'output': ['8'],
+ }
+ ],
+ [
+ {
+ # Cases with multiple outputs: lines with expected output
+ # are denoted by '$ '.
+ 'input': """
+ x = double(4)
+ $ x
+ $ double(x)
+ """,
+ 'output': ['8', '16']
+ },
+ {
+ 'input': """
+ x = double(2)
+ $ x
+ $ square(x)
+ """,
+ 'output': ['4', '16'],
+ },
+ ],
+ ],
+ },
+ ],
}
- TESTS = [
-
- # Test square
- {
- 'name': ('Q1', 'q1', '1'),
- 'suites': [
- [
- ['square(4)', '16'],
- ['square(-5)', '25'],
- ],
- ],
- },
-
-
- # Test double
- {
- 'name': ('Q2', 'q2', '2'),
- 'suites': [
- [
- ['double(4)', '8'],
- ['double(-5)', '-10'],
- ],
- ],
- },
-
- ]
-
|
Make proposed testing format with demo assignment
|
## Code Before:
"""Tests for hw1 demo assignment."""
TEST_INFO = {
'assignment': 'hw1',
'imports': ['from hw1 import *'],
}
TESTS = [
# Test square
{
'name': ('Q1', 'q1', '1'),
'suites': [
[
['square(4)', '16'],
['square(-5)', '25'],
],
],
},
# Test double
{
'name': ('Q2', 'q2', '2'),
'suites': [
[
['double(4)', '8'],
['double(-5)', '-10'],
],
],
},
]
## Instruction:
Make proposed testing format with demo assignment
## Code After:
"""Tests for hw1 demo assignment."""
assignment = {
'name': 'hw1',
'imports': ['from hw1 import *'],
'version': '1.0',
# Specify tests that should not be locked
'no_lock': {
},
'tests': [
# Test square
{
# The first name is the "official" name.
'name': ['Q1', 'q1', '1'],
# No explicit point value -- each test suite counts as 1 point
'suites': [
[
{
'type': 'code', # Code question.
'input': 'square(4)',
'output': ['16'], # List of outputs, even if only one
},
{
'type': 'concept', # Concept question.
'input': """
What type of input does the square function take?
""",
'output': [
# Denote multiple choice with a list, rather than
# a string.
[
'number', # Correct choice comes first.
'string',
'None',
]
],
},
{
# If type is omitted, default type is 'code'.
'input': """
x = -5
square(-5)
""",
# Last line in a multiline input is used as the prompt.
'output': ['25'],
# Additional statuses can be included here.
'status': {
'lock': False,
}
},
],
],
},
# Test double
{
'name': ['Q2', 'q2', '2'],
# Point value specified -- points are partitioned evenly across
# suites.
'points': 4,
'suites': [
[
{
'input': 'double(4)',
'output': ['8'],
}
],
[
{
# Cases with multiple outputs: lines with expected output
# are denoted by '$ '.
'input': """
x = double(4)
$ x
$ double(x)
""",
'output': ['8', '16']
},
{
'input': """
x = double(2)
$ x
$ square(x)
""",
'output': ['4', '16'],
},
],
],
},
],
}
|
"""Tests for hw1 demo assignment."""
- TEST_INFO = {
+ assignment = {
- 'assignment': 'hw1',
? -- ----- --
+ 'name': 'hw1',
? +
- 'imports': ['from hw1 import *'],
? --
+ 'imports': ['from hw1 import *'],
- }
+ 'version': '1.0',
- TESTS = [
+ # Specify tests that should not be locked
+ 'no_lock': {
+ },
+ 'tests': [
# Test square
{
+ # The first name is the "official" name.
- 'name': ('Q1', 'q1', '1'),
? -- ^ ^
+ 'name': ['Q1', 'q1', '1'],
? ^ ^
+ # No explicit point value -- each test suite counts as 1 point
- 'suites': [
? --
+ 'suites': [
+ [
+ {
+ 'type': 'code', # Code question.
+ 'input': 'square(4)',
+ 'output': ['16'], # List of outputs, even if only one
+ },
+ {
+ 'type': 'concept', # Concept question.
+ 'input': """
+ What type of input does the square function take?
+ """,
+ 'output': [
+ # Denote multiple choice with a list, rather than
+ # a string.
- [
+ [
? ++
- ['square(4)', '16'],
- ['square(-5)', '25'],
+ 'number', # Correct choice comes first.
+ 'string',
+ 'None',
+ ]
],
+ },
+ {
+ # If type is omitted, default type is 'code'.
+ 'input': """
+ x = -5
+ square(-5)
+ """,
+ # Last line in a multiline input is used as the prompt.
+ 'output': ['25'],
+ # Additional statuses can be included here.
+ 'status': {
+ 'lock': False,
+ }
+ },
],
+ ],
},
-
-
# Test double
{
- 'name': ('Q2', 'q2', '2'),
? -- ^ ^
+ 'name': ['Q2', 'q2', '2'],
? ^ ^
+ # Point value specified -- points are partitioned evenly across
+ # suites.
+ 'points': 4,
- 'suites': [
? --
+ 'suites': [
+ [
- [
? ^^^
+ {
? ^
- ['double(4)', '8'],
- ['double(-5)', '-10'],
+ 'input': 'double(4)',
+ 'output': ['8'],
- ],
? ^^^^
+ }
? ^
],
+ [
+ {
+ # Cases with multiple outputs: lines with expected output
+ # are denoted by '$ '.
+ 'input': """
+ x = double(4)
+ $ x
+ $ double(x)
+ """,
+ 'output': ['8', '16']
+ },
+ {
+ 'input': """
+ x = double(2)
+ $ x
+ $ square(x)
+ """,
+ 'output': ['4', '16'],
+ },
+ ],
+ ],
},
-
- ]
+ ],
+ }
|
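The format above encodes expected outputs as '$ '-prefixed lines inside a multiline input string; a small sketch of how a grader might split them apart, as an illustration of the convention rather than the ok client's actual parser:

def split_case(text):
    """Separate executable lines from '$ '-prefixed expected-output prompts."""
    code, prompts = [], []
    for line in text.strip().splitlines():
        line = line.strip()
        if line.startswith('$ '):
            prompts.append(line[2:])
        elif line:
            code.append(line)
    return code, prompts

# split_case("\nx = double(4)\n$ x\n$ double(x)\n") -> (['x = double(4)'], ['x', 'double(x)'])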
e275fb1406f0a8e70bb3a9d4a50a82400f7e2c29
|
signac/gui/__init__.py
|
signac/gui/__init__.py
|
import warnings
try:
import PySide # noqa
except ImportError:
warnings.warn("Failed to import PySide. "
"gui will not be available.", ImportWarning)
def main():
"""Start signac-gui.
The gui is only available if PySide is installed."""
raise ImportError(
"You need to install PySide to use the gui.")
else:
from .gui import main
__all__ = ['main']
|
import warnings
try:
import PySide # noqa
import pymongo # noqa
except ImportError as error:
msg = "{}. The signac gui is not available.".format(error)
warnings.warn(msg, ImportWarning)
def main():
"""Start signac-gui.
The gui requires PySide and pymongo."""
raise ImportError(msg)
else:
from .gui import main
__all__ = ['main']
|
Remove hard dependency for pymongo.
|
Remove hard dependency for pymongo.
Caused by pulling the gui package into the signac namespace.
Fixes issue #24.
|
Python
|
bsd-3-clause
|
csadorf/signac,csadorf/signac
|
import warnings
try:
import PySide # noqa
+ import pymongo # noqa
- except ImportError:
+ except ImportError as error:
- warnings.warn("Failed to import PySide. "
- "gui will not be available.", ImportWarning)
+ msg = "{}. The signac gui is not available.".format(error)
+ warnings.warn(msg, ImportWarning)
def main():
"""Start signac-gui.
- The gui is only available if PySide is installed."""
+ The gui requires PySide and pymongo."""
- raise ImportError(
+ raise ImportError(msg)
- "You need to install PySide to use the gui.")
else:
from .gui import main
__all__ = ['main']
|
Remove hard dependency for pymongo.
|
## Code Before:
import warnings
try:
import PySide # noqa
except ImportError:
warnings.warn("Failed to import PySide. "
"gui will not be available.", ImportWarning)
def main():
"""Start signac-gui.
The gui is only available if PySide is installed."""
raise ImportError(
"You need to install PySide to use the gui.")
else:
from .gui import main
__all__ = ['main']
## Instruction:
Remove hard dependency for pymongo.
## Code After:
import warnings
try:
import PySide # noqa
import pymongo # noqa
except ImportError as error:
msg = "{}. The signac gui is not available.".format(error)
warnings.warn(msg, ImportWarning)
def main():
"""Start signac-gui.
The gui requires PySide and pymongo."""
raise ImportError(msg)
else:
from .gui import main
__all__ = ['main']
|
import warnings
try:
import PySide # noqa
+ import pymongo # noqa
- except ImportError:
+ except ImportError as error:
? +++++++++
- warnings.warn("Failed to import PySide. "
- "gui will not be available.", ImportWarning)
+ msg = "{}. The signac gui is not available.".format(error)
+ warnings.warn(msg, ImportWarning)
def main():
"""Start signac-gui.
- The gui is only available if PySide is installed."""
+ The gui requires PySide and pymongo."""
- raise ImportError(
+ raise ImportError(msg)
? ++++
- "You need to install PySide to use the gui.")
else:
from .gui import main
__all__ = ['main']
|
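The pattern in this commit generalizes: attempt the optional imports once, and on failure replace the public entry point with a stub that re-raises the recorded message. A compact sketch with a placeholder dependency name (hypothetical, not a real package):

import warnings

try:
    import optional_backend  # noqa (hypothetical optional dependency)
except ImportError as error:
    _msg = '{}. The feature is not available.'.format(error)
    warnings.warn(_msg, ImportWarning)

    def main():
        raise ImportError(_msg)
else:
    def main():
        return optional_backend.run()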
554e79ada3f351ecb6287b08d0f7d1c4e5a5b5f6
|
setup.py
|
setup.py
|
import sys
from distutils.core import setup
setup_args = {}
setup_args.update(dict(
name='param',
version='0.05',
description='Declarative Python programming using Parameters.',
long_description=open('README.txt').read(),
author= "IOAM",
author_email= "[email protected]",
maintainer= "IOAM",
maintainer_email= "[email protected]",
platforms=['Windows', 'Mac OS X', 'Linux'],
license='BSD',
url='http://ioam.github.com/param/',
packages = ["param"],
classifiers = [
"License :: OSI Approved :: BSD License",
# (until packaging tested)
"Development Status :: 4 - Beta",
"Programming Language :: Python :: 2.5",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Operating System :: OS Independent",
"Intended Audience :: Science/Research",
"Intended Audience :: Developers",
"Natural Language :: English",
"Topic :: Scientific/Engineering",
"Topic :: Software Development :: Libraries"]
))
if __name__=="__main__":
setup(**setup_args)
|
import sys
from distutils.core import setup
setup_args = {}
setup_args.update(dict(
name='param',
version='1.0',
description='Declarative Python programming using Parameters.',
long_description=open('README.txt').read(),
author= "IOAM",
author_email= "[email protected]",
maintainer= "IOAM",
maintainer_email= "[email protected]",
platforms=['Windows', 'Mac OS X', 'Linux'],
license='BSD',
url='http://ioam.github.com/param/',
packages = ["param"],
classifiers = [
"License :: OSI Approved :: BSD License",
"Development Status :: 5 - Production/Stable",
"Programming Language :: Python :: 2.5",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Operating System :: OS Independent",
"Intended Audience :: Science/Research",
"Intended Audience :: Developers",
"Natural Language :: English",
"Topic :: Scientific/Engineering",
"Topic :: Software Development :: Libraries"]
))
if __name__=="__main__":
setup(**setup_args)
|
Update version number to 1.0.
|
Update version number to 1.0.
|
Python
|
bsd-3-clause
|
ceball/param,ioam/param
|
import sys
from distutils.core import setup
setup_args = {}
setup_args.update(dict(
name='param',
- version='0.05',
+ version='1.0',
description='Declarative Python programming using Parameters.',
long_description=open('README.txt').read(),
author= "IOAM",
author_email= "[email protected]",
maintainer= "IOAM",
maintainer_email= "[email protected]",
platforms=['Windows', 'Mac OS X', 'Linux'],
license='BSD',
url='http://ioam.github.com/param/',
packages = ["param"],
classifiers = [
"License :: OSI Approved :: BSD License",
- # (until packaging tested)
- "Development Status :: 4 - Beta",
+ "Development Status :: 5 - Production/Stable",
"Programming Language :: Python :: 2.5",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Operating System :: OS Independent",
"Intended Audience :: Science/Research",
"Intended Audience :: Developers",
"Natural Language :: English",
"Topic :: Scientific/Engineering",
"Topic :: Software Development :: Libraries"]
))
if __name__=="__main__":
setup(**setup_args)
|
Update version number to 1.0.
|
## Code Before:
import sys
from distutils.core import setup
setup_args = {}
setup_args.update(dict(
name='param',
version='0.05',
description='Declarative Python programming using Parameters.',
long_description=open('README.txt').read(),
author= "IOAM",
author_email= "[email protected]",
maintainer= "IOAM",
maintainer_email= "[email protected]",
platforms=['Windows', 'Mac OS X', 'Linux'],
license='BSD',
url='http://ioam.github.com/param/',
packages = ["param"],
classifiers = [
"License :: OSI Approved :: BSD License",
# (until packaging tested)
"Development Status :: 4 - Beta",
"Programming Language :: Python :: 2.5",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Operating System :: OS Independent",
"Intended Audience :: Science/Research",
"Intended Audience :: Developers",
"Natural Language :: English",
"Topic :: Scientific/Engineering",
"Topic :: Software Development :: Libraries"]
))
if __name__=="__main__":
setup(**setup_args)
## Instruction:
Update version number to 1.0.
## Code After:
import sys
from distutils.core import setup
setup_args = {}
setup_args.update(dict(
name='param',
version='1.0',
description='Declarative Python programming using Parameters.',
long_description=open('README.txt').read(),
author= "IOAM",
author_email= "[email protected]",
maintainer= "IOAM",
maintainer_email= "[email protected]",
platforms=['Windows', 'Mac OS X', 'Linux'],
license='BSD',
url='http://ioam.github.com/param/',
packages = ["param"],
classifiers = [
"License :: OSI Approved :: BSD License",
"Development Status :: 5 - Production/Stable",
"Programming Language :: Python :: 2.5",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Operating System :: OS Independent",
"Intended Audience :: Science/Research",
"Intended Audience :: Developers",
"Natural Language :: English",
"Topic :: Scientific/Engineering",
"Topic :: Software Development :: Libraries"]
))
if __name__=="__main__":
setup(**setup_args)
|
import sys
from distutils.core import setup
setup_args = {}
setup_args.update(dict(
name='param',
- version='0.05',
? ^ -
+ version='1.0',
? ^
description='Declarative Python programming using Parameters.',
long_description=open('README.txt').read(),
author= "IOAM",
author_email= "[email protected]",
maintainer= "IOAM",
maintainer_email= "[email protected]",
platforms=['Windows', 'Mac OS X', 'Linux'],
license='BSD',
url='http://ioam.github.com/param/',
packages = ["param"],
classifiers = [
"License :: OSI Approved :: BSD License",
- # (until packaging tested)
- "Development Status :: 4 - Beta",
? ^ ^^
+ "Development Status :: 5 - Production/Stable",
? ^ ^^^^^^^^^^^^ +++
"Programming Language :: Python :: 2.5",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Operating System :: OS Independent",
"Intended Audience :: Science/Research",
"Intended Audience :: Developers",
"Natural Language :: English",
"Topic :: Scientific/Engineering",
"Topic :: Software Development :: Libraries"]
))
if __name__=="__main__":
setup(**setup_args)
|
131033fa3ab170ac2a66c1dd89074ea74702fb52
|
icekit/page_types/articles/migrations/0002_auto_20161012_2231.py
|
icekit/page_types/articles/migrations/0002_auto_20161012_2231.py
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('icekit_articles', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='article',
name='slug',
field=models.SlugField(max_length=255, default='woo'),
preserve_default=False,
),
migrations.AddField(
model_name='article',
name='title',
field=models.CharField(max_length=255, default='woo'),
preserve_default=False,
),
]
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('icekit_articles', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='article',
name='slug',
field=models.SlugField(max_length=255),
preserve_default=False,
),
migrations.AddField(
model_name='article',
name='title',
field=models.CharField(max_length=255),
preserve_default=False,
),
]
|
Remove vestigial (?) "woo" default for article slug and title fields.
|
Remove vestigial (?) "woo" default for article slug and title fields.
|
Python
|
mit
|
ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('icekit_articles', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='article',
name='slug',
- field=models.SlugField(max_length=255, default='woo'),
+ field=models.SlugField(max_length=255),
preserve_default=False,
),
migrations.AddField(
model_name='article',
name='title',
- field=models.CharField(max_length=255, default='woo'),
+ field=models.CharField(max_length=255),
preserve_default=False,
),
]
|
Remove vestigial (?) "woo" default for article slug and title fields.
|
## Code Before:
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('icekit_articles', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='article',
name='slug',
field=models.SlugField(max_length=255, default='woo'),
preserve_default=False,
),
migrations.AddField(
model_name='article',
name='title',
field=models.CharField(max_length=255, default='woo'),
preserve_default=False,
),
]
## Instruction:
Remove vestigial (?) "woo" default for article slug and title fields.
## Code After:
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('icekit_articles', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='article',
name='slug',
field=models.SlugField(max_length=255),
preserve_default=False,
),
migrations.AddField(
model_name='article',
name='title',
field=models.CharField(max_length=255),
preserve_default=False,
),
]
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('icekit_articles', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='article',
name='slug',
- field=models.SlugField(max_length=255, default='woo'),
? ---------------
+ field=models.SlugField(max_length=255),
preserve_default=False,
),
migrations.AddField(
model_name='article',
name='title',
- field=models.CharField(max_length=255, default='woo'),
? ---------------
+ field=models.CharField(max_length=255),
preserve_default=False,
),
]
|
88cd50a331c20fb65c495e92cc93867f03cd3826
|
lib/exp/featx/__init__.py
|
lib/exp/featx/__init__.py
|
__all__ = []
from lib.exp.featx.base import Feats
from lib.exp.tools.slider import Slider
from lib.exp.tools.video import Video
from lib.exp.pre import Reducer
class Featx(Feats):
def __init__(self, root, name):
Feats.__init__(self, root, name)
def get_slide_feats(self):
ss = Slider(self.root, self.name)
imgl = ss.get_slides(None, gray=True, resize=True)
self.feats(imgl, prefix="s")
def get_frame_feats(self):
rr = Reducer(self.root, self.name)
vv = Video(self.root, self.name)
imgl = vv.get_frames(rr.frame_ids(), gray=True)
self.feats(imgl, prefix="f")
def get_feats_pair(self, sid, fid):
"""
Get features by given `slide`, `frame` pairs
"""
sk = self.load("s_{:03d}_kps".format(sid))
sd = self.load("s_{:03d}_des".format(sid))
fk = self.load("f_{:03d}_kps".format(fid))
fd = self.load("f_{:03d}_des".format(fid))
return dict(sk=sk, sd=sd, fk=fk, fd=fd)
|
__all__ = []
from lib.exp.featx.base import Feats
from lib.exp.tools.slider import Slider
from lib.exp.tools.video import Video
from lib.exp.pre import Reducer
class Featx(Feats):
def __init__(self, root, name):
Feats.__init__(self, root, name)
def get_slide_feats(self):
ss = Slider(self.root, self.name)
imgl = ss.get_slides(None, gray=True, resize=True)
self.feats(imgl, prefix="s")
def get_frame_feats(self):
rr = Reducer(self.root, self.name)
vv = Video(self.root, self.name)
imgl = vv.get_frames(rr.frame_ids(), gray=True)
self.feats(imgl, prefix="f")
def load_feats(self, key):
fd = self.load(key)
if fd is None:
return []
return fd
def get_feats_pair(self, sid, fid):
"""
Get features by given `slide`, `frame` pairs
"""
sk = self.load_feats("s_{:03d}_kps".format(sid))
sd = self.load_feats("s_{:03d}_des".format(sid))
fk = self.load_feats("f_{:03d}_kps".format(fid))
fd = self.load_feats("f_{:03d}_des".format(fid))
return dict(sk=sk, sd=sd, fk=fk, fd=fd)
|
Load feats with zero length
|
Load feats with zero length
|
Python
|
agpl-3.0
|
speed-of-light/pyslider
|
__all__ = []
from lib.exp.featx.base import Feats
from lib.exp.tools.slider import Slider
from lib.exp.tools.video import Video
from lib.exp.pre import Reducer
class Featx(Feats):
def __init__(self, root, name):
Feats.__init__(self, root, name)
def get_slide_feats(self):
ss = Slider(self.root, self.name)
imgl = ss.get_slides(None, gray=True, resize=True)
self.feats(imgl, prefix="s")
def get_frame_feats(self):
rr = Reducer(self.root, self.name)
vv = Video(self.root, self.name)
imgl = vv.get_frames(rr.frame_ids(), gray=True)
self.feats(imgl, prefix="f")
+ def load_feats(self, key):
+ fd = self.load(key)
+ if fd is None:
+ return []
+ return fd
+
def get_feats_pair(self, sid, fid):
"""
Get features by given `slide`, `frame` pairs
"""
- sk = self.load("s_{:03d}_kps".format(sid))
+ sk = self.load_feats("s_{:03d}_kps".format(sid))
- sd = self.load("s_{:03d}_des".format(sid))
+ sd = self.load_feats("s_{:03d}_des".format(sid))
- fk = self.load("f_{:03d}_kps".format(fid))
+ fk = self.load_feats("f_{:03d}_kps".format(fid))
- fd = self.load("f_{:03d}_des".format(fid))
+ fd = self.load_feats("f_{:03d}_des".format(fid))
return dict(sk=sk, sd=sd, fk=fk, fd=fd)
|
Load feats with zero length
|
## Code Before:
__all__ = []
from lib.exp.featx.base import Feats
from lib.exp.tools.slider import Slider
from lib.exp.tools.video import Video
from lib.exp.pre import Reducer
class Featx(Feats):
def __init__(self, root, name):
Feats.__init__(self, root, name)
def get_slide_feats(self):
ss = Slider(self.root, self.name)
imgl = ss.get_slides(None, gray=True, resize=True)
self.feats(imgl, prefix="s")
def get_frame_feats(self):
rr = Reducer(self.root, self.name)
vv = Video(self.root, self.name)
imgl = vv.get_frames(rr.frame_ids(), gray=True)
self.feats(imgl, prefix="f")
def get_feats_pair(self, sid, fid):
"""
Get features by given `slide`, `frame` pairs
"""
sk = self.load("s_{:03d}_kps".format(sid))
sd = self.load("s_{:03d}_des".format(sid))
fk = self.load("f_{:03d}_kps".format(fid))
fd = self.load("f_{:03d}_des".format(fid))
return dict(sk=sk, sd=sd, fk=fk, fd=fd)
## Instruction:
Load feats with zero length
## Code After:
__all__ = []
from lib.exp.featx.base import Feats
from lib.exp.tools.slider import Slider
from lib.exp.tools.video import Video
from lib.exp.pre import Reducer
class Featx(Feats):
def __init__(self, root, name):
Feats.__init__(self, root, name)
def get_slide_feats(self):
ss = Slider(self.root, self.name)
imgl = ss.get_slides(None, gray=True, resize=True)
self.feats(imgl, prefix="s")
def get_frame_feats(self):
rr = Reducer(self.root, self.name)
vv = Video(self.root, self.name)
imgl = vv.get_frames(rr.frame_ids(), gray=True)
self.feats(imgl, prefix="f")
def load_feats(self, key):
fd = self.load(key)
if fd is None:
return []
return fd
def get_feats_pair(self, sid, fid):
"""
Get features by given `slide`, `frame` pairs
"""
sk = self.load_feats("s_{:03d}_kps".format(sid))
sd = self.load_feats("s_{:03d}_des".format(sid))
fk = self.load_feats("f_{:03d}_kps".format(fid))
fd = self.load_feats("f_{:03d}_des".format(fid))
return dict(sk=sk, sd=sd, fk=fk, fd=fd)
|
__all__ = []
from lib.exp.featx.base import Feats
from lib.exp.tools.slider import Slider
from lib.exp.tools.video import Video
from lib.exp.pre import Reducer
class Featx(Feats):
def __init__(self, root, name):
Feats.__init__(self, root, name)
def get_slide_feats(self):
ss = Slider(self.root, self.name)
imgl = ss.get_slides(None, gray=True, resize=True)
self.feats(imgl, prefix="s")
def get_frame_feats(self):
rr = Reducer(self.root, self.name)
vv = Video(self.root, self.name)
imgl = vv.get_frames(rr.frame_ids(), gray=True)
self.feats(imgl, prefix="f")
+ def load_feats(self, key):
+ fd = self.load(key)
+ if fd is None:
+ return []
+ return fd
+
def get_feats_pair(self, sid, fid):
"""
Get features by given `slide`, `frame` pairs
"""
- sk = self.load("s_{:03d}_kps".format(sid))
+ sk = self.load_feats("s_{:03d}_kps".format(sid))
? ++++++
- sd = self.load("s_{:03d}_des".format(sid))
+ sd = self.load_feats("s_{:03d}_des".format(sid))
? ++++++
- fk = self.load("f_{:03d}_kps".format(fid))
+ fk = self.load_feats("f_{:03d}_kps".format(fid))
? ++++++
- fd = self.load("f_{:03d}_des".format(fid))
+ fd = self.load_feats("f_{:03d}_des".format(fid))
? ++++++
return dict(sk=sk, sd=sd, fk=fk, fd=fd)
|
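A tiny self-contained sketch of the guard introduced by load_feats: wrap a loader that may return None so callers always receive a sequence (_STORE here is a hypothetical stand-in for the real persistence layer):

_STORE = {'s_001_kps': [1, 2, 3]}  # hypothetical backing store

def load(key):
    # Mirrors the behaviour relied on above: unknown keys yield None.
    return _STORE.get(key)

def load_feats(key):
    fd = load(key)
    if fd is None:
        return []
    return fd

print(load_feats('s_001_kps'))  # [1, 2, 3]
print(load_feats('missing'))    # [] rather than None, so len() and iteration work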
5702672ab40ef23089c7a2dfee22aaf539b19a54
|
dpaste/settings/tests.py
|
dpaste/settings/tests.py
|
from .base import *
|
from .base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
|
Use in-memory sqlite db for testing.
|
Use in-memory sqlite db for testing.
|
Python
|
mit
|
bartTC/dpaste,bartTC/dpaste,bartTC/dpaste
|
from .base import *
+ DATABASES = {
+ 'default': {
+ 'ENGINE': 'django.db.backends.sqlite3',
+ 'NAME': ':memory:',
+ }
+ }
+
|
Use in-memory sqlite db for testing.
|
## Code Before:
from .base import *
## Instruction:
Use in-memory sqlite db for testing.
## Code After:
from .base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
|
from .base import *
+
+ DATABASES = {
+ 'default': {
+ 'ENGINE': 'django.db.backends.sqlite3',
+ 'NAME': ':memory:',
+ }
+ }
|
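Why an in-memory SQLite database suits test settings can be shown without Django at all; with the standard sqlite3 module, ':memory:' creates a database that lives only as long as its connection, so every test run starts clean:

import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE snippet (id INTEGER PRIMARY KEY, content TEXT)')
conn.execute("INSERT INTO snippet (content) VALUES ('hello')")
print(conn.execute('SELECT content FROM snippet').fetchone())  # ('hello',)
conn.close()  # the whole database disappears with the connection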
b94edbbb717313cc831fa97d3ccf9ab715ff3ade
|
testing/test_cffitsio.py
|
testing/test_cffitsio.py
|
from cffitsio import FitsFile
import os
def test_create_file(tmpdir):
filename = str(tmpdir.join('test.fits'))
f = FitsFile.create(filename)
assert os.path.isfile(filename)
|
import pytest
import cffitsio
import os
@pytest.fixture
def test_dir():
return os.path.join(
os.path.dirname(__file__),
'data')
def test_create_file(tmpdir):
filename = str(tmpdir.join('test.fits'))
f = cffitsio.FitsFile.create(filename)
assert os.path.isfile(filename)
def test_open_file(test_dir):
filename = os.path.join(test_dir, 'all.fits')
with cffitsio.open_fits(filename) as infile:
assert isinstance(infile, cffitsio.FitsFile)
|
Add test for opening file
|
Add test for opening file
|
Python
|
mit
|
mindriot101/fitsio-cffi
|
- from cffitsio import FitsFile
+ import pytest
+ import cffitsio
import os
+
+
+ @pytest.fixture
+ def test_dir():
+ return os.path.join(
+ os.path.dirname(__file__),
+ 'data')
def test_create_file(tmpdir):
filename = str(tmpdir.join('test.fits'))
- f = FitsFile.create(filename)
+ f = cffitsio.FitsFile.create(filename)
assert os.path.isfile(filename)
+
+ def test_open_file(test_dir):
+ filename = os.path.join(test_dir, 'all.fits')
+ with cffitsio.open_fits(filename) as infile:
+ assert isinstance(infile, cffitsio.FitsFile)
+
|
Add test for opening file
|
## Code Before:
from cffitsio import FitsFile
import os
def test_create_file(tmpdir):
filename = str(tmpdir.join('test.fits'))
f = FitsFile.create(filename)
assert os.path.isfile(filename)
## Instruction:
Add test for opening file
## Code After:
import pytest
import cffitsio
import os
@pytest.fixture
def test_dir():
return os.path.join(
os.path.dirname(__file__),
'data')
def test_create_file(tmpdir):
filename = str(tmpdir.join('test.fits'))
f = cffitsio.FitsFile.create(filename)
assert os.path.isfile(filename)
def test_open_file(test_dir):
filename = os.path.join(test_dir, 'all.fits')
with cffitsio.open_fits(filename) as infile:
assert isinstance(infile, cffitsio.FitsFile)
|
- from cffitsio import FitsFile
+ import pytest
+ import cffitsio
import os
+
+
+ @pytest.fixture
+ def test_dir():
+ return os.path.join(
+ os.path.dirname(__file__),
+ 'data')
def test_create_file(tmpdir):
filename = str(tmpdir.join('test.fits'))
- f = FitsFile.create(filename)
+ f = cffitsio.FitsFile.create(filename)
? +++++++++
assert os.path.isfile(filename)
+
+
+ def test_open_file(test_dir):
+ filename = os.path.join(test_dir, 'all.fits')
+ with cffitsio.open_fits(filename) as infile:
+ assert isinstance(infile, cffitsio.FitsFile)
|
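A minimal illustration of the fixture mechanism used above: pytest injects the return value of a fixture into any test that names it as a parameter, and fixtures may themselves depend on built-in fixtures such as tmpdir (file names here are illustrative):

import os
import pytest

@pytest.fixture
def data_dir(tmpdir):
    # Fixtures can build on other fixtures; tmpdir is provided by pytest.
    p = tmpdir.mkdir('data')
    p.join('all.fits').write('dummy contents')
    return str(p)

def test_file_exists(data_dir):
    assert os.path.isfile(os.path.join(data_dir, 'all.fits'))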
9a9ee99129cee92c93fbc9e2cc24b7b933d51aac
|
confirmation/migrations/0001_initial.py
|
confirmation/migrations/0001_initial.py
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Confirmation',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('object_id', models.PositiveIntegerField()),
('date_sent', models.DateTimeField(verbose_name='sent')),
('confirmation_key', models.CharField(max_length=40, verbose_name='activation key')),
('content_type', models.ForeignKey(to='contenttypes.ContentType')),
],
options={
'verbose_name': 'confirmation email',
'verbose_name_plural': 'confirmation emails',
},
bases=(models.Model,),
),
]
|
from __future__ import unicode_literals
from django.db import models, migrations
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Confirmation',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('object_id', models.PositiveIntegerField()),
('date_sent', models.DateTimeField(verbose_name='sent')),
('confirmation_key', models.CharField(max_length=40, verbose_name='activation key')),
('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')),
],
options={
'verbose_name': 'confirmation email',
'verbose_name_plural': 'confirmation emails',
},
bases=(models.Model,),
),
]
|
Add on_delete in foreign keys.
|
confirmation: Add on_delete in foreign keys.
on_delete will be a required arg for ForeignKey in Django 2.0. Set it
to models.CASCADE on models and in existing migrations if you want to
maintain the current default behavior.
See https://docs.djangoproject.com/en/1.11/ref/models/fields/#django.db.models.ForeignKey.on_delete
|
Python
|
apache-2.0
|
hackerkid/zulip,dhcrzf/zulip,Galexrt/zulip,showell/zulip,amanharitsh123/zulip,zulip/zulip,vabs22/zulip,vaidap/zulip,jackrzhang/zulip,jrowan/zulip,hackerkid/zulip,zulip/zulip,tommyip/zulip,kou/zulip,jackrzhang/zulip,brockwhittaker/zulip,brockwhittaker/zulip,tommyip/zulip,shubhamdhama/zulip,eeshangarg/zulip,shubhamdhama/zulip,brockwhittaker/zulip,Galexrt/zulip,brainwane/zulip,andersk/zulip,brockwhittaker/zulip,amanharitsh123/zulip,vabs22/zulip,synicalsyntax/zulip,tommyip/zulip,verma-varsha/zulip,kou/zulip,mahim97/zulip,rht/zulip,amanharitsh123/zulip,shubhamdhama/zulip,dhcrzf/zulip,mahim97/zulip,jrowan/zulip,andersk/zulip,timabbott/zulip,eeshangarg/zulip,synicalsyntax/zulip,brainwane/zulip,andersk/zulip,shubhamdhama/zulip,rht/zulip,rishig/zulip,synicalsyntax/zulip,synicalsyntax/zulip,Galexrt/zulip,rishig/zulip,zulip/zulip,jrowan/zulip,verma-varsha/zulip,dhcrzf/zulip,rht/zulip,synicalsyntax/zulip,tommyip/zulip,eeshangarg/zulip,amanharitsh123/zulip,timabbott/zulip,hackerkid/zulip,kou/zulip,andersk/zulip,tommyip/zulip,punchagan/zulip,hackerkid/zulip,vabs22/zulip,mahim97/zulip,punchagan/zulip,verma-varsha/zulip,showell/zulip,vaidap/zulip,jrowan/zulip,mahim97/zulip,rishig/zulip,jackrzhang/zulip,timabbott/zulip,timabbott/zulip,vabs22/zulip,kou/zulip,Galexrt/zulip,brainwane/zulip,tommyip/zulip,tommyip/zulip,rishig/zulip,showell/zulip,shubhamdhama/zulip,verma-varsha/zulip,rishig/zulip,dhcrzf/zulip,Galexrt/zulip,verma-varsha/zulip,Galexrt/zulip,timabbott/zulip,eeshangarg/zulip,jrowan/zulip,brainwane/zulip,zulip/zulip,eeshangarg/zulip,dhcrzf/zulip,rishig/zulip,brockwhittaker/zulip,punchagan/zulip,mahim97/zulip,vabs22/zulip,vabs22/zulip,andersk/zulip,jackrzhang/zulip,vaidap/zulip,eeshangarg/zulip,hackerkid/zulip,jackrzhang/zulip,zulip/zulip,shubhamdhama/zulip,mahim97/zulip,timabbott/zulip,kou/zulip,verma-varsha/zulip,rishig/zulip,hackerkid/zulip,brainwane/zulip,kou/zulip,zulip/zulip,showell/zulip,punchagan/zulip,jrowan/zulip,vaidap/zulip,punchagan/zulip,punchagan/zulip,rht/zulip,eeshangarg/zulip,kou/zulip,showell/zulip,punchagan/zulip,shubhamdhama/zulip,rht/zulip,timabbott/zulip,amanharitsh123/zulip,jackrzhang/zulip,andersk/zulip,dhcrzf/zulip,dhcrzf/zulip,rht/zulip,showell/zulip,brainwane/zulip,synicalsyntax/zulip,amanharitsh123/zulip,showell/zulip,jackrzhang/zulip,zulip/zulip,vaidap/zulip,Galexrt/zulip,hackerkid/zulip,vaidap/zulip,andersk/zulip,brockwhittaker/zulip,rht/zulip,brainwane/zulip,synicalsyntax/zulip
|
from __future__ import unicode_literals
from django.db import models, migrations
+ import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Confirmation',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('object_id', models.PositiveIntegerField()),
('date_sent', models.DateTimeField(verbose_name='sent')),
('confirmation_key', models.CharField(max_length=40, verbose_name='activation key')),
- ('content_type', models.ForeignKey(to='contenttypes.ContentType')),
+ ('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')),
],
options={
'verbose_name': 'confirmation email',
'verbose_name_plural': 'confirmation emails',
},
bases=(models.Model,),
),
]
|
Add on_delete in foreign keys.
|
## Code Before:
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Confirmation',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('object_id', models.PositiveIntegerField()),
('date_sent', models.DateTimeField(verbose_name='sent')),
('confirmation_key', models.CharField(max_length=40, verbose_name='activation key')),
('content_type', models.ForeignKey(to='contenttypes.ContentType')),
],
options={
'verbose_name': 'confirmation email',
'verbose_name_plural': 'confirmation emails',
},
bases=(models.Model,),
),
]
## Instruction:
Add on_delete in foreign keys.
## Code After:
from __future__ import unicode_literals
from django.db import models, migrations
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Confirmation',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('object_id', models.PositiveIntegerField()),
('date_sent', models.DateTimeField(verbose_name='sent')),
('confirmation_key', models.CharField(max_length=40, verbose_name='activation key')),
('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')),
],
options={
'verbose_name': 'confirmation email',
'verbose_name_plural': 'confirmation emails',
},
bases=(models.Model,),
),
]
|
from __future__ import unicode_literals
from django.db import models, migrations
+ import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Confirmation',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('object_id', models.PositiveIntegerField()),
('date_sent', models.DateTimeField(verbose_name='sent')),
('confirmation_key', models.CharField(max_length=40, verbose_name='activation key')),
- ('content_type', models.ForeignKey(to='contenttypes.ContentType')),
+ ('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')),
? +++++++++++++++++++++++++++++++++++++++++++++
],
options={
'verbose_name': 'confirmation email',
'verbose_name_plural': 'confirmation emails',
},
bases=(models.Model,),
),
]
|
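A short sketch of the rule this migration anticipates (Django 2.0 makes on_delete a required ForeignKey argument); the models are hypothetical and assume a configured Django project:

from django.db import models

class Author(models.Model):
    name = models.CharField(max_length=100)

class Book(models.Model):
    # CASCADE reproduces the old implicit default: deleting an Author
    # deletes their Books. PROTECT, SET_NULL (with null=True) and
    # DO_NOTHING are the usual alternatives.
    author = models.ForeignKey(Author, on_delete=models.CASCADE)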
5bb90727efb62525995caad3b52fd588d8b08298
|
pregnancy/urls.py
|
pregnancy/urls.py
|
from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
import contractions.views
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'pregnancy.views.home', name='home'),
# url(r'^pregnancy/', include('pregnancy.foo.urls')),
url(r'^contractions/$', contractions.views.ContractionList.as_view(), name='ContractionList'),
url(r'^update_intensity/(?P<pk>\d+)/$', contractions.views.UpdateIntensity.as_view(), name='UpdateIntensity'),
url(r'^update_intensity2/(?P<pk>\d+)/$', contractions.views.UpdateIntensity2.as_view(), name='UpdateIntensity2'),
url(r'^ContractionListTable/$', contractions.views.ContractionListTable.as_view(), name='ContractionListTable'),
url(r'^StartContraction/$', contractions.views.StartContraction.as_view(), name='StartContraction'),
url(r'^StopContraction/(?P<pk>\d+)/$', contractions.views.StopContraction.as_view(), name='StopContraction'),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
|
from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
import contractions.views
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'pregnancy.views.home', name='home'),
# url(r'^pregnancy/', include('pregnancy.foo.urls')),
url(r'^$', contractions.views.ContractionList.as_view(), name='ContractionList'),
url(r'^contractions/$', contractions.views.ContractionList.as_view(), name='ContractionList'),
url(r'^update_intensity/(?P<pk>\d+)/$', contractions.views.UpdateIntensity.as_view(), name='UpdateIntensity'),
url(r'^update_intensity2/(?P<pk>\d+)/$', contractions.views.UpdateIntensity2.as_view(), name='UpdateIntensity2'),
url(r'^ContractionListTable/$', contractions.views.ContractionListTable.as_view(), name='ContractionListTable'),
url(r'^StartContraction/$', contractions.views.StartContraction.as_view(), name='StartContraction'),
url(r'^StopContraction/(?P<pk>\d+)/$', contractions.views.StopContraction.as_view(), name='StopContraction'),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
|
Update url to point / to the contractions app
|
Update url to point / to the contractions app
|
Python
|
bsd-2-clause
|
dreinhold/pregnancy,dreinhold/pregnancy,dreinhold/pregnancy
|
from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
import contractions.views
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'pregnancy.views.home', name='home'),
# url(r'^pregnancy/', include('pregnancy.foo.urls')),
+ url(r'^$', contractions.views.ContractionList.as_view(), name='ContractionList'),
url(r'^contractions/$', contractions.views.ContractionList.as_view(), name='ContractionList'),
url(r'^update_intensity/(?P<pk>\d+)/$', contractions.views.UpdateIntensity.as_view(), name='UpdateIntensity'),
url(r'^update_intensity2/(?P<pk>\d+)/$', contractions.views.UpdateIntensity2.as_view(), name='UpdateIntensity2'),
url(r'^ContractionListTable/$', contractions.views.ContractionListTable.as_view(), name='ContractionListTable'),
url(r'^StartContraction/$', contractions.views.StartContraction.as_view(), name='StartContraction'),
url(r'^StopContraction/(?P<pk>\d+)/$', contractions.views.StopContraction.as_view(), name='StopContraction'),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
|
Update url to point / to the contractions app
|
## Code Before:
from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
import contractions.views
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'pregnancy.views.home', name='home'),
# url(r'^pregnancy/', include('pregnancy.foo.urls')),
url(r'^contractions/$', contractions.views.ContractionList.as_view(), name='ContractionList'),
url(r'^update_intensity/(?P<pk>\d+)/$', contractions.views.UpdateIntensity.as_view(), name='UpdateIntensity'),
url(r'^update_intensity2/(?P<pk>\d+)/$', contractions.views.UpdateIntensity2.as_view(), name='UpdateIntensity2'),
url(r'^ContractionListTable/$', contractions.views.ContractionListTable.as_view(), name='ContractionListTable'),
url(r'^StartContraction/$', contractions.views.StartContraction.as_view(), name='StartContraction'),
url(r'^StopContraction/(?P<pk>\d+)/$', contractions.views.StopContraction.as_view(), name='StopContraction'),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
## Instruction:
Update url to point / to the contractions app
## Code After:
from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
import contractions.views
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'pregnancy.views.home', name='home'),
# url(r'^pregnancy/', include('pregnancy.foo.urls')),
url(r'^$', contractions.views.ContractionList.as_view(), name='ContractionList'),
url(r'^contractions/$', contractions.views.ContractionList.as_view(), name='ContractionList'),
url(r'^update_intensity/(?P<pk>\d+)/$', contractions.views.UpdateIntensity.as_view(), name='UpdateIntensity'),
url(r'^update_intensity2/(?P<pk>\d+)/$', contractions.views.UpdateIntensity2.as_view(), name='UpdateIntensity2'),
url(r'^ContractionListTable/$', contractions.views.ContractionListTable.as_view(), name='ContractionListTable'),
url(r'^StartContraction/$', contractions.views.StartContraction.as_view(), name='StartContraction'),
url(r'^StopContraction/(?P<pk>\d+)/$', contractions.views.StopContraction.as_view(), name='StopContraction'),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
|
from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
import contractions.views
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'pregnancy.views.home', name='home'),
# url(r'^pregnancy/', include('pregnancy.foo.urls')),
+ url(r'^$', contractions.views.ContractionList.as_view(), name='ContractionList'),
url(r'^contractions/$', contractions.views.ContractionList.as_view(), name='ContractionList'),
url(r'^update_intensity/(?P<pk>\d+)/$', contractions.views.UpdateIntensity.as_view(), name='UpdateIntensity'),
url(r'^update_intensity2/(?P<pk>\d+)/$', contractions.views.UpdateIntensity2.as_view(), name='UpdateIntensity2'),
url(r'^ContractionListTable/$', contractions.views.ContractionListTable.as_view(), name='ContractionListTable'),
url(r'^StartContraction/$', contractions.views.StartContraction.as_view(), name='StartContraction'),
url(r'^StopContraction/(?P<pk>\d+)/$', contractions.views.StopContraction.as_view(), name='StopContraction'),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
|
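The same root-to-index forwarding could also be expressed declaratively with Django's RedirectView, avoiding a hand-written view; a hypothetical sketch in the same patterns() style the project uses:

from django.conf.urls import patterns, url
from django.views.generic import RedirectView

urlpatterns = patterns('',
    url(r'^$', RedirectView.as_view(url='/index', permanent=False)),
)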
4208538a2b7c5f2280f67520a73bd87b74de26dd
|
scripts/getsent.py
|
scripts/getsent.py
|
import sys
import depio
sentnum = int(sys.argv[2])
fnames = [sys.argv[1]]
for fname in fnames:
sents = list(depio.depread(fname))
i=0
out = open("%d.%s" % (sentnum,fname),'w')
for outl in sents[sentnum]:
out.write('\t'.join(outl) + '\n')
break
out.close()
|
import sys
import depio
sentnum = int(sys.argv[2])
fnames = [sys.argv[1]]
for fname in fnames:
sents = list(depio.depread(fname))
i=0
out = open("%d.%s" % (sentnum,fname),'w')
for outl in sents[sentnum]:
out.write('\t'.join(outl) + '\n')
out.write('\n')
out.close()
|
Fix script to output new line at end of file
|
Fix script to output new line at end of file
|
Python
|
apache-2.0
|
habeanf/yap,habeanf/yap
|
import sys
import depio
sentnum = int(sys.argv[2])
fnames = [sys.argv[1]]
for fname in fnames:
sents = list(depio.depread(fname))
i=0
out = open("%d.%s" % (sentnum,fname),'w')
for outl in sents[sentnum]:
out.write('\t'.join(outl) + '\n')
- break
+ out.write('\n')
out.close()
|
Fix script to output new line at end of file
|
## Code Before:
import sys
import depio
sentnum = int(sys.argv[2])
fnames = [sys.argv[1]]
for fname in fnames:
sents = list(depio.depread(fname))
i=0
out = open("%d.%s" % (sentnum,fname),'w')
for outl in sents[sentnum]:
out.write('\t'.join(outl) + '\n')
break
out.close()
## Instruction:
Fix script to output new line at end of file
## Code After:
import sys
import depio
sentnum = int(sys.argv[2])
fnames = [sys.argv[1]]
for fname in fnames:
sents = list(depio.depread(fname))
i=0
out = open("%d.%s" % (sentnum,fname),'w')
for outl in sents[sentnum]:
out.write('\t'.join(outl) + '\n')
out.write('\n')
out.close()
|
import sys
import depio
sentnum = int(sys.argv[2])
fnames = [sys.argv[1]]
for fname in fnames:
sents = list(depio.depread(fname))
i=0
out = open("%d.%s" % (sentnum,fname),'w')
for outl in sents[sentnum]:
out.write('\t'.join(outl) + '\n')
- break
+ out.write('\n')
out.close()
|
4abd7baafcd982993471d5c0137d4b506ea49e8b
|
src/runcommands/util/enums.py
|
src/runcommands/util/enums.py
|
import enum
import os
import subprocess
import sys
import blessings
from .misc import isatty
if isatty(sys.stdout) and os.getenv("TERM"):
Terminal = blessings.Terminal
else:
class Terminal:
def __getattr__(self, name):
return ""
TERM = Terminal()
class Color(enum.Enum):
none = ""
reset = TERM.normal
black = TERM.black
red = TERM.red
green = TERM.green
yellow = TERM.yellow
blue = TERM.blue
magenta = TERM.magenta
cyan = TERM.cyan
white = TERM.white
def __str__(self):
return self.value
class StreamOptions(enum.Enum):
"""Choices for stream handling."""
capture = "capture"
hide = "hide"
none = "none"
@property
def option(self):
return {
"capture": subprocess.PIPE,
"hide": subprocess.DEVNULL,
"none": None,
}[self.value]
|
import enum
import os
import subprocess
import sys
import blessings
from .misc import isatty
if isatty(sys.stdout) and os.getenv("TERM"):
Terminal = blessings.Terminal
else:
# XXX: Mock terminal that returns "" for all attributes
class TerminalValue:
registry = {}
@classmethod
def get(cls, name):
if name not in cls.registry:
cls.registry[name] = cls(name)
return cls.registry[name]
def __init__(self, name):
self.name = name
def __repr__(self):
return f"{self.__class__.__name__}({self.name})"
def __str__(self):
return ""
class Terminal:
def __getattr__(self, name):
return TerminalValue.get(name)
TERM = Terminal()
class Color(enum.Enum):
none = ""
reset = TERM.normal
black = TERM.black
red = TERM.red
green = TERM.green
yellow = TERM.yellow
blue = TERM.blue
magenta = TERM.magenta
cyan = TERM.cyan
white = TERM.white
def __str__(self):
return str(self.value)
class StreamOptions(enum.Enum):
"""Choices for stream handling."""
capture = "capture"
hide = "hide"
none = "none"
@property
def option(self):
return {
"capture": subprocess.PIPE,
"hide": subprocess.DEVNULL,
"none": None,
}[self.value]
|
Fix Color enum setup when TERM isn't set
|
Fix Color enum setup when TERM isn't set
The previous version of this didn't work right because all the values
were the same empty string.
This works around that by creating distinct values that evaluate to "".
Amends 94b55ead63523f7f5677989f1a4999994b205cdf
|
Python
|
mit
|
wylee/runcommands,wylee/runcommands
|
import enum
import os
import subprocess
import sys
import blessings
from .misc import isatty
if isatty(sys.stdout) and os.getenv("TERM"):
Terminal = blessings.Terminal
else:
+ # XXX: Mock terminal that returns "" for all attributes
+ class TerminalValue:
+ registry = {}
+
+ @classmethod
+ def get(cls, name):
+ if name not in cls.registry:
+ cls.registry[name] = cls(name)
+ return cls.registry[name]
+
+ def __init__(self, name):
+ self.name = name
+
+ def __repr__(self):
+ return f"{self.__class__.__name__}({self.name})"
+
+ def __str__(self):
+ return ""
class Terminal:
def __getattr__(self, name):
- return ""
+ return TerminalValue.get(name)
TERM = Terminal()
class Color(enum.Enum):
none = ""
reset = TERM.normal
black = TERM.black
red = TERM.red
green = TERM.green
yellow = TERM.yellow
blue = TERM.blue
magenta = TERM.magenta
cyan = TERM.cyan
white = TERM.white
def __str__(self):
- return self.value
+ return str(self.value)
class StreamOptions(enum.Enum):
"""Choices for stream handling."""
capture = "capture"
hide = "hide"
none = "none"
@property
def option(self):
return {
"capture": subprocess.PIPE,
"hide": subprocess.DEVNULL,
"none": None,
}[self.value]
|
Fix Color enum setup when TERM isn't set
|
## Code Before:
import enum
import os
import subprocess
import sys
import blessings
from .misc import isatty
if isatty(sys.stdout) and os.getenv("TERM"):
Terminal = blessings.Terminal
else:
class Terminal:
def __getattr__(self, name):
return ""
TERM = Terminal()
class Color(enum.Enum):
none = ""
reset = TERM.normal
black = TERM.black
red = TERM.red
green = TERM.green
yellow = TERM.yellow
blue = TERM.blue
magenta = TERM.magenta
cyan = TERM.cyan
white = TERM.white
def __str__(self):
return self.value
class StreamOptions(enum.Enum):
"""Choices for stream handling."""
capture = "capture"
hide = "hide"
none = "none"
@property
def option(self):
return {
"capture": subprocess.PIPE,
"hide": subprocess.DEVNULL,
"none": None,
}[self.value]
## Instruction:
Fix Color enum setup when TERM isn't set
## Code After:
import enum
import os
import subprocess
import sys
import blessings
from .misc import isatty
if isatty(sys.stdout) and os.getenv("TERM"):
Terminal = blessings.Terminal
else:
# XXX: Mock terminal that returns "" for all attributes
class TerminalValue:
registry = {}
@classmethod
def get(cls, name):
if name not in cls.registry:
cls.registry[name] = cls(name)
return cls.registry[name]
def __init__(self, name):
self.name = name
def __repr__(self):
return f"{self.__class__.__name__}({self.name})"
def __str__(self):
return ""
class Terminal:
def __getattr__(self, name):
return TerminalValue.get(name)
TERM = Terminal()
class Color(enum.Enum):
none = ""
reset = TERM.normal
black = TERM.black
red = TERM.red
green = TERM.green
yellow = TERM.yellow
blue = TERM.blue
magenta = TERM.magenta
cyan = TERM.cyan
white = TERM.white
def __str__(self):
return str(self.value)
class StreamOptions(enum.Enum):
"""Choices for stream handling."""
capture = "capture"
hide = "hide"
none = "none"
@property
def option(self):
return {
"capture": subprocess.PIPE,
"hide": subprocess.DEVNULL,
"none": None,
}[self.value]
|
import enum
import os
import subprocess
import sys
import blessings
from .misc import isatty
if isatty(sys.stdout) and os.getenv("TERM"):
Terminal = blessings.Terminal
else:
+ # XXX: Mock terminal that returns "" for all attributes
+ class TerminalValue:
+ registry = {}
+
+ @classmethod
+ def get(cls, name):
+ if name not in cls.registry:
+ cls.registry[name] = cls(name)
+ return cls.registry[name]
+
+ def __init__(self, name):
+ self.name = name
+
+ def __repr__(self):
+ return f"{self.__class__.__name__}({self.name})"
+
+ def __str__(self):
+ return ""
class Terminal:
def __getattr__(self, name):
- return ""
+ return TerminalValue.get(name)
TERM = Terminal()
class Color(enum.Enum):
none = ""
reset = TERM.normal
black = TERM.black
red = TERM.red
green = TERM.green
yellow = TERM.yellow
blue = TERM.blue
magenta = TERM.magenta
cyan = TERM.cyan
white = TERM.white
def __str__(self):
- return self.value
+ return str(self.value)
? ++++ +
class StreamOptions(enum.Enum):
"""Choices for stream handling."""
capture = "capture"
hide = "hide"
none = "none"
@property
def option(self):
return {
"capture": subprocess.PIPE,
"hide": subprocess.DEVNULL,
"none": None,
}[self.value]
|
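The TerminalValue class is essentially a flyweight: one interned instance per attribute name, informative under repr() but formatting to an empty string. A standalone sketch of the same idea:

class NamedEmpty:
    _registry = {}

    @classmethod
    def get(cls, name):
        # Interning: reuse one instance per distinct name.
        if name not in cls._registry:
            cls._registry[name] = cls(name)
        return cls._registry[name]

    def __init__(self, name):
        self.name = name

    def __repr__(self):
        return "NamedEmpty({})".format(self.name)

    def __str__(self):
        return ""  # vanishes in string formatting, like a disabled style

class DummyTerminal:
    def __getattr__(self, name):
        return NamedEmpty.get(name)

term = DummyTerminal()
print(repr(term.red))               # NamedEmpty(red)
print("[{}text]".format(term.red))  # [text]
print(term.red is term.red)         # True -- interned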
3bdc7250f7a40ef4b3ad5f431c6b6e3e92ccacc8
|
app.py
|
app.py
|
from flask import Flask, render_template, request, redirect
import requests
import pandas as pd
from datetime import datetime
from bokeh.plotting import figure, output_notebook, output_file, save
app = Flask(__name__)
# @app.route('/')
# def main():
# return redirect('/index')
@app.route('/', methods=['GET', 'POST'])
def index():
if request.method == 'GET':
return render_template('index.html')
else:
pitcher = request.form['pitcher']
image_file = pitcher.lower()
image_file = image_file.split()
image_file = '_'.join(image_file) + '.png'
return render_template('results.html', image_file = image_file)
if __name__ == '__main__':
app.run(port=33508)
|
from flask import Flask, render_template, request, redirect
import requests
import pandas as pd
from datetime import datetime
from bokeh.plotting import figure, output_notebook, output_file, save
app = Flask(__name__)
@app.route('/')
def main():
return redirect('/index')
@app.route('/index', methods=['GET', 'POST'])
def index():
if request.method == 'GET':
return render_template('index.html')
else:
pitcher = request.form['pitcher']
image_file = pitcher.lower()
image_file = image_file.split()
image_file = '_'.join(image_file) + '.png'
return render_template('results.html', image_file = image_file)
if __name__ == '__main__':
app.run(port=33508)
|
Revert "Remove redirect to avoid Chrome privacy error"
|
Revert "Remove redirect to avoid Chrome privacy error"
This reverts commit e5322958f14b2428b74de726476fd98adae8c454.
|
Python
|
mit
|
gsganden/pitcher-reports,gsganden/pitcher-reports
|
from flask import Flask, render_template, request, redirect
import requests
import pandas as pd
from datetime import datetime
from bokeh.plotting import figure, output_notebook, output_file, save
app = Flask(__name__)
- # @app.route('/')
+ @app.route('/')
- # def main():
+ def main():
- # return redirect('/index')
+ return redirect('/index')
- @app.route('/', methods=['GET', 'POST'])
+ @app.route('/index', methods=['GET', 'POST'])
def index():
if request.method == 'GET':
return render_template('index.html')
else:
pitcher = request.form['pitcher']
image_file = pitcher.lower()
image_file = image_file.split()
image_file = '_'.join(image_file) + '.png'
return render_template('results.html', image_file = image_file)
if __name__ == '__main__':
app.run(port=33508)
|
Revert "Remove redirect to avoid Chrome privacy error"
|
## Code Before:
from flask import Flask, render_template, request, redirect
import requests
import pandas as pd
from datetime import datetime
from bokeh.plotting import figure, output_notebook, output_file, save
app = Flask(__name__)
# @app.route('/')
# def main():
# return redirect('/index')
@app.route('/', methods=['GET', 'POST'])
def index():
if request.method == 'GET':
return render_template('index.html')
else:
pitcher = request.form['pitcher']
image_file = pitcher.lower()
image_file = image_file.split()
image_file = '_'.join(image_file) + '.png'
return render_template('results.html', image_file = image_file)
if __name__ == '__main__':
app.run(port=33508)
## Instruction:
Revert "Remove redirect to avoid Chrome privacy error"
## Code After:
from flask import Flask, render_template, request, redirect
import requests
import pandas as pd
from datetime import datetime
from bokeh.plotting import figure, output_notebook, output_file, save
app = Flask(__name__)
@app.route('/')
def main():
return redirect('/index')
@app.route('/index', methods=['GET', 'POST'])
def index():
if request.method == 'GET':
return render_template('index.html')
else:
pitcher = request.form['pitcher']
image_file = pitcher.lower()
image_file = image_file.split()
image_file = '_'.join(image_file) + '.png'
return render_template('results.html', image_file = image_file)
if __name__ == '__main__':
app.run(port=33508)
|
from flask import Flask, render_template, request, redirect
import requests
import pandas as pd
from datetime import datetime
from bokeh.plotting import figure, output_notebook, output_file, save
app = Flask(__name__)
- # @app.route('/')
? --
+ @app.route('/')
- # def main():
? --
+ def main():
- # return redirect('/index')
? --
+ return redirect('/index')
- @app.route('/', methods=['GET', 'POST'])
+ @app.route('/index', methods=['GET', 'POST'])
? +++++
def index():
if request.method == 'GET':
return render_template('index.html')
else:
pitcher = request.form['pitcher']
image_file = pitcher.lower()
image_file = image_file.split()
image_file = '_'.join(image_file) + '.png'
return render_template('results.html', image_file = image_file)
if __name__ == '__main__':
app.run(port=33508)
|
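A stripped-down, runnable version of the restored routing (template names and form handling reduced so it stands alone); '/' issues an HTTP redirect to '/index', which then serves both GET and POST:

from flask import Flask, redirect, request

app = Flask(__name__)

@app.route('/')
def main():
    return redirect('/index')

@app.route('/index', methods=['GET', 'POST'])
def index():
    if request.method == 'GET':
        return 'form goes here'
    return 'selected: {}'.format(request.form.get('pitcher', ''))

if __name__ == '__main__':
    app.run(port=33508)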
90a724313902e3d95f1a37d9102af1544c9bc61d
|
segments/set_term_title.py
|
segments/set_term_title.py
|
def add_term_title_segment():
term = os.getenv('TERM')
if not (('xterm' in term) or ('rxvt' in term)):
return
if powerline.args.shell == 'bash':
set_title = '\\[\\e]0;\\u@\\h: \\w\\a\\]'
elif powerline.args.shell == 'zsh':
set_title = '\\e]0;%n@%m: %~\\a'
else:
import socket
set_title = '\\e]0;%s@%s: %s\\a' % (os.getenv('USER'), socket.gethostname().split('.')[0], powerline.cwd or os.getenv('PWD'))
powerline.append(set_title, None, None, '')
add_term_title_segment()
|
def add_term_title_segment():
term = os.getenv('TERM')
if not (('xterm' in term) or ('rxvt' in term)):
return
if powerline.args.shell == 'bash':
set_title = '\\[\\e]0;\\u@\\h: \\w\\a\\]'
elif powerline.args.shell == 'zsh':
set_title = '\033]0;%n@%m: %~\007'
else:
import socket
set_title = '\033]0;%s@%s: %s\007' % (os.getenv('USER'), socket.gethostname().split('.')[0], powerline.cwd or os.getenv('PWD'))
powerline.append(set_title, None, None, '')
add_term_title_segment()
|
Fix use of escape characters in "set terminal title" segment.
|
Fix use of escape characters in "set terminal title" segment.
Escape characters were incorrect for non-BASH shells.
|
Python
|
mit
|
nicholascapo/powerline-shell,b-ryan/powerline-shell,junix/powerline-shell,wrgoldstein/powerline-shell,rbanffy/powerline-shell,b-ryan/powerline-shell,mart-e/powerline-shell,blieque/powerline-shell,paulhybryant/powerline-shell,tswsl1989/powerline-shell,torbjornvatn/powerline-shell,MartinWetterwald/powerline-shell,iKrishneel/powerline-shell,fellipecastro/powerline-shell,ceholden/powerline-shell,banga/powerline-shell,banga/powerline-shell,handsomecheung/powerline-shell,saghul/shline,strycore/powerline-shell,bitIO/powerline-shell,intfrr/powerline-shell,yc2prime/powerline-shell,mcdope/powerline-shell,milkbikis/powerline-shell,paulhybryant/powerline-shell,JulianVolodia/powerline-shell,dtrip/powerline-shell,paol/powerline-shell,Menci/powerline-shell,LeonardoGentile/powerline-shell
|
def add_term_title_segment():
term = os.getenv('TERM')
if not (('xterm' in term) or ('rxvt' in term)):
return
if powerline.args.shell == 'bash':
set_title = '\\[\\e]0;\\u@\\h: \\w\\a\\]'
elif powerline.args.shell == 'zsh':
- set_title = '\\e]0;%n@%m: %~\\a'
+ set_title = '\033]0;%n@%m: %~\007'
else:
import socket
- set_title = '\\e]0;%s@%s: %s\\a' % (os.getenv('USER'), socket.gethostname().split('.')[0], powerline.cwd or os.getenv('PWD'))
+ set_title = '\033]0;%s@%s: %s\007' % (os.getenv('USER'), socket.gethostname().split('.')[0], powerline.cwd or os.getenv('PWD'))
powerline.append(set_title, None, None, '')
add_term_title_segment()
|
Fix use of escape characters in "set terminal title" segment.
|
## Code Before:
def add_term_title_segment():
term = os.getenv('TERM')
if not (('xterm' in term) or ('rxvt' in term)):
return
if powerline.args.shell == 'bash':
set_title = '\\[\\e]0;\\u@\\h: \\w\\a\\]'
elif powerline.args.shell == 'zsh':
set_title = '\\e]0;%n@%m: %~\\a'
else:
import socket
set_title = '\\e]0;%s@%s: %s\\a' % (os.getenv('USER'), socket.gethostname().split('.')[0], powerline.cwd or os.getenv('PWD'))
powerline.append(set_title, None, None, '')
add_term_title_segment()
## Instruction:
Fix use of escape characters in "set terminal title" segment.
## Code After:
def add_term_title_segment():
term = os.getenv('TERM')
if not (('xterm' in term) or ('rxvt' in term)):
return
if powerline.args.shell == 'bash':
set_title = '\\[\\e]0;\\u@\\h: \\w\\a\\]'
elif powerline.args.shell == 'zsh':
set_title = '\033]0;%n@%m: %~\007'
else:
import socket
set_title = '\033]0;%s@%s: %s\007' % (os.getenv('USER'), socket.gethostname().split('.')[0], powerline.cwd or os.getenv('PWD'))
powerline.append(set_title, None, None, '')
add_term_title_segment()
|
def add_term_title_segment():
term = os.getenv('TERM')
if not (('xterm' in term) or ('rxvt' in term)):
return
if powerline.args.shell == 'bash':
set_title = '\\[\\e]0;\\u@\\h: \\w\\a\\]'
elif powerline.args.shell == 'zsh':
- set_title = '\\e]0;%n@%m: %~\\a'
? ^^ ^^
+ set_title = '\033]0;%n@%m: %~\007'
? ^^^ ^^^
else:
import socket
- set_title = '\\e]0;%s@%s: %s\\a' % (os.getenv('USER'), socket.gethostname().split('.')[0], powerline.cwd or os.getenv('PWD'))
? ^^ ^^
+ set_title = '\033]0;%s@%s: %s\007' % (os.getenv('USER'), socket.gethostname().split('.')[0], powerline.cwd or os.getenv('PWD'))
? ^^^ ^^^
powerline.append(set_title, None, None, '')
add_term_title_segment()
|
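The corrected sequence is the xterm OSC (Operating System Command) for setting the window title: ESC ] 0 ; title BEL, i.e. '\033]0;...\007'. A standalone demo that works in most xterm-compatible terminals:

import sys

def set_terminal_title(title):
    # OSC 0 sets both icon name and window title; BEL terminates it.
    sys.stdout.write('\033]0;{}\007'.format(title))
    sys.stdout.flush()

set_terminal_title('hello from python')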
353728aba17695396c6167543e74181f9f853fdc
|
examples/template_render.py
|
examples/template_render.py
|
import django.template.loader
import django.conf
import sys
sys.path.append('django_test')
django.conf.settings.configure(INSTALLED_APPS=(), TEMPLATE_DIRS=('.', 'examples',))
for x in range(0,100):
django.template.loader.render_to_string('template.html')
|
import django.template.loader
import django.conf
import sys, os
os.chdir(os.path.dirname(__file__))
django.conf.settings.configure(
INSTALLED_APPS=(),
TEMPLATES=[{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": ['.']
}],
)
django.setup()
for x in range(0,100):
django.template.loader.render_to_string('template.html')
|
Update template render example for Django 1.8+
|
Update template render example for Django 1.8+
|
Python
|
bsd-3-clause
|
joerick/pyinstrument,joerick/pyinstrument,joerick/pyinstrument,joerick/pyinstrument,joerick/pyinstrument,joerick/pyinstrument
|
import django.template.loader
import django.conf
- import sys
+ import sys, os
- sys.path.append('django_test')
- django.conf.settings.configure(INSTALLED_APPS=(), TEMPLATE_DIRS=('.', 'examples',))
+ os.chdir(os.path.dirname(__file__))
+
+ django.conf.settings.configure(
+ INSTALLED_APPS=(),
+ TEMPLATES=[{
+ "BACKEND": "django.template.backends.django.DjangoTemplates",
+ "DIRS": ['.']
+ }],
+ )
+ django.setup()
for x in range(0,100):
django.template.loader.render_to_string('template.html')
|
Update template render example for Django 1.8+
|
## Code Before:
import django.template.loader
import django.conf
import sys
sys.path.append('django_test')
django.conf.settings.configure(INSTALLED_APPS=(), TEMPLATE_DIRS=('.', 'examples',))
for x in range(0,100):
django.template.loader.render_to_string('template.html')
## Instruction:
Update template render example for Django 1.8+
## Code After:
import django.template.loader
import django.conf
import sys, os
os.chdir(os.path.dirname(__file__))
django.conf.settings.configure(
INSTALLED_APPS=(),
TEMPLATES=[{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": ['.']
}],
)
django.setup()
for x in range(0,100):
django.template.loader.render_to_string('template.html')
|
import django.template.loader
import django.conf
- import sys
+ import sys, os
? ++++
- sys.path.append('django_test')
- django.conf.settings.configure(INSTALLED_APPS=(), TEMPLATE_DIRS=('.', 'examples',))
+ os.chdir(os.path.dirname(__file__))
+
+ django.conf.settings.configure(
+ INSTALLED_APPS=(),
+ TEMPLATES=[{
+ "BACKEND": "django.template.backends.django.DjangoTemplates",
+ "DIRS": ['.']
+ }],
+ )
+ django.setup()
for x in range(0,100):
django.template.loader.render_to_string('template.html')
|
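If settings configuration is not needed at all, Django 1.8+ also allows using the template engine standalone via django.template.Engine, which can be handy for quick benchmarks like this one; a minimal sketch:

from django.template import Engine, Context

engine = Engine()  # no settings.configure() or django.setup() required
tmpl = engine.from_string('Hello, {{ name }}!')
print(tmpl.render(Context({'name': 'world'})))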
7bfefe50c00d86b55c0620207e9848c97aa28227
|
rml/units.py
|
rml/units.py
|
import numpy as np
from scipy.interpolate import PchipInterpolator
class UcPoly():
def __init__(self, coef):
self.p = np.poly1d(coef)
def machine_to_physics(self, machine_value):
return self.p(machine_value)
def physics_to_machine(self, physics_value):
roots = (self.p - physics_value).roots
positive_roots = [root for root in roots if root > 0]
if len(positive_roots) > 0:
return positive_roots[0]
else:
raise ValueError("No corresponding positive machine value:", roots)
class UcPchip():
def __init__(self, x, y):
self.x = x
self.y = y
self.pp = PchipInterpolator(x, y)
def machine_to_physics(self, machine_value):
return self.pp(machine_value)
def physics_to_machine(self, physics_value):
pass
|
import numpy as np
from scipy.interpolate import PchipInterpolator
class UcPoly(object):
def __init__(self, coef):
self.p = np.poly1d(coef)
def machine_to_physics(self, machine_value):
return self.p(machine_value)
def physics_to_machine(self, physics_value):
roots = (self.p - physics_value).roots
positive_roots = [root for root in roots if root > 0]
if len(positive_roots) > 0:
return positive_roots[0]
else:
raise ValueError("No corresponding positive machine value:", roots)
class UcPchip(object):
def __init__(self, x, y):
self.x = x
self.y = y
self.pp = PchipInterpolator(x, y)
def machine_to_physics(self, machine_value):
return self.pp(machine_value)
def physics_to_machine(self, physics_value):
pass
|
Correct the definitions of old-style classes
|
Correct the definitions of old-style classes
|
Python
|
apache-2.0
|
razvanvasile/RML,willrogers/pml,willrogers/pml
|
import numpy as np
from scipy.interpolate import PchipInterpolator
- class UcPoly():
+ class UcPoly(object):
def __init__(self, coef):
self.p = np.poly1d(coef)
def machine_to_physics(self, machine_value):
return self.p(machine_value)
def physics_to_machine(self, physics_value):
roots = (self.p - physics_value).roots
positive_roots = [root for root in roots if root > 0]
if len(positive_roots) > 0:
return positive_roots[0]
else:
raise ValueError("No corresponding positive machine value:", roots)
- class UcPchip():
+ class UcPchip(object):
def __init__(self, x, y):
self.x = x
self.y = y
self.pp = PchipInterpolator(x, y)
def machine_to_physics(self, machine_value):
return self.pp(machine_value)
def physics_to_machine(self, physics_value):
pass
|
Correct the definitions of old-style classes
|
## Code Before:
import numpy as np
from scipy.interpolate import PchipInterpolator
class UcPoly():
def __init__(self, coef):
self.p = np.poly1d(coef)
def machine_to_physics(self, machine_value):
return self.p(machine_value)
def physics_to_machine(self, physics_value):
roots = (self.p - physics_value).roots
positive_roots = [root for root in roots if root > 0]
if len(positive_roots) > 0:
return positive_roots[0]
else:
raise ValueError("No corresponding positive machine value:", roots)
class UcPchip():
def __init__(self, x, y):
self.x = x
self.y = y
self.pp = PchipInterpolator(x, y)
def machine_to_physics(self, machine_value):
return self.pp(machine_value)
def physics_to_machine(self, physics_value):
pass
## Instruction:
Correct the definitions of old-style classes
## Code After:
import numpy as np
from scipy.interpolate import PchipInterpolator
class UcPoly(object):
def __init__(self, coef):
self.p = np.poly1d(coef)
def machine_to_physics(self, machine_value):
return self.p(machine_value)
def physics_to_machine(self, physics_value):
roots = (self.p - physics_value).roots
positive_roots = [root for root in roots if root > 0]
if len(positive_roots) > 0:
return positive_roots[0]
else:
raise ValueError("No corresponding positive machine value:", roots)
class UcPchip(object):
def __init__(self, x, y):
self.x = x
self.y = y
self.pp = PchipInterpolator(x, y)
def machine_to_physics(self, machine_value):
return self.pp(machine_value)
def physics_to_machine(self, physics_value):
pass
|
import numpy as np
from scipy.interpolate import PchipInterpolator
- class UcPoly():
+ class UcPoly(object):
? ++++++
def __init__(self, coef):
self.p = np.poly1d(coef)
def machine_to_physics(self, machine_value):
return self.p(machine_value)
def physics_to_machine(self, physics_value):
roots = (self.p - physics_value).roots
positive_roots = [root for root in roots if root > 0]
if len(positive_roots) > 0:
return positive_roots[0]
else:
raise ValueError("No corresponding positive machine value:", roots)
- class UcPchip():
+ class UcPchip(object):
? ++++++
def __init__(self, x, y):
self.x = x
self.y = y
self.pp = PchipInterpolator(x, y)
def machine_to_physics(self, machine_value):
return self.pp(machine_value)
def physics_to_machine(self, physics_value):
pass
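For context, a minimal sketch (Python 2 semantics assumed) of what adding the `object` base changes:

# Python 2 only -- under Python 3 every class is new-style by default.
class OldStyle:             # old-style: type(instance) is the generic 'instance'
    pass

class NewStyle(object):     # new-style: enables super(), descriptors, __slots__
    pass

print(type(OldStyle()))     # <type 'instance'>
print(type(NewStyle()))     # <class '__main__.NewStyle'>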
|
418e7a7d8c8261578df046d251041ab0794d1580
|
decorators.py
|
decorators.py
|
class RequiresType(object):
"""
    Checks that the first argument (or the one at the position given by the
    keyword argument 'position') to the function is an instance of one of
    the types given in the positional decorator arguments
"""
def __init__(self, *types, **kwargs):
self.types = types
self.position = 0
self.returnvalue = False
if 'position' in kwargs:
self.position = int(kwargs['position']) - 1
if 'returnvalue' in kwargs:
self.returnvalue = kwargs['returnvalue']
def __call__(self, f):
def wrapped_f(*args, **kwargs):
if type(args[self.position]) not in self.types:
return self.returnvalue
return f(*args, **kwargs)
return wrapped_f
|
class RequiresType(object):
"""
    Checks that the first argument (or the one at the position given by the
    keyword argument 'position') to the function is an instance of one of
    the types given in the positional decorator arguments
"""
def __init__(self, *types, **kwargs):
self.types = types
self.position = 0
if 'position' in kwargs:
self.position = int(kwargs['position']) - 1
def __call__(self, f):
def wrapped_f(*args, **kwargs):
if type(args[self.position]) not in self.types:
            raise TypeError(
                "Invalid argument type '%s' at position %d. "
                "Expected one of (%s)" % (
                    type(args[self.position]).__name__, self.position,
                    ", ".join([t.__name__ for t in self.types])))
return f(*args, **kwargs)
return wrapped_f
|
Raise TypeError instead of returning
|
Raise TypeError instead of returning
|
Python
|
bsd-3-clause
|
rasher/reddit-modbot
|
class RequiresType(object):
"""
    Checks that the first argument (or the one at the position given by the
    keyword argument 'position') to the function is an instance of one of
    the types given in the positional decorator arguments
"""
def __init__(self, *types, **kwargs):
self.types = types
self.position = 0
- self.returnvalue = False
if 'position' in kwargs:
self.position = int(kwargs['position']) - 1
- if 'returnvalue' in kwargs:
- self.returnvalue = kwargs['returnvalue']
def __call__(self, f):
def wrapped_f(*args, **kwargs):
if type(args[self.position]) not in self.types:
- return self.returnvalue
+            raise TypeError(
+                "Invalid argument type '%s' at position %d. "
+                "Expected one of (%s)" % (
+                    type(args[self.position]).__name__, self.position,
+                    ", ".join([t.__name__ for t in self.types])))
return f(*args, **kwargs)
return wrapped_f
|
Raise TypeError instead of returning
|
## Code Before:
class RequiresType(object):
"""
    Checks that the first argument (or the one at the position given by the
    keyword argument 'position') to the function is an instance of one of
    the types given in the positional decorator arguments
"""
def __init__(self, *types, **kwargs):
self.types = types
self.position = 0
self.returnvalue = False
if 'position' in kwargs:
self.position = int(kwargs['position']) - 1
if 'returnvalue' in kwargs:
self.returnvalue = kwargs['returnvalue']
def __call__(self, f):
def wrapped_f(*args, **kwargs):
if type(args[self.position]) not in self.types:
return self.returnvalue
return f(*args, **kwargs)
return wrapped_f
## Instruction:
Raise TypeError instead of returning
## Code After:
class RequiresType(object):
"""
    Checks that the first argument (or the one at the position given by the
    keyword argument 'position') to the function is an instance of one of
    the types given in the positional decorator arguments
"""
def __init__(self, *types, **kwargs):
self.types = types
self.position = 0
if 'position' in kwargs:
self.position = int(kwargs['position']) - 1
def __call__(self, f):
def wrapped_f(*args, **kwargs):
if type(args[self.position]) not in self.types:
            raise TypeError(
                "Invalid argument type '%s' at position %d. "
                "Expected one of (%s)" % (
                    type(args[self.position]).__name__, self.position,
                    ", ".join([t.__name__ for t in self.types])))
return f(*args, **kwargs)
return wrapped_f
|
class RequiresType(object):
"""
    Checks that the first argument (or the one at the position given by the
    keyword argument 'position') to the function is an instance of one of
    the types given in the positional decorator arguments
"""
def __init__(self, *types, **kwargs):
self.types = types
self.position = 0
- self.returnvalue = False
if 'position' in kwargs:
self.position = int(kwargs['position']) - 1
- if 'returnvalue' in kwargs:
- self.returnvalue = kwargs['returnvalue']
def __call__(self, f):
def wrapped_f(*args, **kwargs):
if type(args[self.position]) not in self.types:
- return self.returnvalue
+            raise TypeError(
+                "Invalid argument type '%s' at position %d. "
+                "Expected one of (%s)" % (
+                    type(args[self.position]).__name__, self.position,
+                    ", ".join([t.__name__ for t in self.types])))
return f(*args, **kwargs)
return wrapped_f
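A hypothetical usage sketch of the decorator after this change (function name and argument types are illustrative):

@RequiresType(int, float)
def double(value):
    return value * 2

double(3)        # returns 6
double("three")  # now raises TypeError instead of silently returning False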
|
87bdef439a3faf465bb8c23166beeb8a142400f7
|
fapistrano/plugins/curl.py
|
fapistrano/plugins/curl.py
|
from fabric.api import cd, env, run
from .. import signal, configuration
def init():
configuration.setdefault('curl_url', '')
configuration.setdefault('curl_options', '')
signal.register('deploy.updating', download_artifact)
def download_artifact(**kwargs):
with cd(env.release_path):
run('curl %(curl_url)s %(curl_options)s' % env)
|
from fabric.api import cd, env, run
from .. import signal, configuration
def init():
configuration.setdefault('curl_url', '')
configuration.setdefault('curl_options', '')
configuration.setdefault('curl_extract_tar', '')
configuration.setdefault('curl_postinstall_script', '')
signal.register('deploy.updating', download_artifact)
def download_artifact(**kwargs):
with cd(env.release_path):
cmd = 'curl %(curl_url)s %(curl_options)s' % env
if env.curl_extract_tar:
cmd += ' | tar -x'
run(cmd)
if env.curl_postinstall_script:
run(env.curl_postinstall_script)
|
Add extract_tar and post_install_script option.
|
Add extract_tar and post_install_script option.
|
Python
|
mit
|
liwushuo/fapistrano
|
from fabric.api import cd, env, run
from .. import signal, configuration
def init():
configuration.setdefault('curl_url', '')
configuration.setdefault('curl_options', '')
+ configuration.setdefault('curl_extract_tar', '')
+ configuration.setdefault('curl_postinstall_script', '')
signal.register('deploy.updating', download_artifact)
def download_artifact(**kwargs):
with cd(env.release_path):
- run('curl %(curl_url)s %(curl_options)s' % env)
+ cmd = 'curl %(curl_url)s %(curl_options)s' % env
+ if env.curl_extract_tar:
+ cmd += ' | tar -x'
+ run(cmd)
+ if env.curl_postinstall_script:
+ run(env.curl_postinstall_script)
|
Add extract_tar and post_install_script option.
|
## Code Before:
from fabric.api import cd, env, run
from .. import signal, configuration
def init():
configuration.setdefault('curl_url', '')
configuration.setdefault('curl_options', '')
signal.register('deploy.updating', download_artifact)
def download_artifact(**kwargs):
with cd(env.release_path):
run('curl %(curl_url)s %(curl_options)s' % env)
## Instruction:
Add extract_tar and post_install_script option.
## Code After:
from fabric.api import cd, env, run
from .. import signal, configuration
def init():
configuration.setdefault('curl_url', '')
configuration.setdefault('curl_options', '')
configuration.setdefault('curl_extract_tar', '')
configuration.setdefault('curl_postinstall_script', '')
signal.register('deploy.updating', download_artifact)
def download_artifact(**kwargs):
with cd(env.release_path):
cmd = 'curl %(curl_url)s %(curl_options)s' % env
if env.curl_extract_tar:
cmd += ' | tar -x'
run(cmd)
if env.curl_postinstall_script:
run(env.curl_postinstall_script)
|
from fabric.api import cd, env, run
from .. import signal, configuration
def init():
configuration.setdefault('curl_url', '')
configuration.setdefault('curl_options', '')
+ configuration.setdefault('curl_extract_tar', '')
+ configuration.setdefault('curl_postinstall_script', '')
signal.register('deploy.updating', download_artifact)
def download_artifact(**kwargs):
with cd(env.release_path):
- run('curl %(curl_url)s %(curl_options)s' % env)
? ^^^^ -
+ cmd = 'curl %(curl_url)s %(curl_options)s' % env
? ^^^^^^
+ if env.curl_extract_tar:
+ cmd += ' | tar -x'
+ run(cmd)
+ if env.curl_postinstall_script:
+ run(env.curl_postinstall_script)
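A hypothetical deploy configuration exercising the new options (setting names come from the defaults registered above; the values are illustrative):

# fabfile.py -- illustrative values only
from fabric.api import env

env.curl_url = 'https://example.com/artifacts/app.tar'
env.curl_options = '-sSL'
env.curl_extract_tar = True               # appends '| tar -x' to the command
env.curl_postinstall_script = './post_install.sh'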
|
c70a409c7717fa62517b69f8a5f20f10d5325751
|
test/common/memcached_workload_common.py
|
test/common/memcached_workload_common.py
|
import contextlib
import rdb_workload_common
@contextlib.contextmanager
def make_memcache_connection(opts):
with rdb_workload_common.make_table_and_connection(opts) as (table, conn):
yield MemcacheRdbShim(table, conn)
class MemcacheRdbShim(object):
def __init__(self, table, conn):
self.table = table
self.conn = conn
def get(self, key):
response = self.table.get(key).run(self.conn)
if response:
return response['val']
def set(self, key, val):
response = self.table.insert({
'id': key,
'val': val
},
upsert=True
).run(self.conn)
error = response.get('first_error')
if error:
raise Exception(error)
return response['inserted'] | response['replaced'] | response['unchanged']
def delete(self, key):
response = self.table.get(key).delete().run(self.conn)
error = response.get('first_error')
if error:
raise Exception(error)
return response['deleted']
def option_parser_for_memcache():
return rdb_workload_common.option_parser_for_connect()
|
import contextlib
import rdb_workload_common
@contextlib.contextmanager
def make_memcache_connection(opts):
with rdb_workload_common.make_table_and_connection(opts) as (table, conn):
yield MemcacheRdbShim(table, conn)
class MemcacheRdbShim(object):
def __init__(self, table, conn):
self.table = table
self.conn = conn
def get(self, key):
response = self.table.get(key).run(self.conn)
if response:
return response['val']
def set(self, key, val):
response = self.table.insert({
'id': key,
'val': val
},
conflict='replace'
).run(self.conn)
error = response.get('first_error')
if error:
raise Exception(error)
return response['inserted'] | response['replaced'] | response['unchanged']
def delete(self, key):
response = self.table.get(key).delete().run(self.conn)
error = response.get('first_error')
if error:
raise Exception(error)
return response['deleted']
def option_parser_for_memcache():
return rdb_workload_common.option_parser_for_connect()
|
Replace upsert=True with conflict='replace' in tests
|
Replace upsert=True with conflict='replace' in tests
Review 1804 by @gchpaco
Related to #2733
|
Python
|
apache-2.0
|
jesseditson/rethinkdb,lenstr/rethinkdb,gdi2290/rethinkdb,grandquista/rethinkdb,catroot/rethinkdb,bpradipt/rethinkdb,gavioto/rethinkdb,jesseditson/rethinkdb,sbusso/rethinkdb,sontek/rethinkdb,bchavez/rethinkdb,wujf/rethinkdb,greyhwndz/rethinkdb,Qinusty/rethinkdb,gdi2290/rethinkdb,urandu/rethinkdb,jmptrader/rethinkdb,bchavez/rethinkdb,pap/rethinkdb,grandquista/rethinkdb,AntouanK/rethinkdb,elkingtonmcb/rethinkdb,alash3al/rethinkdb,losywee/rethinkdb,jmptrader/rethinkdb,wkennington/rethinkdb,mbroadst/rethinkdb,robertjpayne/rethinkdb,matthaywardwebdesign/rethinkdb,losywee/rethinkdb,lenstr/rethinkdb,sbusso/rethinkdb,Qinusty/rethinkdb,niieani/rethinkdb,scripni/rethinkdb,bpradipt/rethinkdb,KSanthanam/rethinkdb,jmptrader/rethinkdb,yakovenkodenis/rethinkdb,gavioto/rethinkdb,dparnell/rethinkdb,pap/rethinkdb,catroot/rethinkdb,rrampage/rethinkdb,rrampage/rethinkdb,eliangidoni/rethinkdb,sebadiaz/rethinkdb,catroot/rethinkdb,marshall007/rethinkdb,RubenKelevra/rethinkdb,Qinusty/rethinkdb,4talesa/rethinkdb,niieani/rethinkdb,scripni/rethinkdb,wujf/rethinkdb,yaolinz/rethinkdb,dparnell/rethinkdb,sontek/rethinkdb,gavioto/rethinkdb,mquandalle/rethinkdb,alash3al/rethinkdb,JackieXie168/rethinkdb,marshall007/rethinkdb,yaolinz/rethinkdb,wojons/rethinkdb,tempbottle/rethinkdb,ajose01/rethinkdb,sbusso/rethinkdb,robertjpayne/rethinkdb,matthaywardwebdesign/rethinkdb,AntouanK/rethinkdb,yaolinz/rethinkdb,mquandalle/rethinkdb,tempbottle/rethinkdb,tempbottle/rethinkdb,marshall007/rethinkdb,catroot/rethinkdb,Wilbeibi/rethinkdb,jesseditson/rethinkdb,bpradipt/rethinkdb,4talesa/rethinkdb,rrampage/rethinkdb,yaolinz/rethinkdb,urandu/rethinkdb,bchavez/rethinkdb,sebadiaz/rethinkdb,wojons/rethinkdb,captainpete/rethinkdb,4talesa/rethinkdb,sbusso/rethinkdb,eliangidoni/rethinkdb,grandquista/rethinkdb,RubenKelevra/rethinkdb,spblightadv/rethinkdb,mcanthony/rethinkdb,captainpete/rethinkdb,gavioto/rethinkdb,ayumilong/rethinkdb,catroot/rethinkdb,Qinusty/rethinkdb,mbroadst/rethinkdb,spblightadv/rethinkdb,jesseditson/rethinkdb,sontek/rethinkdb,bchavez/rethinkdb,RubenKelevra/rethinkdb,robertjpayne/rethinkdb,ajose01/rethinkdb,dparnell/rethinkdb,lenstr/rethinkdb,KSanthanam/rethinkdb,catroot/rethinkdb,eliangidoni/rethinkdb,captainpete/rethinkdb,grandquista/rethinkdb,eliangidoni/rethinkdb,greyhwndz/rethinkdb,grandquista/rethinkdb,gdi2290/rethinkdb,mquandalle/rethinkdb,alash3al/rethinkdb,victorbriz/rethinkdb,wujf/rethinkdb,matthaywardwebdesign/rethinkdb,RubenKelevra/rethinkdb,sebadiaz/rethinkdb,lenstr/rethinkdb,alash3al/rethinkdb,tempbottle/rethinkdb,lenstr/rethinkdb,jmptrader/rethinkdb,ajose01/rethinkdb,scripni/rethinkdb,sontek/rethinkdb,yaolinz/rethinkdb,tempbottle/rethinkdb,tempbottle/rethinkdb,victorbriz/rethinkdb,mcanthony/rethinkdb,rrampage/rethinkdb,dparnell/rethinkdb,scripni/rethinkdb,yakovenkodenis/rethinkdb,spblightadv/rethinkdb,matthaywardwebdesign/rethinkdb,wojons/rethinkdb,losywee/rethinkdb,Qinusty/rethinkdb,KSanthanam/rethinkdb,gdi2290/rethinkdb,grandquista/rethinkdb,urandu/rethinkdb,scripni/rethinkdb,wujf/rethinkdb,4talesa/rethinkdb,bpradipt/rethinkdb,victorbriz/rethinkdb,JackieXie168/rethinkdb,AntouanK/rethinkdb,mquandalle/rethinkdb,dparnell/rethinkdb,victorbriz/rethinkdb,elkingtonmcb/rethinkdb,grandquista/rethinkdb,wujf/rethinkdb,niieani/rethinkdb,
alash3al/rethinkdb,captainpete/rethinkdb,wojons/rethinkdb,eliangidoni/rethinkdb,spblightadv/rethinkdb,Wilbeibi/rethinkdb,sebadiaz/rethinkdb,ayumilong/rethinkdb,bpradipt/rethinkdb,lenstr/rethinkdb,gdi2290/rethinkdb,catroot/rethinkdb,pap/rethinkdb,matthaywardwebdesign/rethinkdb,yakovenkodenis/rethinkdb,jesseditson/rethinkdb,Wilbeibi/rethinkdb,urandu/rethinkdb,captainpete/rethinkdb,ayumilong/rethinkdb,wkennington/rethinkdb,yakovenkodenis/rethinkdb,mcanthony/rethinkdb,jesseditson/rethinkdb,robertjpayne/rethinkdb,marshall007/rethinkdb,bchavez/rethinkdb,marshall007/rethinkdb,elkingtonmcb/rethinkdb,sbusso/rethinkdb,robertjpayne/rethinkdb,marshall007/rethinkdb,eliangidoni/rethinkdb,gdi2290/rethinkdb,JackieXie168/rethinkdb,elkingtonmcb/rethinkdb,losywee/rethinkdb,KSanthanam/rethinkdb,KSanthanam/rethinkdb,gavioto/rethinkdb,AntouanK/rethinkdb,marshall007/rethinkdb,jesseditson/rethinkdb,losywee/rethinkdb,bchavez/rethinkdb,spblightadv/rethinkdb,Qinusty/rethinkdb,elkingtonmcb/rethinkdb,yaolinz/rethinkdb,mbroadst/rethinkdb,wojons/rethinkdb,gdi2290/rethinkdb,ajose01/rethinkdb,mbroadst/rethinkdb,urandu/rethinkdb,wkennington/rethinkdb,rrampage/rethinkdb,ayumilong/rethinkdb,sontek/rethinkdb,sebadiaz/rethinkdb,alash3al/rethinkdb,urandu/rethinkdb,Wilbeibi/rethinkdb,mbroadst/rethinkdb,mquandalle/rethinkdb,Qinusty/rethinkdb,spblightadv/rethinkdb,rrampage/rethinkdb,KSanthanam/rethinkdb,matthaywardwebdesign/rethinkdb,tempbottle/rethinkdb,gavioto/rethinkdb,greyhwndz/rethinkdb,Qinusty/rethinkdb,eliangidoni/rethinkdb,jmptrader/rethinkdb,yaolinz/rethinkdb,losywee/rethinkdb,losywee/rethinkdb,yakovenkodenis/rethinkdb,yakovenkodenis/rethinkdb,bchavez/rethinkdb,yakovenkodenis/rethinkdb,4talesa/rethinkdb,sontek/rethinkdb,dparnell/rethinkdb,bpradipt/rethinkdb,matthaywardwebdesign/rethinkdb,RubenKelevra/rethinkdb,mcanthony/rethinkdb,eliangidoni/rethinkdb,greyhwndz/rethinkdb,matthaywardwebdesign/rethinkdb,lenstr/rethinkdb,sbusso/rethinkdb,captainpete/rethinkdb,Wilbeibi/rethinkdb,4talesa/rethinkdb,ajose01/rethinkdb,AntouanK/rethinkdb,robertjpayne/rethinkdb,ayumilong/rethinkdb,mquandalle/rethinkdb,mcanthony/rethinkdb,victorbriz/rethinkdb,bchavez/rethinkdb,yakovenkodenis/rethinkdb,wkennington/rethinkdb,gavioto/rethinkdb,jesseditson/rethinkdb,mquandalle/rethinkdb,wujf/rethinkdb,sebadiaz/rethinkdb,mbroadst/rethinkdb,Qinusty/rethinkdb,AntouanK/rethinkdb,grandquista/rethinkdb,JackieXie168/rethinkdb,spblightadv/rethinkdb,Wilbeibi/rethinkdb,wkennington/rethinkdb,urandu/rethinkdb,bchavez/rethinkdb,mcanthony/rethinkdb,KSanthanam/rethinkdb,ayumilong/rethinkdb,jmptrader/rethinkdb,wojons/rethinkdb,greyhwndz/rethinkdb,victorbriz/rethinkdb,niieani/rethinkdb,mbroadst/rethinkdb,dparnell/rethinkdb,ayumilong/rethinkdb,mcanthony/rethinkdb,urandu/rethinkdb,wkennington/rethinkdb,ayumilong/rethinkdb,victorbriz/rethinkdb,spblightadv/rethinkdb,niieani/rethinkdb,pap/rethinkdb,wkennington/rethinkdb,sebadiaz/rethinkdb,niieani/rethinkdb,ajose01/rethinkdb,captainpete/rethinkdb,wojons/rethinkdb,Wilbeibi/rethinkdb,mbroadst/rethinkdb,AntouanK/rethinkdb,RubenKelevra/rethinkdb,greyhwndz/rethinkdb,robertjpayne/rethinkdb,JackieXie168/rethinkdb,RubenKelevra/rethinkdb,elkingtonmcb/rethinkdb,robertjpayne/rethinkdb,jmptrader/rethinkdb,mcanthony/rethinkdb,ajose01/rethinkdb,marshall007/rethinkdb,robertjpayne/rethinkdb,bpradipt/rethinkdb,jmptrader/rethinkdb,scripni/rethinkdb,eliangidoni/rethinkdb,ajose01/rethinkdb,captainpete/rethinkdb,alash3al/rethinkdb,4talesa/rethinkdb,pap/rethinkdb,JackieXie168/rethinkdb,Wilbeibi/rethinkdb,elkingtonmcb/rethinkdb,alash3al/rethinkdb,pap/rethinkdb,
lenstr/rethinkdb,pap/rethinkdb,4talesa/rethinkdb,tempbottle/rethinkdb,mquandalle/rethinkdb,wojons/rethinkdb,grandquista/rethinkdb,rrampage/rethinkdb,KSanthanam/rethinkdb,gavioto/rethinkdb,sontek/rethinkdb,greyhwndz/rethinkdb,scripni/rethinkdb,sbusso/rethinkdb,JackieXie168/rethinkdb,pap/rethinkdb,losywee/rethinkdb,bpradipt/rethinkdb,mbroadst/rethinkdb,dparnell/rethinkdb,wkennington/rethinkdb,yaolinz/rethinkdb,sebadiaz/rethinkdb,scripni/rethinkdb,greyhwndz/rethinkdb
|
import contextlib
import rdb_workload_common
@contextlib.contextmanager
def make_memcache_connection(opts):
with rdb_workload_common.make_table_and_connection(opts) as (table, conn):
yield MemcacheRdbShim(table, conn)
class MemcacheRdbShim(object):
def __init__(self, table, conn):
self.table = table
self.conn = conn
def get(self, key):
response = self.table.get(key).run(self.conn)
if response:
return response['val']
def set(self, key, val):
response = self.table.insert({
'id': key,
'val': val
},
- upsert=True
+ conflict='replace'
).run(self.conn)
error = response.get('first_error')
if error:
raise Exception(error)
return response['inserted'] | response['replaced'] | response['unchanged']
def delete(self, key):
response = self.table.get(key).delete().run(self.conn)
error = response.get('first_error')
if error:
raise Exception(error)
return response['deleted']
def option_parser_for_memcache():
return rdb_workload_common.option_parser_for_connect()
|
Replace upsert=True with conflict='replace' in tests
|
## Code Before:
import contextlib
import rdb_workload_common
@contextlib.contextmanager
def make_memcache_connection(opts):
with rdb_workload_common.make_table_and_connection(opts) as (table, conn):
yield MemcacheRdbShim(table, conn)
class MemcacheRdbShim(object):
def __init__(self, table, conn):
self.table = table
self.conn = conn
def get(self, key):
response = self.table.get(key).run(self.conn)
if response:
return response['val']
def set(self, key, val):
response = self.table.insert({
'id': key,
'val': val
},
upsert=True
).run(self.conn)
error = response.get('first_error')
if error:
raise Exception(error)
return response['inserted'] | response['replaced'] | response['unchanged']
def delete(self, key):
response = self.table.get(key).delete().run(self.conn)
error = response.get('first_error')
if error:
raise Exception(error)
return response['deleted']
def option_parser_for_memcache():
return rdb_workload_common.option_parser_for_connect()
## Instruction:
Replace upsert=True with conflict='replace' in tests
## Code After:
import contextlib
import rdb_workload_common
@contextlib.contextmanager
def make_memcache_connection(opts):
with rdb_workload_common.make_table_and_connection(opts) as (table, conn):
yield MemcacheRdbShim(table, conn)
class MemcacheRdbShim(object):
def __init__(self, table, conn):
self.table = table
self.conn = conn
def get(self, key):
response = self.table.get(key).run(self.conn)
if response:
return response['val']
def set(self, key, val):
response = self.table.insert({
'id': key,
'val': val
},
conflict='replace'
).run(self.conn)
error = response.get('first_error')
if error:
raise Exception(error)
return response['inserted'] | response['replaced'] | response['unchanged']
def delete(self, key):
response = self.table.get(key).delete().run(self.conn)
error = response.get('first_error')
if error:
raise Exception(error)
return response['deleted']
def option_parser_for_memcache():
return rdb_workload_common.option_parser_for_connect()
|
import contextlib
import rdb_workload_common
@contextlib.contextmanager
def make_memcache_connection(opts):
with rdb_workload_common.make_table_and_connection(opts) as (table, conn):
yield MemcacheRdbShim(table, conn)
class MemcacheRdbShim(object):
def __init__(self, table, conn):
self.table = table
self.conn = conn
def get(self, key):
response = self.table.get(key).run(self.conn)
if response:
return response['val']
def set(self, key, val):
response = self.table.insert({
'id': key,
'val': val
},
- upsert=True
+ conflict='replace'
).run(self.conn)
error = response.get('first_error')
if error:
raise Exception(error)
return response['inserted'] | response['replaced'] | response['unchanged']
def delete(self, key):
response = self.table.get(key).delete().run(self.conn)
error = response.get('first_error')
if error:
raise Exception(error)
return response['deleted']
def option_parser_for_memcache():
return rdb_workload_common.option_parser_for_connect()
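For context, a standalone sketch of the `conflict` keyword (RethinkDB 1.13+ Python driver assumed; table and connection details are illustrative):

import rethinkdb as r

conn = r.connect('localhost', 28015)
# conflict='replace' overwrites an existing row with the same primary key
# instead of reporting a duplicate-key error, matching the old upsert=True.
r.table('cache').insert(
    {'id': 'greeting', 'val': 'hello'},
    conflict='replace',
).run(conn)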
|
715987e85b61807a7ba5a3ae8ead8a44fff425cb
|
src/sentry/tasks/base.py
|
src/sentry/tasks/base.py
|
from __future__ import absolute_import
from celery.task import current
from functools import wraps
from sentry.celery import app
from sentry.utils import metrics
def instrumented_task(name, stat_suffix=None, **kwargs):
def wrapped(func):
@wraps(func)
def _wrapped(*args, **kwargs):
key = 'jobs.duration'
if stat_suffix:
instance = '{}.{}'.format(name, stat_suffix(*args, **kwargs))
else:
instance = name
with metrics.timer(key, instance=instance):
result = func(*args, **kwargs)
return result
return app.task(name=name, **kwargs)(_wrapped)
return wrapped
def retry(func):
@wraps(func)
def wrapped(*args, **kwargs):
try:
return func(*args, **kwargs)
except Exception as exc:
current.retry(exc=exc)
return wrapped
|
from __future__ import absolute_import
from celery.task import current
from raven.contrib.django.models import client as Raven
from functools import wraps
from sentry.celery import app
from sentry.utils import metrics
def instrumented_task(name, stat_suffix=None, **kwargs):
def wrapped(func):
@wraps(func)
def _wrapped(*args, **kwargs):
key = 'jobs.duration'
if stat_suffix:
instance = '{}.{}'.format(name, stat_suffix(*args, **kwargs))
else:
instance = name
with metrics.timer(key, instance=instance):
try:
result = func(*args, **kwargs)
finally:
Raven.context.clear()
return result
return app.task(name=name, **kwargs)(_wrapped)
return wrapped
def retry(func):
@wraps(func)
def wrapped(*args, **kwargs):
try:
return func(*args, **kwargs)
except Exception as exc:
current.retry(exc=exc)
return wrapped
|
Clear context for celery tasks
|
Clear context for celery tasks
|
Python
|
bsd-3-clause
|
jean/sentry,ifduyue/sentry,mitsuhiko/sentry,gencer/sentry,mvaled/sentry,jean/sentry,zenefits/sentry,alexm92/sentry,looker/sentry,fotinakis/sentry,jean/sentry,beeftornado/sentry,beeftornado/sentry,looker/sentry,daevaorn/sentry,JamesMura/sentry,alexm92/sentry,JackDanger/sentry,ifduyue/sentry,JackDanger/sentry,looker/sentry,JamesMura/sentry,BuildingLink/sentry,JamesMura/sentry,looker/sentry,ifduyue/sentry,gencer/sentry,mvaled/sentry,nicholasserra/sentry,mvaled/sentry,BuildingLink/sentry,ifduyue/sentry,BuildingLink/sentry,gencer/sentry,zenefits/sentry,BuildingLink/sentry,daevaorn/sentry,zenefits/sentry,daevaorn/sentry,zenefits/sentry,daevaorn/sentry,fotinakis/sentry,alexm92/sentry,jean/sentry,JamesMura/sentry,JackDanger/sentry,nicholasserra/sentry,zenefits/sentry,ifduyue/sentry,gencer/sentry,fotinakis/sentry,JamesMura/sentry,mvaled/sentry,mvaled/sentry,nicholasserra/sentry,mvaled/sentry,BuildingLink/sentry,gencer/sentry,fotinakis/sentry,beeftornado/sentry,looker/sentry,jean/sentry,mitsuhiko/sentry
|
from __future__ import absolute_import
from celery.task import current
+ from raven.contrib.django.models import client as Raven
from functools import wraps
from sentry.celery import app
from sentry.utils import metrics
def instrumented_task(name, stat_suffix=None, **kwargs):
def wrapped(func):
@wraps(func)
def _wrapped(*args, **kwargs):
key = 'jobs.duration'
if stat_suffix:
instance = '{}.{}'.format(name, stat_suffix(*args, **kwargs))
else:
instance = name
with metrics.timer(key, instance=instance):
+ try:
- result = func(*args, **kwargs)
+ result = func(*args, **kwargs)
+ finally:
+ Raven.context.clear()
return result
return app.task(name=name, **kwargs)(_wrapped)
return wrapped
def retry(func):
@wraps(func)
def wrapped(*args, **kwargs):
try:
return func(*args, **kwargs)
except Exception as exc:
current.retry(exc=exc)
return wrapped
|
Clear context for celery tasks
|
## Code Before:
from __future__ import absolute_import
from celery.task import current
from functools import wraps
from sentry.celery import app
from sentry.utils import metrics
def instrumented_task(name, stat_suffix=None, **kwargs):
def wrapped(func):
@wraps(func)
def _wrapped(*args, **kwargs):
key = 'jobs.duration'
if stat_suffix:
instance = '{}.{}'.format(name, stat_suffix(*args, **kwargs))
else:
instance = name
with metrics.timer(key, instance=instance):
result = func(*args, **kwargs)
return result
return app.task(name=name, **kwargs)(_wrapped)
return wrapped
def retry(func):
@wraps(func)
def wrapped(*args, **kwargs):
try:
return func(*args, **kwargs)
except Exception as exc:
current.retry(exc=exc)
return wrapped
## Instruction:
Clear context for celery tasks
## Code After:
from __future__ import absolute_import
from celery.task import current
from raven.contrib.django.models import client as Raven
from functools import wraps
from sentry.celery import app
from sentry.utils import metrics
def instrumented_task(name, stat_suffix=None, **kwargs):
def wrapped(func):
@wraps(func)
def _wrapped(*args, **kwargs):
key = 'jobs.duration'
if stat_suffix:
instance = '{}.{}'.format(name, stat_suffix(*args, **kwargs))
else:
instance = name
with metrics.timer(key, instance=instance):
try:
result = func(*args, **kwargs)
finally:
Raven.context.clear()
return result
return app.task(name=name, **kwargs)(_wrapped)
return wrapped
def retry(func):
@wraps(func)
def wrapped(*args, **kwargs):
try:
return func(*args, **kwargs)
except Exception as exc:
current.retry(exc=exc)
return wrapped
|
from __future__ import absolute_import
from celery.task import current
+ from raven.contrib.django.models import client as Raven
from functools import wraps
from sentry.celery import app
from sentry.utils import metrics
def instrumented_task(name, stat_suffix=None, **kwargs):
def wrapped(func):
@wraps(func)
def _wrapped(*args, **kwargs):
key = 'jobs.duration'
if stat_suffix:
instance = '{}.{}'.format(name, stat_suffix(*args, **kwargs))
else:
instance = name
with metrics.timer(key, instance=instance):
+ try:
- result = func(*args, **kwargs)
+ result = func(*args, **kwargs)
? ++++
+ finally:
+ Raven.context.clear()
return result
return app.task(name=name, **kwargs)(_wrapped)
return wrapped
def retry(func):
@wraps(func)
def wrapped(*args, **kwargs):
try:
return func(*args, **kwargs)
except Exception as exc:
current.retry(exc=exc)
return wrapped
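A hypothetical task built on the decorator above, to show where the cleanup now applies (task name and arguments are illustrative):

@instrumented_task(name='jobs.send_email', stat_suffix=lambda to, **kw: to)
def send_email(to):
    # any Raven context (tags, extra data) accumulated while this body runs
    # is cleared by the finally block above, whether it returns or raises
    pass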
|
7015766b70bf56f9338713c4302aa3cba75510c5
|
app/tests/test_views.py
|
app/tests/test_views.py
|
from django.test import TestCase
from django.core.urlresolvers import reverse
class IndexViewCase(TestCase):
"""Index view case"""
def setUp(self):
self.url = reverse('home')
def test_get_ok(self):
"""Test status=200"""
response = self.client.get(self.url)
self.assertEqual(response.status_code, 200)
|
import sure
from django.test import TestCase
from django.core.urlresolvers import reverse
class IndexViewCase(TestCase):
"""Index view case"""
def setUp(self):
self.url = reverse('home')
def test_get_ok(self):
"""Test status=200"""
response = self.client.get(self.url)
response.status_code.should.be.equal(200)
|
Use sure in app tests
|
Use sure in app tests
|
Python
|
mit
|
nvbn/coviolations_web,nvbn/coviolations_web
|
+ import sure
from django.test import TestCase
from django.core.urlresolvers import reverse
class IndexViewCase(TestCase):
"""Index view case"""
def setUp(self):
self.url = reverse('home')
def test_get_ok(self):
"""Test status=200"""
response = self.client.get(self.url)
- self.assertEqual(response.status_code, 200)
+ response.status_code.should.be.equal(200)
|
Use sure in app tests
|
## Code Before:
from django.test import TestCase
from django.core.urlresolvers import reverse
class IndexViewCase(TestCase):
"""Index view case"""
def setUp(self):
self.url = reverse('home')
def test_get_ok(self):
"""Test status=200"""
response = self.client.get(self.url)
self.assertEqual(response.status_code, 200)
## Instruction:
Use sure in app tests
## Code After:
import sure
from django.test import TestCase
from django.core.urlresolvers import reverse
class IndexViewCase(TestCase):
"""Index view case"""
def setUp(self):
self.url = reverse('home')
def test_get_ok(self):
"""Test status=200"""
response = self.client.get(self.url)
response.status_code.should.be.equal(200)
|
+ import sure
from django.test import TestCase
from django.core.urlresolvers import reverse
class IndexViewCase(TestCase):
"""Index view case"""
def setUp(self):
self.url = reverse('home')
def test_get_ok(self):
"""Test status=200"""
response = self.client.get(self.url)
- self.assertEqual(response.status_code, 200)
+ response.status_code.should.be.equal(200)
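For context, a couple more assertions in the same fluent style (standard `sure` API; importing the module is what patches `should` onto objects):

import sure  # noqa

(2 + 2).should.be.equal(4)
[1, 2, 3].should.contain(2)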
|
77e4fb7ef74bcfd58b548cca8ec9898eb936e7ef
|
conanfile.py
|
conanfile.py
|
from conans import ConanFile, CMake
class EsappConan(ConanFile):
name = 'esapp'
version = '0.4.1'
url = 'https://github.com/jason2506/esapp'
license = 'BSD 3-Clause'
author = 'Chi-En Wu'
requires = 'desa/0.1.0@jason2506/testing'
settings = 'os', 'compiler', 'build_type', 'arch'
generators = 'cmake'
default_options = (
'desa:build_tests=False'
)
exports = (
'CMakeLists.txt',
'cmake/*.cmake',
'include/*.hpp'
)
def build(self):
cmake = CMake(self.settings)
args = []
args.append('-DENABLE_CONAN=%s' % self.options.enable_conan)
args.append('-DBUILD_TESTING=%s' % self.options.build_tests)
args.append('-DCMAKE_INSTALL_PREFIX="%s"' % self.package_folder)
self.run('cmake "%s" %s %s' % (
self.conanfile_directory,
cmake.command_line,
' '.join(args)
))
self.run('cmake --build .')
def package(self):
cmake = CMake(self.settings)
self.run('cmake --build . --target install %s' % cmake.build_config)
|
from conans import ConanFile, CMake
class EsappConan(ConanFile):
name = 'esapp'
version = '0.4.1'
url = 'https://github.com/jason2506/esapp'
license = 'BSD 3-Clause'
author = 'Chi-En Wu'
requires = 'desa/0.1.0@jason2506/testing'
settings = 'os', 'compiler', 'build_type', 'arch'
generators = 'cmake'
exports = (
'CMakeLists.txt',
'cmake/*.cmake',
'include/*.hpp'
)
def build(self):
cmake = CMake(self.settings)
args = []
args.append('-DENABLE_CONAN=%s' % self.options.enable_conan)
args.append('-DBUILD_TESTING=%s' % self.options.build_tests)
args.append('-DCMAKE_INSTALL_PREFIX="%s"' % self.package_folder)
self.run('cmake "%s" %s %s' % (
self.conanfile_directory,
cmake.command_line,
' '.join(args)
))
self.run('cmake --build .')
def package(self):
cmake = CMake(self.settings)
self.run('cmake --build . --target install %s' % cmake.build_config)
|
Remove default option for `desa`
|
Remove default option for `desa`
|
Python
|
bsd-3-clause
|
jason2506/esapp,jason2506/esapp
|
from conans import ConanFile, CMake
class EsappConan(ConanFile):
name = 'esapp'
version = '0.4.1'
url = 'https://github.com/jason2506/esapp'
license = 'BSD 3-Clause'
author = 'Chi-En Wu'
requires = 'desa/0.1.0@jason2506/testing'
settings = 'os', 'compiler', 'build_type', 'arch'
generators = 'cmake'
- default_options = (
- 'desa:build_tests=False'
- )
exports = (
'CMakeLists.txt',
'cmake/*.cmake',
'include/*.hpp'
)
def build(self):
cmake = CMake(self.settings)
args = []
args.append('-DENABLE_CONAN=%s' % self.options.enable_conan)
args.append('-DBUILD_TESTING=%s' % self.options.build_tests)
args.append('-DCMAKE_INSTALL_PREFIX="%s"' % self.package_folder)
self.run('cmake "%s" %s %s' % (
self.conanfile_directory,
cmake.command_line,
' '.join(args)
))
self.run('cmake --build .')
def package(self):
cmake = CMake(self.settings)
self.run('cmake --build . --target install %s' % cmake.build_config)
|
Remove default option for `desa`
|
## Code Before:
from conans import ConanFile, CMake
class EsappConan(ConanFile):
name = 'esapp'
version = '0.4.1'
url = 'https://github.com/jason2506/esapp'
license = 'BSD 3-Clause'
author = 'Chi-En Wu'
requires = 'desa/0.1.0@jason2506/testing'
settings = 'os', 'compiler', 'build_type', 'arch'
generators = 'cmake'
default_options = (
'desa:build_tests=False'
)
exports = (
'CMakeLists.txt',
'cmake/*.cmake',
'include/*.hpp'
)
def build(self):
cmake = CMake(self.settings)
args = []
args.append('-DENABLE_CONAN=%s' % self.options.enable_conan)
args.append('-DBUILD_TESTING=%s' % self.options.build_tests)
args.append('-DCMAKE_INSTALL_PREFIX="%s"' % self.package_folder)
self.run('cmake "%s" %s %s' % (
self.conanfile_directory,
cmake.command_line,
' '.join(args)
))
self.run('cmake --build .')
def package(self):
cmake = CMake(self.settings)
self.run('cmake --build . --target install %s' % cmake.build_config)
## Instruction:
Remove default option for `desa`
## Code After:
from conans import ConanFile, CMake
class EsappConan(ConanFile):
name = 'esapp'
version = '0.4.1'
url = 'https://github.com/jason2506/esapp'
license = 'BSD 3-Clause'
author = 'Chi-En Wu'
requires = 'desa/0.1.0@jason2506/testing'
settings = 'os', 'compiler', 'build_type', 'arch'
generators = 'cmake'
exports = (
'CMakeLists.txt',
'cmake/*.cmake',
'include/*.hpp'
)
def build(self):
cmake = CMake(self.settings)
args = []
args.append('-DENABLE_CONAN=%s' % self.options.enable_conan)
args.append('-DBUILD_TESTING=%s' % self.options.build_tests)
args.append('-DCMAKE_INSTALL_PREFIX="%s"' % self.package_folder)
self.run('cmake "%s" %s %s' % (
self.conanfile_directory,
cmake.command_line,
' '.join(args)
))
self.run('cmake --build .')
def package(self):
cmake = CMake(self.settings)
self.run('cmake --build . --target install %s' % cmake.build_config)
|
from conans import ConanFile, CMake
class EsappConan(ConanFile):
name = 'esapp'
version = '0.4.1'
url = 'https://github.com/jason2506/esapp'
license = 'BSD 3-Clause'
author = 'Chi-En Wu'
requires = 'desa/0.1.0@jason2506/testing'
settings = 'os', 'compiler', 'build_type', 'arch'
generators = 'cmake'
- default_options = (
- 'desa:build_tests=False'
- )
exports = (
'CMakeLists.txt',
'cmake/*.cmake',
'include/*.hpp'
)
def build(self):
cmake = CMake(self.settings)
args = []
args.append('-DENABLE_CONAN=%s' % self.options.enable_conan)
args.append('-DBUILD_TESTING=%s' % self.options.build_tests)
args.append('-DCMAKE_INSTALL_PREFIX="%s"' % self.package_folder)
self.run('cmake "%s" %s %s' % (
self.conanfile_directory,
cmake.command_line,
' '.join(args)
))
self.run('cmake --build .')
def package(self):
cmake = CMake(self.settings)
self.run('cmake --build . --target install %s' % cmake.build_config)
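Since the recipe no longer pins it, a consumer can still set the `desa` option explicitly; a hypothetical downstream conanfile (conan 1.x syntax assumed):

from conans import ConanFile

class ConsumerConan(ConanFile):
    requires = 'esapp/0.4.1@jason2506/testing'
    # illustrative: the consumer now decides this value explicitly
    default_options = ('desa:build_tests=False',)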
|
9ae4ebf7e95cb301321911886cbb4041fae1eff6
|
bookmarks/search_indexes.py
|
bookmarks/search_indexes.py
|
from haystack.indexes import CharField, DateTimeField, RealTimeSearchIndex, Indexable
from models import Bookmark
class BookmarkIndex(RealTimeSearchIndex, Indexable):
text = CharField(document=True, use_template=True)
title = CharField(model_attr='description')
author = CharField(model_attr='adder')
pub_date = DateTimeField(model_attr='added')
summary = CharField(model_attr='note')
sites = CharField(model_attr='site_slugs')
def index_queryset(self):
"""Used when the entire index for model is updated."""
return self.get_model().objects.all()
def get_model(self):
return Bookmark
|
from haystack.indexes import CharField, DateTimeField, SearchIndex, Indexable
from models import Bookmark
class BookmarkIndex(SearchIndex, Indexable):
text = CharField(document=True, use_template=True)
title = CharField(model_attr='description')
author = CharField(model_attr='adder')
pub_date = DateTimeField(model_attr='added')
summary = CharField(model_attr='note')
sites = CharField(model_attr='site_slugs')
def index_queryset(self):
"""Used when the entire index for model is updated."""
return self.get_model().objects.all()
def get_model(self):
return Bookmark
|
Use `SearchIndex` instead of deprecated `RealTimeSearchIndex`.
|
Use `SearchIndex` instead of deprecated `RealTimeSearchIndex`.
|
Python
|
mit
|
incuna/incuna-bookmarks,incuna/incuna-bookmarks
|
- from haystack.indexes import CharField, DateTimeField, RealTimeSearchIndex, Indexable
+ from haystack.indexes import CharField, DateTimeField, SearchIndex, Indexable
from models import Bookmark
- class BookmarkIndex(RealTimeSearchIndex, Indexable):
+ class BookmarkIndex(SearchIndex, Indexable):
text = CharField(document=True, use_template=True)
title = CharField(model_attr='description')
author = CharField(model_attr='adder')
pub_date = DateTimeField(model_attr='added')
summary = CharField(model_attr='note')
sites = CharField(model_attr='site_slugs')
def index_queryset(self):
"""Used when the entire index for model is updated."""
return self.get_model().objects.all()
def get_model(self):
return Bookmark
|
Use `SearchIndex` instead of deprecated `RealTimeSearchIndex`.
|
## Code Before:
from haystack.indexes import CharField, DateTimeField, RealTimeSearchIndex, Indexable
from models import Bookmark
class BookmarkIndex(RealTimeSearchIndex, Indexable):
text = CharField(document=True, use_template=True)
title = CharField(model_attr='description')
author = CharField(model_attr='adder')
pub_date = DateTimeField(model_attr='added')
summary = CharField(model_attr='note')
sites = CharField(model_attr='site_slugs')
def index_queryset(self):
"""Used when the entire index for model is updated."""
return self.get_model().objects.all()
def get_model(self):
return Bookmark
## Instruction:
Use `SearchIndex` instead of deprecated `RealTimeSearchIndex`.
## Code After:
from haystack.indexes import CharField, DateTimeField, SearchIndex, Indexable
from models import Bookmark
class BookmarkIndex(SearchIndex, Indexable):
text = CharField(document=True, use_template=True)
title = CharField(model_attr='description')
author = CharField(model_attr='adder')
pub_date = DateTimeField(model_attr='added')
summary = CharField(model_attr='note')
sites = CharField(model_attr='site_slugs')
def index_queryset(self):
"""Used when the entire index for model is updated."""
return self.get_model().objects.all()
def get_model(self):
return Bookmark
|
- from haystack.indexes import CharField, DateTimeField, RealTimeSearchIndex, Indexable
? --------
+ from haystack.indexes import CharField, DateTimeField, SearchIndex, Indexable
from models import Bookmark
- class BookmarkIndex(RealTimeSearchIndex, Indexable):
? --------
+ class BookmarkIndex(SearchIndex, Indexable):
text = CharField(document=True, use_template=True)
title = CharField(model_attr='description')
author = CharField(model_attr='adder')
pub_date = DateTimeField(model_attr='added')
summary = CharField(model_attr='note')
sites = CharField(model_attr='site_slugs')
def index_queryset(self):
"""Used when the entire index for model is updated."""
return self.get_model().objects.all()
def get_model(self):
return Bookmark
|
cb73b357d50603a1bce1184b28266fb55a4fd4ae
|
django_ethereum_events/web3_service.py
|
django_ethereum_events/web3_service.py
|
from django.conf import settings
from web3 import HTTPProvider, Web3
from web3.middleware import geth_poa_middleware
from .utils import Singleton
class Web3Service(metaclass=Singleton):
"""Creates a `web3` instance based on the given Provider."""
def __init__(self, *args, **kwargs):
"""Initializes the `web3` object.
Args:
rpc_provider (HTTPProvider): Valid `web3` HTTPProvider instance (optional)
"""
rpc_provider = kwargs.pop('rpc_provider', None)
if not rpc_provider:
timeout = getattr(settings, "ETHEREUM_NODE_TIMEOUT", 10)
uri = "{scheme}://{host}:{port}".format(
host=settings.ETHEREUM_NODE_HOST,
port=settings.ETHEREUM_NODE_PORT,
scheme="https" if settings.ETHEREUM_NODE_SSL else "http",
)
rpc_provider = HTTPProvider(
endpoint_uri=uri,
request_kwargs={
"timeout": timeout
}
)
self.web3 = Web3(rpc_provider)
# If running in a network with PoA consensus, inject the middleware
if getattr(settings, "ETHEREUM_GETH_POA", False):
self.web3.middleware_stack.inject(geth_poa_middleware, layer=0)
super(Web3Service, self).__init__()
|
from django.conf import settings
from web3 import HTTPProvider, Web3
from web3.middleware import geth_poa_middleware
from .utils import Singleton
class Web3Service(metaclass=Singleton):
"""Creates a `web3` instance based on the given Provider."""
def __init__(self, *args, **kwargs):
"""Initializes the `web3` object.
Args:
rpc_provider (HTTPProvider): Valid `web3` HTTPProvider instance (optional)
"""
rpc_provider = kwargs.pop('rpc_provider', None)
if not rpc_provider:
timeout = getattr(settings, "ETHEREUM_NODE_TIMEOUT", 10)
uri = settings.ETHEREUM_NODE_PORT
rpc_provider = HTTPProvider(
endpoint_uri=uri,
request_kwargs={
"timeout": timeout
}
)
self.web3 = Web3(rpc_provider)
# If running in a network with PoA consensus, inject the middleware
if getattr(settings, "ETHEREUM_GETH_POA", False):
self.web3.middleware_stack.inject(geth_poa_middleware, layer=0)
super(Web3Service, self).__init__()
|
Change env variables for node setup to single URI variable
|
Change env variables for node setup to single URI variable
|
Python
|
mit
|
artemistomaras/django-ethereum-events,artemistomaras/django-ethereum-events
|
from django.conf import settings
-
from web3 import HTTPProvider, Web3
from web3.middleware import geth_poa_middleware
from .utils import Singleton
class Web3Service(metaclass=Singleton):
"""Creates a `web3` instance based on the given Provider."""
def __init__(self, *args, **kwargs):
"""Initializes the `web3` object.
Args:
rpc_provider (HTTPProvider): Valid `web3` HTTPProvider instance (optional)
"""
rpc_provider = kwargs.pop('rpc_provider', None)
if not rpc_provider:
timeout = getattr(settings, "ETHEREUM_NODE_TIMEOUT", 10)
- uri = "{scheme}://{host}:{port}".format(
- host=settings.ETHEREUM_NODE_HOST,
- port=settings.ETHEREUM_NODE_PORT,
+ uri = settings.ETHEREUM_NODE_PORT
- scheme="https" if settings.ETHEREUM_NODE_SSL else "http",
- )
rpc_provider = HTTPProvider(
endpoint_uri=uri,
request_kwargs={
"timeout": timeout
}
)
self.web3 = Web3(rpc_provider)
# If running in a network with PoA consensus, inject the middleware
if getattr(settings, "ETHEREUM_GETH_POA", False):
self.web3.middleware_stack.inject(geth_poa_middleware, layer=0)
super(Web3Service, self).__init__()
|
Change env variables for node setup to single URI variable
|
## Code Before:
from django.conf import settings
from web3 import HTTPProvider, Web3
from web3.middleware import geth_poa_middleware
from .utils import Singleton
class Web3Service(metaclass=Singleton):
"""Creates a `web3` instance based on the given Provider."""
def __init__(self, *args, **kwargs):
"""Initializes the `web3` object.
Args:
rpc_provider (HTTPProvider): Valid `web3` HTTPProvider instance (optional)
"""
rpc_provider = kwargs.pop('rpc_provider', None)
if not rpc_provider:
timeout = getattr(settings, "ETHEREUM_NODE_TIMEOUT", 10)
uri = "{scheme}://{host}:{port}".format(
host=settings.ETHEREUM_NODE_HOST,
port=settings.ETHEREUM_NODE_PORT,
scheme="https" if settings.ETHEREUM_NODE_SSL else "http",
)
rpc_provider = HTTPProvider(
endpoint_uri=uri,
request_kwargs={
"timeout": timeout
}
)
self.web3 = Web3(rpc_provider)
# If running in a network with PoA consensus, inject the middleware
if getattr(settings, "ETHEREUM_GETH_POA", False):
self.web3.middleware_stack.inject(geth_poa_middleware, layer=0)
super(Web3Service, self).__init__()
## Instruction:
Change env variables for node setup to single URI variable
## Code After:
from django.conf import settings
from web3 import HTTPProvider, Web3
from web3.middleware import geth_poa_middleware
from .utils import Singleton
class Web3Service(metaclass=Singleton):
"""Creates a `web3` instance based on the given Provider."""
def __init__(self, *args, **kwargs):
"""Initializes the `web3` object.
Args:
rpc_provider (HTTPProvider): Valid `web3` HTTPProvider instance (optional)
"""
rpc_provider = kwargs.pop('rpc_provider', None)
if not rpc_provider:
timeout = getattr(settings, "ETHEREUM_NODE_TIMEOUT", 10)
uri = settings.ETHEREUM_NODE_PORT
rpc_provider = HTTPProvider(
endpoint_uri=uri,
request_kwargs={
"timeout": timeout
}
)
self.web3 = Web3(rpc_provider)
# If running in a network with PoA consensus, inject the middleware
if getattr(settings, "ETHEREUM_GETH_POA", False):
self.web3.middleware_stack.inject(geth_poa_middleware, layer=0)
super(Web3Service, self).__init__()
|
from django.conf import settings
-
from web3 import HTTPProvider, Web3
from web3.middleware import geth_poa_middleware
from .utils import Singleton
class Web3Service(metaclass=Singleton):
"""Creates a `web3` instance based on the given Provider."""
def __init__(self, *args, **kwargs):
"""Initializes the `web3` object.
Args:
rpc_provider (HTTPProvider): Valid `web3` HTTPProvider instance (optional)
"""
rpc_provider = kwargs.pop('rpc_provider', None)
if not rpc_provider:
timeout = getattr(settings, "ETHEREUM_NODE_TIMEOUT", 10)
- uri = "{scheme}://{host}:{port}".format(
- host=settings.ETHEREUM_NODE_HOST,
- port=settings.ETHEREUM_NODE_PORT,
? ------- -
+ uri = settings.ETHEREUM_NODE_PORT
? +++ +
- scheme="https" if settings.ETHEREUM_NODE_SSL else "http",
- )
rpc_provider = HTTPProvider(
endpoint_uri=uri,
request_kwargs={
"timeout": timeout
}
)
self.web3 = Web3(rpc_provider)
# If running in a network with PoA consensus, inject the middleware
if getattr(settings, "ETHEREUM_GETH_POA", False):
self.web3.middleware_stack.inject(geth_poa_middleware, layer=0)
super(Web3Service, self).__init__()
|
6f265e37361b1447cf55c5d79cfe3ba6b6047b57
|
tests/examples/helloworld/flows.py
|
tests/examples/helloworld/flows.py
|
from viewflow import flow, lock, views as flow_views
from viewflow.base import this, Flow
from viewflow.site import viewsite
from .models import HelloWorldProcess
from .tasks import send_hello_world_request
class HelloWorldFlow(Flow):
"""
Hello world
    This process demonstrates a hello world approval request flow.
    1. A user with the *helloworld.can_start_process* permission creates a hello world request
    2. A manager who has the *helloworld.can_approve_request* permission approves it
    3. If the request was approved, a background celery job sends it to the world
    4. Otherwise, the request is cancelled
"""
process_cls = HelloWorldProcess
lock_impl = lock.select_for_update_lock
start = flow.Start(flow_views.StartProcessView, fields=['text']) \
.Permission(auto_create=True) \
.Next(this.approve)
approve = flow.View(flow_views.ProcessView, fields=['approved']) \
.Permission(auto_create=True) \
.Next(this.send)
check_approve = flow.If(cond=lambda p: p.approved) \
.OnTrue(this.send) \
.OnFalse(this.end)
send = flow.Job(send_hello_world_request) \
.Next(this.end)
end = flow.End()
viewsite.register(HelloWorldFlow)
|
from viewflow import flow, lock, views as flow_views
from viewflow.base import this, Flow
from viewflow.site import viewsite
from .models import HelloWorldProcess
from .tasks import send_hello_world_request
class HelloWorldFlow(Flow):
"""
Hello world
    This process demonstrates a hello world approval request flow.
    1. A user with the *helloworld.can_start_process* permission creates a hello world request
    2. A manager who has the *helloworld.can_approve_request* permission approves it
    3. If the request was approved, a background celery job sends it to the world
    4. Otherwise, the request is cancelled
"""
process_cls = HelloWorldProcess
lock_impl = lock.select_for_update_lock
start = flow.Start(flow_views.StartProcessView, fields=['text']) \
.Permission(auto_create=True) \
.Next(this.approve)
approve = flow.View(flow_views.ProcessView, fields=['approved']) \
.Permission(auto_create=True) \
.Next(this.check_approve)
check_approve = flow.If(cond=lambda p: p.approved) \
.OnTrue(this.send) \
.OnFalse(this.end)
send = flow.Job(send_hello_world_request) \
.Next(this.end)
end = flow.End()
viewsite.register(HelloWorldFlow)
|
Fix hello world sample flow
|
Fix hello world sample flow
|
Python
|
agpl-3.0
|
ribeiro-ucl/viewflow,ribeiro-ucl/viewflow,ribeiro-ucl/viewflow,codingjoe/viewflow,viewflow/viewflow,codingjoe/viewflow,codingjoe/viewflow,pombredanne/viewflow,pombredanne/viewflow,viewflow/viewflow,viewflow/viewflow
|
from viewflow import flow, lock, views as flow_views
from viewflow.base import this, Flow
from viewflow.site import viewsite
from .models import HelloWorldProcess
from .tasks import send_hello_world_request
class HelloWorldFlow(Flow):
"""
Hello world
    This process demonstrates a hello world approval request flow.
    1. A user with the *helloworld.can_start_process* permission creates a hello world request
    2. A manager who has the *helloworld.can_approve_request* permission approves it
    3. If the request was approved, a background celery job sends it to the world
    4. Otherwise, the request is cancelled
"""
process_cls = HelloWorldProcess
lock_impl = lock.select_for_update_lock
start = flow.Start(flow_views.StartProcessView, fields=['text']) \
.Permission(auto_create=True) \
.Next(this.approve)
approve = flow.View(flow_views.ProcessView, fields=['approved']) \
.Permission(auto_create=True) \
- .Next(this.send)
+ .Next(this.check_approve)
check_approve = flow.If(cond=lambda p: p.approved) \
.OnTrue(this.send) \
.OnFalse(this.end)
send = flow.Job(send_hello_world_request) \
.Next(this.end)
end = flow.End()
viewsite.register(HelloWorldFlow)
|
Fix hello world sample flow
|
## Code Before:
from viewflow import flow, lock, views as flow_views
from viewflow.base import this, Flow
from viewflow.site import viewsite
from .models import HelloWorldProcess
from .tasks import send_hello_world_request
class HelloWorldFlow(Flow):
"""
Hello world
    This process demonstrates a hello world approval request flow.
    1. A user with the *helloworld.can_start_process* permission creates a hello world request
    2. A manager who has the *helloworld.can_approve_request* permission approves it
    3. If the request was approved, a background celery job sends it to the world
    4. Otherwise, the request is cancelled
"""
process_cls = HelloWorldProcess
lock_impl = lock.select_for_update_lock
start = flow.Start(flow_views.StartProcessView, fields=['text']) \
.Permission(auto_create=True) \
.Next(this.approve)
approve = flow.View(flow_views.ProcessView, fields=['approved']) \
.Permission(auto_create=True) \
.Next(this.send)
check_approve = flow.If(cond=lambda p: p.approved) \
.OnTrue(this.send) \
.OnFalse(this.end)
send = flow.Job(send_hello_world_request) \
.Next(this.end)
end = flow.End()
viewsite.register(HelloWorldFlow)
## Instruction:
Fix hello world sample flow
## Code After:
from viewflow import flow, lock, views as flow_views
from viewflow.base import this, Flow
from viewflow.site import viewsite
from .models import HelloWorldProcess
from .tasks import send_hello_world_request
class HelloWorldFlow(Flow):
"""
Hello world
    This process demonstrates a hello world approval request flow.
    1. A user with the *helloworld.can_start_process* permission creates a hello world request
    2. A manager who has the *helloworld.can_approve_request* permission approves it
    3. If the request was approved, a background celery job sends it to the world
    4. Otherwise, the request is cancelled
"""
process_cls = HelloWorldProcess
lock_impl = lock.select_for_update_lock
start = flow.Start(flow_views.StartProcessView, fields=['text']) \
.Permission(auto_create=True) \
.Next(this.approve)
approve = flow.View(flow_views.ProcessView, fields=['approved']) \
.Permission(auto_create=True) \
.Next(this.check_approve)
check_approve = flow.If(cond=lambda p: p.approved) \
.OnTrue(this.send) \
.OnFalse(this.end)
send = flow.Job(send_hello_world_request) \
.Next(this.end)
end = flow.End()
viewsite.register(HelloWorldFlow)
|
from viewflow import flow, lock, views as flow_views
from viewflow.base import this, Flow
from viewflow.site import viewsite
from .models import HelloWorldProcess
from .tasks import send_hello_world_request
class HelloWorldFlow(Flow):
"""
Hello world
    This process demonstrates a hello world approval request flow.
    1. A user with the *helloworld.can_start_process* permission creates a hello world request
    2. A manager who has the *helloworld.can_approve_request* permission approves it
    3. If the request was approved, a background celery job sends it to the world
    4. Otherwise, the request is cancelled
"""
process_cls = HelloWorldProcess
lock_impl = lock.select_for_update_lock
start = flow.Start(flow_views.StartProcessView, fields=['text']) \
.Permission(auto_create=True) \
.Next(this.approve)
approve = flow.View(flow_views.ProcessView, fields=['approved']) \
.Permission(auto_create=True) \
- .Next(this.send)
+ .Next(this.check_approve)
check_approve = flow.If(cond=lambda p: p.approved) \
.OnTrue(this.send) \
.OnFalse(this.end)
send = flow.Job(send_hello_world_request) \
.Next(this.end)
end = flow.End()
viewsite.register(HelloWorldFlow)
|
70929aa10fb59ed25c8fc4e76ce60bd6d2934c3f
|
rcamp/rcamp/settings/auth.py
|
rcamp/rcamp/settings/auth.py
|
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'lib.pam_backend.PamBackend',
)
AUTH_USER_MODEL = 'accounts.User'
LOGIN_URL = '/login'
PAM_SERVICES = {
'default': 'login',
'csu': 'csu'
}
|
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'lib.pam_backend.PamBackend',
)
AUTH_USER_MODEL = 'accounts.User'
LOGIN_URL = '/login'
PAM_SERVICES = {
'default': 'curc-twofactor-duo',
'csu': 'csu'
}
|
Change the default pam login service
|
Change the default pam login service
|
Python
|
mit
|
ResearchComputing/RCAMP,ResearchComputing/RCAMP,ResearchComputing/RCAMP,ResearchComputing/RCAMP
|
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'lib.pam_backend.PamBackend',
)
AUTH_USER_MODEL = 'accounts.User'
LOGIN_URL = '/login'
PAM_SERVICES = {
- 'default': 'login',
+ 'default': 'curc-twofactor-duo',
'csu': 'csu'
}
|
Change the default pam login service
|
## Code Before:
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'lib.pam_backend.PamBackend',
)
AUTH_USER_MODEL = 'accounts.User'
LOGIN_URL = '/login'
PAM_SERVICES = {
'default': 'login',
'csu': 'csu'
}
## Instruction:
Change the default pam login service
## Code After:
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'lib.pam_backend.PamBackend',
)
AUTH_USER_MODEL = 'accounts.User'
LOGIN_URL = '/login'
PAM_SERVICES = {
'default': 'curc-twofactor-duo',
'csu': 'csu'
}
|
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'lib.pam_backend.PamBackend',
)
AUTH_USER_MODEL = 'accounts.User'
LOGIN_URL = '/login'
PAM_SERVICES = {
- 'default': 'login',
+ 'default': 'curc-twofactor-duo',
'csu': 'csu'
}
|
1abfdea38e868d68c532961459d2b4cbef5a9b71
|
src/zeit/website/section.py
|
src/zeit/website/section.py
|
import zeit.website.interfaces
from zeit.cms.section.interfaces import ISectionMarker
import grokcore.component as grok
import zeit.cms.checkout.interfaces
import zeit.cms.content.interfaces
import zope.interface
@grok.subscribe(
zeit.cms.content.interfaces.ICommonMetadata,
zeit.cms.checkout.interfaces.IBeforeCheckinEvent)
def provide_website_content(content, event):
content = zope.security.proxy.getObject(content)
if not content.rebrush_website_content:
return
for iface in zope.interface.providedBy(content):
if issubclass(iface, ISectionMarker):
zope.interface.noLongerProvides(content, iface)
zope.interface.alsoProvides(content, zeit.website.interfaces.IWebsiteSection)
|
import zeit.website.interfaces
from zeit.cms.section.interfaces import ISectionMarker
import grokcore.component as grok
import zeit.cms.checkout.interfaces
import zeit.cms.content.interfaces
import zope.interface
@grok.subscribe(
zeit.cms.content.interfaces.ICommonMetadata,
zeit.cms.checkout.interfaces.IBeforeCheckinEvent)
def provide_website_content(content, event):
content = zope.security.proxy.getObject(content)
if not content.rebrush_website_content:
zope.interface.noLongerProvides(content,
zeit.website.interfaces.IWebsiteSection)
return
for iface in zope.interface.providedBy(content):
if issubclass(iface, ISectionMarker):
zope.interface.noLongerProvides(content, iface)
zope.interface.alsoProvides(content, zeit.website.interfaces.IWebsiteSection)
|
Remove iface, if rebrush_contetn ist not set
|
Remove iface, if rebrush_contetn ist not set
|
Python
|
bsd-3-clause
|
ZeitOnline/zeit.website
|
import zeit.website.interfaces
from zeit.cms.section.interfaces import ISectionMarker
import grokcore.component as grok
import zeit.cms.checkout.interfaces
import zeit.cms.content.interfaces
import zope.interface
@grok.subscribe(
zeit.cms.content.interfaces.ICommonMetadata,
zeit.cms.checkout.interfaces.IBeforeCheckinEvent)
def provide_website_content(content, event):
content = zope.security.proxy.getObject(content)
if not content.rebrush_website_content:
+ zope.interface.noLongerProvides(content,
+ zeit.website.interfaces.IWebsiteSection)
return
for iface in zope.interface.providedBy(content):
if issubclass(iface, ISectionMarker):
zope.interface.noLongerProvides(content, iface)
zope.interface.alsoProvides(content, zeit.website.interfaces.IWebsiteSection)
|
Remove iface, if rebrush_contetn ist not set
|
## Code Before:
import zeit.website.interfaces
from zeit.cms.section.interfaces import ISectionMarker
import grokcore.component as grok
import zeit.cms.checkout.interfaces
import zeit.cms.content.interfaces
import zope.interface
@grok.subscribe(
zeit.cms.content.interfaces.ICommonMetadata,
zeit.cms.checkout.interfaces.IBeforeCheckinEvent)
def provide_website_content(content, event):
content = zope.security.proxy.getObject(content)
if not content.rebrush_website_content:
return
for iface in zope.interface.providedBy(content):
if issubclass(iface, ISectionMarker):
zope.interface.noLongerProvides(content, iface)
zope.interface.alsoProvides(content, zeit.website.interfaces.IWebsiteSection)
## Instruction:
Remove iface, if rebrush_contetn ist not set
## Code After:
import zeit.website.interfaces
from zeit.cms.section.interfaces import ISectionMarker
import grokcore.component as grok
import zeit.cms.checkout.interfaces
import zeit.cms.content.interfaces
import zope.interface
@grok.subscribe(
zeit.cms.content.interfaces.ICommonMetadata,
zeit.cms.checkout.interfaces.IBeforeCheckinEvent)
def provide_website_content(content, event):
content = zope.security.proxy.getObject(content)
if not content.rebrush_website_content:
zope.interface.noLongerProvides(content,
zeit.website.interfaces.IWebsiteSection)
return
for iface in zope.interface.providedBy(content):
if issubclass(iface, ISectionMarker):
zope.interface.noLongerProvides(content, iface)
zope.interface.alsoProvides(content, zeit.website.interfaces.IWebsiteSection)
|
import zeit.website.interfaces
from zeit.cms.section.interfaces import ISectionMarker
import grokcore.component as grok
import zeit.cms.checkout.interfaces
import zeit.cms.content.interfaces
import zope.interface
@grok.subscribe(
zeit.cms.content.interfaces.ICommonMetadata,
zeit.cms.checkout.interfaces.IBeforeCheckinEvent)
def provide_website_content(content, event):
content = zope.security.proxy.getObject(content)
if not content.rebrush_website_content:
+ zope.interface.noLongerProvides(content,
+ zeit.website.interfaces.IWebsiteSection)
return
for iface in zope.interface.providedBy(content):
if issubclass(iface, ISectionMarker):
zope.interface.noLongerProvides(content, iface)
zope.interface.alsoProvides(content, zeit.website.interfaces.IWebsiteSection)
|
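The early-exit branch added in this record strips the section marker from content whose `rebrush_website_content` flag is unset. The `zope.interface` calls involved can be exercised in isolation; in this sketch `IWebsiteSection` is a stand-in marker interface, not the real `zeit.website` one:

```python
import zope.interface

class IWebsiteSection(zope.interface.Interface):
    """Stand-in marker interface for illustration."""

class Content:
    rebrush_website_content = False

content = Content()
zope.interface.alsoProvides(content, IWebsiteSection)   # per-instance marker
assert IWebsiteSection.providedBy(content)

# Mirrors the new branch: drop the marker when the flag is not set.
if not content.rebrush_website_content:
    zope.interface.noLongerProvides(content, IWebsiteSection)
assert not IWebsiteSection.providedBy(content)
```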
566fc15f136076db5c421ca18f8b1fcb3d332229
|
ovp_projects/views.py
|
ovp_projects/views.py
|
from ovp_projects import serializers
from ovp_projects import models
from ovp_users import models as users_models
from rest_framework import mixins
from rest_framework import viewsets
from rest_framework import response
from rest_framework import status
class ProjectResourceViewSet(mixins.CreateModelMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet):
"""
ProjectResourceViewSet resource endpoint
"""
queryset = models.Project.objects.all()
lookup_field = 'slug'
lookup_value_regex = '[^/]+' # default is [^/.]+ - here we're allowing dots in the url slug field
def get_serializer_class(self):
#if self.action == 'create':
return serializers.ProjectCreateSerializer
def create(self, request, *args, **kwargs):
user = users_models.User.objects.all().first()
request.data['owner'] = user.pk
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
serializer.save()
headers = self.get_success_headers(serializer.data)
return response.Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)
|
from ovp_projects import serializers
from ovp_projects import models
from ovp_users import models as users_models
from rest_framework import mixins
from rest_framework import viewsets
from rest_framework import response
from rest_framework import status
class ProjectResourceViewSet(mixins.CreateModelMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet):
"""
ProjectResourceViewSet resource endpoint
"""
queryset = models.Project.objects.all()
lookup_field = 'slug'
lookup_value_regex = '[^/]+' # default is [^/.]+ - here we're allowing dots in the url slug field
def get_serializer_class(self):
if self.action == 'create':
return serializers.ProjectCreateSerializer
return serializers.ProjectSearchSerializer
def create(self, request, *args, **kwargs):
user = users_models.User.objects.all().first()
request.data['owner'] = user.pk
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
serializer.save()
headers = self.get_success_headers(serializer.data)
return response.Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)
|
Return ProjectSearchSerializer on ProjectResourceViewSet if action != 'Create'
|
Return ProjectSearchSerializer on ProjectResourceViewSet if action != 'Create'
|
Python
|
agpl-3.0
|
OpenVolunteeringPlatform/django-ovp-projects,OpenVolunteeringPlatform/django-ovp-projects
|
from ovp_projects import serializers
from ovp_projects import models
from ovp_users import models as users_models
from rest_framework import mixins
from rest_framework import viewsets
from rest_framework import response
from rest_framework import status
class ProjectResourceViewSet(mixins.CreateModelMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet):
"""
ProjectResourceViewSet resource endpoint
"""
queryset = models.Project.objects.all()
lookup_field = 'slug'
lookup_value_regex = '[^/]+' # default is [^/.]+ - here we're allowing dots in the url slug field
def get_serializer_class(self):
- #if self.action == 'create':
+ if self.action == 'create':
- return serializers.ProjectCreateSerializer
+ return serializers.ProjectCreateSerializer
+ return serializers.ProjectSearchSerializer
def create(self, request, *args, **kwargs):
user = users_models.User.objects.all().first()
request.data['owner'] = user.pk
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
serializer.save()
headers = self.get_success_headers(serializer.data)
return response.Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)
|
Return ProjectSearchSerializer on ProjectResourceViewSet if action != 'Create'
|
## Code Before:
from ovp_projects import serializers
from ovp_projects import models
from ovp_users import models as users_models
from rest_framework import mixins
from rest_framework import viewsets
from rest_framework import response
from rest_framework import status
class ProjectResourceViewSet(mixins.CreateModelMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet):
"""
ProjectResourceViewSet resource endpoint
"""
queryset = models.Project.objects.all()
lookup_field = 'slug'
lookup_value_regex = '[^/]+' # default is [^/.]+ - here we're allowing dots in the url slug field
def get_serializer_class(self):
#if self.action == 'create':
return serializers.ProjectCreateSerializer
def create(self, request, *args, **kwargs):
user = users_models.User.objects.all().first()
request.data['owner'] = user.pk
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
serializer.save()
headers = self.get_success_headers(serializer.data)
return response.Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)
## Instruction:
Return ProjectSearchSerializer on ProjectResourceViewSet if action != 'Create'
## Code After:
from ovp_projects import serializers
from ovp_projects import models
from ovp_users import models as users_models
from rest_framework import mixins
from rest_framework import viewsets
from rest_framework import response
from rest_framework import status
class ProjectResourceViewSet(mixins.CreateModelMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet):
"""
ProjectResourceViewSet resource endpoint
"""
queryset = models.Project.objects.all()
lookup_field = 'slug'
lookup_value_regex = '[^/]+' # default is [^/.]+ - here we're allowing dots in the url slug field
def get_serializer_class(self):
if self.action == 'create':
return serializers.ProjectCreateSerializer
return serializers.ProjectSearchSerializer
def create(self, request, *args, **kwargs):
user = users_models.User.objects.all().first()
request.data['owner'] = user.pk
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
serializer.save()
headers = self.get_success_headers(serializer.data)
return response.Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)
|
from ovp_projects import serializers
from ovp_projects import models
from ovp_users import models as users_models
from rest_framework import mixins
from rest_framework import viewsets
from rest_framework import response
from rest_framework import status
class ProjectResourceViewSet(mixins.CreateModelMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet):
"""
ProjectResourceViewSet resource endpoint
"""
queryset = models.Project.objects.all()
lookup_field = 'slug'
lookup_value_regex = '[^/]+' # default is [^/.]+ - here we're allowing dots in the url slug field
def get_serializer_class(self):
- #if self.action == 'create':
? -
+ if self.action == 'create':
- return serializers.ProjectCreateSerializer
+ return serializers.ProjectCreateSerializer
? ++
+ return serializers.ProjectSearchSerializer
def create(self, request, *args, **kwargs):
user = users_models.User.objects.all().first()
request.data['owner'] = user.pk
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
serializer.save()
headers = self.get_success_headers(serializer.data)
return response.Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)
|
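The uncommented branch in this record is a standard Django REST Framework pattern: dispatch the serializer class on `self.action` so writes and reads use different shapes. A minimal sketch with stand-in serializers — it assumes a configured Django project, and the field definitions are illustrative, not the repository's:

```python
from rest_framework import serializers, viewsets

class ProjectCreateSerializer(serializers.Serializer):
    name = serializers.CharField()

class ProjectSearchSerializer(serializers.Serializer):
    slug = serializers.SlugField()

class ProjectViewSet(viewsets.GenericViewSet):
    def get_serializer_class(self):
        # 'create' gets the write serializer; list/retrieve/etc. fall back
        # to the read-oriented search serializer.
        if self.action == "create":
            return ProjectCreateSerializer
        return ProjectSearchSerializer
```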