commit
stringlengths 40
40
| old_file
stringlengths 4
106
| new_file
stringlengths 4
106
| old_contents
stringlengths 10
2.94k
| new_contents
stringlengths 21
2.95k
| subject
stringlengths 16
444
| message
stringlengths 17
2.63k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 7
43k
| ndiff
stringlengths 52
3.31k
| instruction
stringlengths 16
444
| content
stringlengths 133
4.32k
| diff
stringlengths 49
3.61k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
2c62c7f063af02f6872edd2801c6700bfffeebd4
|
cloud_browser/cloud/config.py
|
cloud_browser/cloud/config.py
|
"""Cloud configuration."""
from cloud_browser.cloud.rackspace import RackspaceConnection
class Config(object):
"""Cloud configuration helper."""
conn_cls = RackspaceConnection
__singleton = None
def __init__(self, connection):
"""Initializer."""
self.connection = connection
@classmethod
def from_settings(cls):
"""Create configuration from Django settings or environment."""
from cloud_browser.app_settings import settings
from django.core.exceptions import ImproperlyConfigured
account = settings.CLOUD_BROWSER_RACKSPACE_ACCOUNT
secret_key = settings.CLOUD_BROWSER_RACKSPACE_SECRET_KEY
servicenet = settings.CLOUD_BROWSER_RACKSPACE_SERVICENET
if not (account and secret_key):
raise ImproperlyConfigured("No suitable credentials found.")
conn = cls.conn_cls(account, secret_key, servicenet)
return cls(conn)
@classmethod
def singleton(cls):
"""Get singleton object."""
if cls.__singleton is None:
cls.__singleton = cls.from_settings()
return cls.__singleton
|
"""Cloud configuration."""
class Config(object):
"""Cloud configuration helper."""
__singleton = None
def __init__(self, connection):
"""Initializer."""
self.connection = connection
@classmethod
def from_settings(cls):
"""Create configuration from Django settings or environment."""
from cloud_browser.app_settings import settings
from django.core.exceptions import ImproperlyConfigured
conn = None
if conn is None:
# Try Rackspace
account = settings.CLOUD_BROWSER_RACKSPACE_ACCOUNT
secret_key = settings.CLOUD_BROWSER_RACKSPACE_SECRET_KEY
servicenet = settings.CLOUD_BROWSER_RACKSPACE_SERVICENET
if (account and secret_key):
from cloud_browser.cloud.rackspace import RackspaceConnection
conn = RackspaceConnection(account, secret_key, servicenet)
if not conn:
raise ImproperlyConfigured("No suitable credentials found.")
return cls(conn)
@classmethod
def singleton(cls):
"""Get singleton object."""
if cls.__singleton is None:
cls.__singleton = cls.from_settings()
return cls.__singleton
|
Refactor to allow different connection class bindings.
|
Config: Refactor to allow different connection class bindings.
|
Python
|
mit
|
ryan-roemer/django-cloud-browser,ryan-roemer/django-cloud-browser,ryan-roemer/django-cloud-browser,UrbanDaddy/django-cloud-browser,UrbanDaddy/django-cloud-browser
|
"""Cloud configuration."""
- from cloud_browser.cloud.rackspace import RackspaceConnection
class Config(object):
"""Cloud configuration helper."""
- conn_cls = RackspaceConnection
__singleton = None
def __init__(self, connection):
"""Initializer."""
self.connection = connection
@classmethod
def from_settings(cls):
"""Create configuration from Django settings or environment."""
from cloud_browser.app_settings import settings
from django.core.exceptions import ImproperlyConfigured
+ conn = None
+ if conn is None:
+ # Try Rackspace
- account = settings.CLOUD_BROWSER_RACKSPACE_ACCOUNT
+ account = settings.CLOUD_BROWSER_RACKSPACE_ACCOUNT
- secret_key = settings.CLOUD_BROWSER_RACKSPACE_SECRET_KEY
+ secret_key = settings.CLOUD_BROWSER_RACKSPACE_SECRET_KEY
- servicenet = settings.CLOUD_BROWSER_RACKSPACE_SERVICENET
+ servicenet = settings.CLOUD_BROWSER_RACKSPACE_SERVICENET
+ if (account and secret_key):
+ from cloud_browser.cloud.rackspace import RackspaceConnection
+ conn = RackspaceConnection(account, secret_key, servicenet)
- if not (account and secret_key):
+ if not conn:
raise ImproperlyConfigured("No suitable credentials found.")
- conn = cls.conn_cls(account, secret_key, servicenet)
return cls(conn)
@classmethod
def singleton(cls):
"""Get singleton object."""
if cls.__singleton is None:
cls.__singleton = cls.from_settings()
return cls.__singleton
|
Refactor to allow different connection class bindings.
|
## Code Before:
"""Cloud configuration."""
from cloud_browser.cloud.rackspace import RackspaceConnection
class Config(object):
"""Cloud configuration helper."""
conn_cls = RackspaceConnection
__singleton = None
def __init__(self, connection):
"""Initializer."""
self.connection = connection
@classmethod
def from_settings(cls):
"""Create configuration from Django settings or environment."""
from cloud_browser.app_settings import settings
from django.core.exceptions import ImproperlyConfigured
account = settings.CLOUD_BROWSER_RACKSPACE_ACCOUNT
secret_key = settings.CLOUD_BROWSER_RACKSPACE_SECRET_KEY
servicenet = settings.CLOUD_BROWSER_RACKSPACE_SERVICENET
if not (account and secret_key):
raise ImproperlyConfigured("No suitable credentials found.")
conn = cls.conn_cls(account, secret_key, servicenet)
return cls(conn)
@classmethod
def singleton(cls):
"""Get singleton object."""
if cls.__singleton is None:
cls.__singleton = cls.from_settings()
return cls.__singleton
## Instruction:
Refactor to allow different connection class bindings.
## Code After:
"""Cloud configuration."""
class Config(object):
"""Cloud configuration helper."""
__singleton = None
def __init__(self, connection):
"""Initializer."""
self.connection = connection
@classmethod
def from_settings(cls):
"""Create configuration from Django settings or environment."""
from cloud_browser.app_settings import settings
from django.core.exceptions import ImproperlyConfigured
conn = None
if conn is None:
# Try Rackspace
account = settings.CLOUD_BROWSER_RACKSPACE_ACCOUNT
secret_key = settings.CLOUD_BROWSER_RACKSPACE_SECRET_KEY
servicenet = settings.CLOUD_BROWSER_RACKSPACE_SERVICENET
if (account and secret_key):
from cloud_browser.cloud.rackspace import RackspaceConnection
conn = RackspaceConnection(account, secret_key, servicenet)
if not conn:
raise ImproperlyConfigured("No suitable credentials found.")
return cls(conn)
@classmethod
def singleton(cls):
"""Get singleton object."""
if cls.__singleton is None:
cls.__singleton = cls.from_settings()
return cls.__singleton
|
"""Cloud configuration."""
- from cloud_browser.cloud.rackspace import RackspaceConnection
class Config(object):
"""Cloud configuration helper."""
- conn_cls = RackspaceConnection
__singleton = None
def __init__(self, connection):
"""Initializer."""
self.connection = connection
@classmethod
def from_settings(cls):
"""Create configuration from Django settings or environment."""
from cloud_browser.app_settings import settings
from django.core.exceptions import ImproperlyConfigured
+ conn = None
+ if conn is None:
+ # Try Rackspace
- account = settings.CLOUD_BROWSER_RACKSPACE_ACCOUNT
+ account = settings.CLOUD_BROWSER_RACKSPACE_ACCOUNT
? ++++
- secret_key = settings.CLOUD_BROWSER_RACKSPACE_SECRET_KEY
+ secret_key = settings.CLOUD_BROWSER_RACKSPACE_SECRET_KEY
? ++++
- servicenet = settings.CLOUD_BROWSER_RACKSPACE_SERVICENET
+ servicenet = settings.CLOUD_BROWSER_RACKSPACE_SERVICENET
? ++++
+ if (account and secret_key):
+ from cloud_browser.cloud.rackspace import RackspaceConnection
+ conn = RackspaceConnection(account, secret_key, servicenet)
- if not (account and secret_key):
+ if not conn:
raise ImproperlyConfigured("No suitable credentials found.")
- conn = cls.conn_cls(account, secret_key, servicenet)
return cls(conn)
@classmethod
def singleton(cls):
"""Get singleton object."""
if cls.__singleton is None:
cls.__singleton = cls.from_settings()
return cls.__singleton
|
d600fc56127f234a7a14b4a89be14b5c31b072e7
|
examples/edge_test.py
|
examples/edge_test.py
|
from seleniumbase import BaseCase
class EdgeTests(BaseCase):
def test_edge(self):
if self.browser != "edge":
print("\n This test is only for Microsoft Edge (Chromium)!")
print(' (Run this test using "--edge" or "--browser=edge")')
self.skip('Use "--edge" or "--browser=edge"')
self.open("edge://settings/help")
self.assert_element('img[alt="Edge logo"] + span')
self.highlight('#section_about div + div')
self.highlight('#section_about div + div > div')
self.highlight('img[alt="Edge logo"]')
self.highlight('img[alt="Edge logo"] + span')
self.highlight('#section_about div + div > div + div')
self.highlight('#section_about div + div > div + div + div > div')
|
from seleniumbase import BaseCase
class EdgeTests(BaseCase):
def test_edge(self):
if self.browser != "edge":
print("\n This test is only for Microsoft Edge (Chromium)!")
print(' (Run this test using "--edge" or "--browser=edge")')
self.skip('Use "--edge" or "--browser=edge"')
self.open("edge://settings/help")
self.highlight('div[role="main"]')
self.highlight('img[srcset*="logo"]')
self.assert_text("Microsoft Edge", 'img[srcset*="logo"] + div')
self.highlight('img[srcset*="logo"] + div span:nth-of-type(1)')
self.highlight('img[srcset*="logo"] + div span:nth-of-type(2)')
self.highlight('span[aria-live="assertive"]')
self.highlight('a[href*="chromium"]')
|
Update the Edge example test
|
Update the Edge example test
|
Python
|
mit
|
mdmintz/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase
|
from seleniumbase import BaseCase
class EdgeTests(BaseCase):
def test_edge(self):
if self.browser != "edge":
print("\n This test is only for Microsoft Edge (Chromium)!")
print(' (Run this test using "--edge" or "--browser=edge")')
self.skip('Use "--edge" or "--browser=edge"')
self.open("edge://settings/help")
+ self.highlight('div[role="main"]')
- self.assert_element('img[alt="Edge logo"] + span')
- self.highlight('#section_about div + div')
- self.highlight('#section_about div + div > div')
- self.highlight('img[alt="Edge logo"]')
+ self.highlight('img[srcset*="logo"]')
- self.highlight('img[alt="Edge logo"] + span')
- self.highlight('#section_about div + div > div + div')
- self.highlight('#section_about div + div > div + div + div > div')
+ self.assert_text("Microsoft Edge", 'img[srcset*="logo"] + div')
+ self.highlight('img[srcset*="logo"] + div span:nth-of-type(1)')
+ self.highlight('img[srcset*="logo"] + div span:nth-of-type(2)')
+ self.highlight('span[aria-live="assertive"]')
+ self.highlight('a[href*="chromium"]')
|
Update the Edge example test
|
## Code Before:
from seleniumbase import BaseCase
class EdgeTests(BaseCase):
def test_edge(self):
if self.browser != "edge":
print("\n This test is only for Microsoft Edge (Chromium)!")
print(' (Run this test using "--edge" or "--browser=edge")')
self.skip('Use "--edge" or "--browser=edge"')
self.open("edge://settings/help")
self.assert_element('img[alt="Edge logo"] + span')
self.highlight('#section_about div + div')
self.highlight('#section_about div + div > div')
self.highlight('img[alt="Edge logo"]')
self.highlight('img[alt="Edge logo"] + span')
self.highlight('#section_about div + div > div + div')
self.highlight('#section_about div + div > div + div + div > div')
## Instruction:
Update the Edge example test
## Code After:
from seleniumbase import BaseCase
class EdgeTests(BaseCase):
def test_edge(self):
if self.browser != "edge":
print("\n This test is only for Microsoft Edge (Chromium)!")
print(' (Run this test using "--edge" or "--browser=edge")')
self.skip('Use "--edge" or "--browser=edge"')
self.open("edge://settings/help")
self.highlight('div[role="main"]')
self.highlight('img[srcset*="logo"]')
self.assert_text("Microsoft Edge", 'img[srcset*="logo"] + div')
self.highlight('img[srcset*="logo"] + div span:nth-of-type(1)')
self.highlight('img[srcset*="logo"] + div span:nth-of-type(2)')
self.highlight('span[aria-live="assertive"]')
self.highlight('a[href*="chromium"]')
|
from seleniumbase import BaseCase
class EdgeTests(BaseCase):
def test_edge(self):
if self.browser != "edge":
print("\n This test is only for Microsoft Edge (Chromium)!")
print(' (Run this test using "--edge" or "--browser=edge")')
self.skip('Use "--edge" or "--browser=edge"')
self.open("edge://settings/help")
+ self.highlight('div[role="main"]')
- self.assert_element('img[alt="Edge logo"] + span')
- self.highlight('#section_about div + div')
- self.highlight('#section_about div + div > div')
- self.highlight('img[alt="Edge logo"]')
? ^^ -----
+ self.highlight('img[srcset*="logo"]')
? ^^^^^ +
- self.highlight('img[alt="Edge logo"] + span')
- self.highlight('#section_about div + div > div + div')
- self.highlight('#section_about div + div > div + div + div > div')
+ self.assert_text("Microsoft Edge", 'img[srcset*="logo"] + div')
+ self.highlight('img[srcset*="logo"] + div span:nth-of-type(1)')
+ self.highlight('img[srcset*="logo"] + div span:nth-of-type(2)')
+ self.highlight('span[aria-live="assertive"]')
+ self.highlight('a[href*="chromium"]')
|
bcca20cecbc664422f72359ba4fba7d55e833b32
|
swampdragon/connections/sockjs_connection.py
|
swampdragon/connections/sockjs_connection.py
|
from sockjs.tornado import SockJSConnection
from ..pubsub_providers.redis_pubsub_provider import RedisPubSubProvider
from .. import route_handler
import json
pub_sub = RedisPubSubProvider()
class ConnectionMixin(object):
def to_json(self, data):
if isinstance(data, dict):
return data
try:
data = json.loads(data.replace("'", '"'))
return data
except:
return json.dumps({'message': data})
def to_string(self, data):
if isinstance(data, dict):
return json.dumps(data).replace("'", '"')
return data
class SubscriberConnection(ConnectionMixin, SockJSConnection):
def __init__(self, session):
super(SubscriberConnection, self).__init__(session)
def on_open(self, request):
self.pub_sub = pub_sub
def on_close(self):
self.pub_sub.close(self)
def on_message(self, data):
try:
data = self.to_json(data)
handler = route_handler.get_route_handler(data['route'])
handler(self).handle(data)
except Exception as e:
self.abort_connection()
raise e
def abort_connection(self):
self.close()
def send(self, message, binary=False):
super(SubscriberConnection, self).send(message, binary)
def broadcast(self, clients, message):
super(SubscriberConnection, self).broadcast(clients, message)
class DjangoSubscriberConnection(SubscriberConnection):
def __init__(self, session):
super(DjangoSubscriberConnection, self).__init__(session)
|
from sockjs.tornado import SockJSConnection
from ..pubsub_providers.redis_pubsub_provider import RedisPubSubProvider
from .. import route_handler
import json
pub_sub = RedisPubSubProvider()
class ConnectionMixin(object):
def to_json(self, data):
if isinstance(data, dict):
return data
try:
data = json.loads(data.replace("'", '"'))
return data
except:
return json.dumps({'message': data})
def to_string(self, data):
if isinstance(data, dict):
return json.dumps(data).replace("'", '"')
return data
class SubscriberConnection(ConnectionMixin, SockJSConnection):
channels = []
def __init__(self, session):
super(SubscriberConnection, self).__init__(session)
def on_open(self, request):
self.pub_sub = pub_sub
def on_close(self):
self.pub_sub.close(self)
def on_message(self, data):
try:
data = self.to_json(data)
handler = route_handler.get_route_handler(data['route'])
handler(self).handle(data)
except Exception as e:
self.abort_connection()
raise e
def abort_connection(self):
self.close()
def send(self, message, binary=False):
super(SubscriberConnection, self).send(message, binary)
def broadcast(self, clients, message):
super(SubscriberConnection, self).broadcast(clients, message)
class DjangoSubscriberConnection(SubscriberConnection):
def __init__(self, session):
super(DjangoSubscriberConnection, self).__init__(session)
|
Include channel list in connection
|
Include channel list in connection
|
Python
|
bsd-3-clause
|
sahlinet/swampdragon,denizs/swampdragon,michael-k/swampdragon,seclinch/swampdragon,Manuel4131/swampdragon,aexeagmbh/swampdragon,Manuel4131/swampdragon,d9pouces/swampdragon,aexeagmbh/swampdragon,michael-k/swampdragon,d9pouces/swampdragon,boris-savic/swampdragon,boris-savic/swampdragon,jonashagstedt/swampdragon,jonashagstedt/swampdragon,faulkner/swampdragon,faulkner/swampdragon,aexeagmbh/swampdragon,Manuel4131/swampdragon,michael-k/swampdragon,d9pouces/swampdragon,denizs/swampdragon,sahlinet/swampdragon,bastianh/swampdragon,h-hirokawa/swampdragon,bastianh/swampdragon,seclinch/swampdragon,faulkner/swampdragon,bastianh/swampdragon,h-hirokawa/swampdragon,sahlinet/swampdragon,seclinch/swampdragon,jonashagstedt/swampdragon,denizs/swampdragon,boris-savic/swampdragon
|
from sockjs.tornado import SockJSConnection
from ..pubsub_providers.redis_pubsub_provider import RedisPubSubProvider
from .. import route_handler
import json
pub_sub = RedisPubSubProvider()
class ConnectionMixin(object):
def to_json(self, data):
if isinstance(data, dict):
return data
try:
data = json.loads(data.replace("'", '"'))
return data
except:
return json.dumps({'message': data})
def to_string(self, data):
if isinstance(data, dict):
return json.dumps(data).replace("'", '"')
return data
class SubscriberConnection(ConnectionMixin, SockJSConnection):
+ channels = []
+
def __init__(self, session):
super(SubscriberConnection, self).__init__(session)
def on_open(self, request):
self.pub_sub = pub_sub
def on_close(self):
self.pub_sub.close(self)
def on_message(self, data):
try:
data = self.to_json(data)
handler = route_handler.get_route_handler(data['route'])
handler(self).handle(data)
except Exception as e:
self.abort_connection()
raise e
def abort_connection(self):
self.close()
def send(self, message, binary=False):
super(SubscriberConnection, self).send(message, binary)
def broadcast(self, clients, message):
super(SubscriberConnection, self).broadcast(clients, message)
class DjangoSubscriberConnection(SubscriberConnection):
def __init__(self, session):
super(DjangoSubscriberConnection, self).__init__(session)
|
Include channel list in connection
|
## Code Before:
from sockjs.tornado import SockJSConnection
from ..pubsub_providers.redis_pubsub_provider import RedisPubSubProvider
from .. import route_handler
import json
pub_sub = RedisPubSubProvider()
class ConnectionMixin(object):
def to_json(self, data):
if isinstance(data, dict):
return data
try:
data = json.loads(data.replace("'", '"'))
return data
except:
return json.dumps({'message': data})
def to_string(self, data):
if isinstance(data, dict):
return json.dumps(data).replace("'", '"')
return data
class SubscriberConnection(ConnectionMixin, SockJSConnection):
def __init__(self, session):
super(SubscriberConnection, self).__init__(session)
def on_open(self, request):
self.pub_sub = pub_sub
def on_close(self):
self.pub_sub.close(self)
def on_message(self, data):
try:
data = self.to_json(data)
handler = route_handler.get_route_handler(data['route'])
handler(self).handle(data)
except Exception as e:
self.abort_connection()
raise e
def abort_connection(self):
self.close()
def send(self, message, binary=False):
super(SubscriberConnection, self).send(message, binary)
def broadcast(self, clients, message):
super(SubscriberConnection, self).broadcast(clients, message)
class DjangoSubscriberConnection(SubscriberConnection):
def __init__(self, session):
super(DjangoSubscriberConnection, self).__init__(session)
## Instruction:
Include channel list in connection
## Code After:
from sockjs.tornado import SockJSConnection
from ..pubsub_providers.redis_pubsub_provider import RedisPubSubProvider
from .. import route_handler
import json
pub_sub = RedisPubSubProvider()
class ConnectionMixin(object):
def to_json(self, data):
if isinstance(data, dict):
return data
try:
data = json.loads(data.replace("'", '"'))
return data
except:
return json.dumps({'message': data})
def to_string(self, data):
if isinstance(data, dict):
return json.dumps(data).replace("'", '"')
return data
class SubscriberConnection(ConnectionMixin, SockJSConnection):
channels = []
def __init__(self, session):
super(SubscriberConnection, self).__init__(session)
def on_open(self, request):
self.pub_sub = pub_sub
def on_close(self):
self.pub_sub.close(self)
def on_message(self, data):
try:
data = self.to_json(data)
handler = route_handler.get_route_handler(data['route'])
handler(self).handle(data)
except Exception as e:
self.abort_connection()
raise e
def abort_connection(self):
self.close()
def send(self, message, binary=False):
super(SubscriberConnection, self).send(message, binary)
def broadcast(self, clients, message):
super(SubscriberConnection, self).broadcast(clients, message)
class DjangoSubscriberConnection(SubscriberConnection):
def __init__(self, session):
super(DjangoSubscriberConnection, self).__init__(session)
|
from sockjs.tornado import SockJSConnection
from ..pubsub_providers.redis_pubsub_provider import RedisPubSubProvider
from .. import route_handler
import json
pub_sub = RedisPubSubProvider()
class ConnectionMixin(object):
def to_json(self, data):
if isinstance(data, dict):
return data
try:
data = json.loads(data.replace("'", '"'))
return data
except:
return json.dumps({'message': data})
def to_string(self, data):
if isinstance(data, dict):
return json.dumps(data).replace("'", '"')
return data
class SubscriberConnection(ConnectionMixin, SockJSConnection):
+ channels = []
+
def __init__(self, session):
super(SubscriberConnection, self).__init__(session)
def on_open(self, request):
self.pub_sub = pub_sub
def on_close(self):
self.pub_sub.close(self)
def on_message(self, data):
try:
data = self.to_json(data)
handler = route_handler.get_route_handler(data['route'])
handler(self).handle(data)
except Exception as e:
self.abort_connection()
raise e
def abort_connection(self):
self.close()
def send(self, message, binary=False):
super(SubscriberConnection, self).send(message, binary)
def broadcast(self, clients, message):
super(SubscriberConnection, self).broadcast(clients, message)
class DjangoSubscriberConnection(SubscriberConnection):
def __init__(self, session):
super(DjangoSubscriberConnection, self).__init__(session)
|
115bb7cf36bad5d38ac3b0be9a0bab7823c3b003
|
IATISimpleTester/lib/helpers.py
|
IATISimpleTester/lib/helpers.py
|
from collections import defaultdict
import re
from lxml import etree
from IATISimpleTester import app
# given an expression list and the name of an expression,
# select it,
def select_expression(expression_list, expression_name, default_expression_name=None):
expression_dicts = {x["id"]: x for x in expression_list}
if expression_name not in expression_dicts:
expression_name = default_expression_name
return expression_name, expression_dicts.get(expression_name)
def slugify(inp):
return inp.lower().replace(' ', '-')
def pprint(explanation):
explanation = explanation.strip()
explanation = re.sub(r'\*([^\*]+)\*', r'<strong>\1</strong>', explanation)
if len(explanation) > 0:
explanation = explanation[0].upper() + explanation[1:]
explanation = explanation.replace('\n', '<br>') + '.'
return re.sub(r'`([^`]*)`', r'<code>\1</code>', explanation)
|
from collections import defaultdict
import re
from lxml import etree
from IATISimpleTester import app
# given an expression list and the name of an expression,
# select it,
def select_expression(expression_list, expression_name, default_expression_name=None):
expression_dicts = {x["id"]: x for x in expression_list}
if expression_name not in expression_dicts:
expression_name = default_expression_name
return expression_name, expression_dicts.get(expression_name)
def slugify(inp):
return inp.lower().replace(' ', '-')
def pprint(explanation):
explanation = explanation.strip()
if len(explanation) > 0:
explanation = explanation[0].upper() + explanation[1:]
explanation = explanation.replace('\n', '<br>') + '.'
return re.sub(r'`([^`]*)`', r'<code>\1</code>', explanation)
|
Remove this emboldening thing again
|
Remove this emboldening thing again
|
Python
|
mit
|
pwyf/data-quality-tester,pwyf/data-quality-tester,pwyf/data-quality-tester,pwyf/data-quality-tester
|
from collections import defaultdict
import re
from lxml import etree
from IATISimpleTester import app
# given an expression list and the name of an expression,
# select it,
def select_expression(expression_list, expression_name, default_expression_name=None):
expression_dicts = {x["id"]: x for x in expression_list}
if expression_name not in expression_dicts:
expression_name = default_expression_name
return expression_name, expression_dicts.get(expression_name)
def slugify(inp):
return inp.lower().replace(' ', '-')
def pprint(explanation):
explanation = explanation.strip()
- explanation = re.sub(r'\*([^\*]+)\*', r'<strong>\1</strong>', explanation)
if len(explanation) > 0:
explanation = explanation[0].upper() + explanation[1:]
explanation = explanation.replace('\n', '<br>') + '.'
return re.sub(r'`([^`]*)`', r'<code>\1</code>', explanation)
|
Remove this emboldening thing again
|
## Code Before:
from collections import defaultdict
import re
from lxml import etree
from IATISimpleTester import app
# given an expression list and the name of an expression,
# select it,
def select_expression(expression_list, expression_name, default_expression_name=None):
expression_dicts = {x["id"]: x for x in expression_list}
if expression_name not in expression_dicts:
expression_name = default_expression_name
return expression_name, expression_dicts.get(expression_name)
def slugify(inp):
return inp.lower().replace(' ', '-')
def pprint(explanation):
explanation = explanation.strip()
explanation = re.sub(r'\*([^\*]+)\*', r'<strong>\1</strong>', explanation)
if len(explanation) > 0:
explanation = explanation[0].upper() + explanation[1:]
explanation = explanation.replace('\n', '<br>') + '.'
return re.sub(r'`([^`]*)`', r'<code>\1</code>', explanation)
## Instruction:
Remove this emboldening thing again
## Code After:
from collections import defaultdict
import re
from lxml import etree
from IATISimpleTester import app
# given an expression list and the name of an expression,
# select it,
def select_expression(expression_list, expression_name, default_expression_name=None):
expression_dicts = {x["id"]: x for x in expression_list}
if expression_name not in expression_dicts:
expression_name = default_expression_name
return expression_name, expression_dicts.get(expression_name)
def slugify(inp):
return inp.lower().replace(' ', '-')
def pprint(explanation):
explanation = explanation.strip()
if len(explanation) > 0:
explanation = explanation[0].upper() + explanation[1:]
explanation = explanation.replace('\n', '<br>') + '.'
return re.sub(r'`([^`]*)`', r'<code>\1</code>', explanation)
|
from collections import defaultdict
import re
from lxml import etree
from IATISimpleTester import app
# given an expression list and the name of an expression,
# select it,
def select_expression(expression_list, expression_name, default_expression_name=None):
expression_dicts = {x["id"]: x for x in expression_list}
if expression_name not in expression_dicts:
expression_name = default_expression_name
return expression_name, expression_dicts.get(expression_name)
def slugify(inp):
return inp.lower().replace(' ', '-')
def pprint(explanation):
explanation = explanation.strip()
- explanation = re.sub(r'\*([^\*]+)\*', r'<strong>\1</strong>', explanation)
if len(explanation) > 0:
explanation = explanation[0].upper() + explanation[1:]
explanation = explanation.replace('\n', '<br>') + '.'
return re.sub(r'`([^`]*)`', r'<code>\1</code>', explanation)
|
834b7ff81d6e2777d3952bb588a53f12f5ace5f5
|
setup.py
|
setup.py
|
from distutils.core import setup
# If we did a straight `import regobj` here we wouldn't be able
# to build on non-win32 machines.
regobj = {}
try:
execfile("regobj.py",regobj)
except ImportError:
pass
VERSION = regobj["__version__"]
NAME = "regobj"
DESCRIPTION = "Pythonic object-based access to the Windows Registry."
LONG_DESC = regobj["__doc__"]
AUTHOR = "Ryan Kelly"
AUTHOR_EMAIL = "[email protected]"
URL="https://github.com/rfk/regobj"
LICENSE = "MIT"
KEYWORDS = "windows registry"
setup(name=NAME,
version=VERSION,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
url=URL,
description=DESCRIPTION,
long_description=LONG_DESC,
license=LICENSE,
keywords=KEYWORDS,
py_modules=["regobj"],
)
|
from distutils.core import setup
# If we did a straight `import regobj` here we wouldn't be able
# to build on non-win32 machines.
regobj = {}
try:
execfile("regobj.py",regobj)
except ImportError:
pass
VERSION = regobj["__version__"]
NAME = "regobj"
DESCRIPTION = "Pythonic object-based access to the Windows Registry."
LONG_DESC = regobj["__doc__"]
AUTHOR = "Ryan Kelly"
AUTHOR_EMAIL = "[email protected]"
URL="https://github.com/rfk/regobj"
LICENSE = "MIT"
KEYWORDS = "windows registry"
setup(name=NAME,
version=VERSION,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
url=URL,
description=DESCRIPTION,
long_description=LONG_DESC,
license=LICENSE,
keywords=KEYWORDS,
py_modules=["regobj"],
classifiers=[c.strip() for c in """
Intended Audience :: Developers
License :: OSI Approved :: MIT License
Programming Language :: Python :: 2
Programming Language :: Python :: 3
Topic :: Software Development :: Libraries :: Python Modules
""".split('\n') if c.strip()],
)
|
Add a Python 3 classifier recommended by community
|
Add a Python 3 classifier recommended by community
|
Python
|
mit
|
rfk/regobj
|
from distutils.core import setup
# If we did a straight `import regobj` here we wouldn't be able
# to build on non-win32 machines.
regobj = {}
try:
execfile("regobj.py",regobj)
except ImportError:
pass
VERSION = regobj["__version__"]
NAME = "regobj"
DESCRIPTION = "Pythonic object-based access to the Windows Registry."
LONG_DESC = regobj["__doc__"]
AUTHOR = "Ryan Kelly"
AUTHOR_EMAIL = "[email protected]"
URL="https://github.com/rfk/regobj"
LICENSE = "MIT"
KEYWORDS = "windows registry"
setup(name=NAME,
version=VERSION,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
url=URL,
description=DESCRIPTION,
long_description=LONG_DESC,
license=LICENSE,
keywords=KEYWORDS,
py_modules=["regobj"],
+ classifiers=[c.strip() for c in """
+ Intended Audience :: Developers
+ License :: OSI Approved :: MIT License
+ Programming Language :: Python :: 2
+ Programming Language :: Python :: 3
+ Topic :: Software Development :: Libraries :: Python Modules
+ """.split('\n') if c.strip()],
)
|
Add a Python 3 classifier recommended by community
|
## Code Before:
from distutils.core import setup
# If we did a straight `import regobj` here we wouldn't be able
# to build on non-win32 machines.
regobj = {}
try:
execfile("regobj.py",regobj)
except ImportError:
pass
VERSION = regobj["__version__"]
NAME = "regobj"
DESCRIPTION = "Pythonic object-based access to the Windows Registry."
LONG_DESC = regobj["__doc__"]
AUTHOR = "Ryan Kelly"
AUTHOR_EMAIL = "[email protected]"
URL="https://github.com/rfk/regobj"
LICENSE = "MIT"
KEYWORDS = "windows registry"
setup(name=NAME,
version=VERSION,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
url=URL,
description=DESCRIPTION,
long_description=LONG_DESC,
license=LICENSE,
keywords=KEYWORDS,
py_modules=["regobj"],
)
## Instruction:
Add a Python 3 classifier recommended by community
## Code After:
from distutils.core import setup
# If we did a straight `import regobj` here we wouldn't be able
# to build on non-win32 machines.
regobj = {}
try:
execfile("regobj.py",regobj)
except ImportError:
pass
VERSION = regobj["__version__"]
NAME = "regobj"
DESCRIPTION = "Pythonic object-based access to the Windows Registry."
LONG_DESC = regobj["__doc__"]
AUTHOR = "Ryan Kelly"
AUTHOR_EMAIL = "[email protected]"
URL="https://github.com/rfk/regobj"
LICENSE = "MIT"
KEYWORDS = "windows registry"
setup(name=NAME,
version=VERSION,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
url=URL,
description=DESCRIPTION,
long_description=LONG_DESC,
license=LICENSE,
keywords=KEYWORDS,
py_modules=["regobj"],
classifiers=[c.strip() for c in """
Intended Audience :: Developers
License :: OSI Approved :: MIT License
Programming Language :: Python :: 2
Programming Language :: Python :: 3
Topic :: Software Development :: Libraries :: Python Modules
""".split('\n') if c.strip()],
)
|
from distutils.core import setup
# If we did a straight `import regobj` here we wouldn't be able
# to build on non-win32 machines.
regobj = {}
try:
execfile("regobj.py",regobj)
except ImportError:
pass
VERSION = regobj["__version__"]
NAME = "regobj"
DESCRIPTION = "Pythonic object-based access to the Windows Registry."
LONG_DESC = regobj["__doc__"]
AUTHOR = "Ryan Kelly"
AUTHOR_EMAIL = "[email protected]"
URL="https://github.com/rfk/regobj"
LICENSE = "MIT"
KEYWORDS = "windows registry"
setup(name=NAME,
version=VERSION,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
url=URL,
description=DESCRIPTION,
long_description=LONG_DESC,
license=LICENSE,
keywords=KEYWORDS,
py_modules=["regobj"],
+ classifiers=[c.strip() for c in """
+ Intended Audience :: Developers
+ License :: OSI Approved :: MIT License
+ Programming Language :: Python :: 2
+ Programming Language :: Python :: 3
+ Topic :: Software Development :: Libraries :: Python Modules
+ """.split('\n') if c.strip()],
)
|
7599f60a0e64f1d1d076695af67a212be751a89b
|
tests/rules_tests/grammarManipulation_tests/InactiveRulesTest.py
|
tests/rules_tests/grammarManipulation_tests/InactiveRulesTest.py
|
from unittest import main, TestCase
from grammpy import Grammar, Nonterminal, Rule as _R
from ..grammar import *
class InactiveRulesTest(TestCase):
def __init__(self, *args):
super().__init__(*args)
self.g = Grammar()
def setUp(self):
g = Grammar()
g.add_term([0, 1, 2, 'a', 'b', 'c'])
g.add_nonterm([NFirst, NSecond, NThird, NFourth])
self.g = g
if __name__ == '__main__':
main()
|
from unittest import main, TestCase
from grammpy import Grammar, Nonterminal, Rule as _R
from ..grammar import *
class InactiveRulesTest(TestCase):
def __init__(self, *args):
super().__init__(*args)
self.g = Grammar()
def setUp(self):
g = Grammar()
g.add_term([0, 1, 2, 'a', 'b', 'c'])
g.add_nonterm([NFirst, NSecond, NThird, NFourth])
self.g = g
def test_countWithInactive(self):
class Tmp1(_R):
rule = ([NFirst], ['a', 0])
_active = False
self.g.add_rule(Tmp1)
self.assertEqual(self.g.rules_count(), 0)
self.assertTrue(self.g.have_rule(Tmp1))
self.assertNotIn(Tmp1, self.g.rules())
class Tmp2(_R):
rule = ([NSecond], ['a', 0, NFourth])
self.g.add_rule(Tmp2)
self.assertEqual(self.g.rules_count(), 1)
self.assertTrue(self.g.have_rule(Tmp1))
self.assertNotIn(Tmp1, self.g.rules())
self.assertIn(Tmp2, self.g.rules())
if __name__ == '__main__':
main()
|
Add test when rule with inactive is passed
|
Add test when rule with inactive is passed
|
Python
|
mit
|
PatrikValkovic/grammpy
|
from unittest import main, TestCase
from grammpy import Grammar, Nonterminal, Rule as _R
from ..grammar import *
class InactiveRulesTest(TestCase):
def __init__(self, *args):
super().__init__(*args)
self.g = Grammar()
def setUp(self):
g = Grammar()
g.add_term([0, 1, 2, 'a', 'b', 'c'])
g.add_nonterm([NFirst, NSecond, NThird, NFourth])
self.g = g
+ def test_countWithInactive(self):
+ class Tmp1(_R):
+ rule = ([NFirst], ['a', 0])
+ _active = False
+ self.g.add_rule(Tmp1)
+ self.assertEqual(self.g.rules_count(), 0)
+ self.assertTrue(self.g.have_rule(Tmp1))
+ self.assertNotIn(Tmp1, self.g.rules())
+ class Tmp2(_R):
+ rule = ([NSecond], ['a', 0, NFourth])
+ self.g.add_rule(Tmp2)
+ self.assertEqual(self.g.rules_count(), 1)
+ self.assertTrue(self.g.have_rule(Tmp1))
+ self.assertNotIn(Tmp1, self.g.rules())
+ self.assertIn(Tmp2, self.g.rules())
+
if __name__ == '__main__':
main()
|
Add test when rule with inactive is passed
|
## Code Before:
from unittest import main, TestCase
from grammpy import Grammar, Nonterminal, Rule as _R
from ..grammar import *
class InactiveRulesTest(TestCase):
def __init__(self, *args):
super().__init__(*args)
self.g = Grammar()
def setUp(self):
g = Grammar()
g.add_term([0, 1, 2, 'a', 'b', 'c'])
g.add_nonterm([NFirst, NSecond, NThird, NFourth])
self.g = g
if __name__ == '__main__':
main()
## Instruction:
Add test when rule with inactive is passed
## Code After:
from unittest import main, TestCase
from grammpy import Grammar, Nonterminal, Rule as _R
from ..grammar import *
class InactiveRulesTest(TestCase):
def __init__(self, *args):
super().__init__(*args)
self.g = Grammar()
def setUp(self):
g = Grammar()
g.add_term([0, 1, 2, 'a', 'b', 'c'])
g.add_nonterm([NFirst, NSecond, NThird, NFourth])
self.g = g
def test_countWithInactive(self):
class Tmp1(_R):
rule = ([NFirst], ['a', 0])
_active = False
self.g.add_rule(Tmp1)
self.assertEqual(self.g.rules_count(), 0)
self.assertTrue(self.g.have_rule(Tmp1))
self.assertNotIn(Tmp1, self.g.rules())
class Tmp2(_R):
rule = ([NSecond], ['a', 0, NFourth])
self.g.add_rule(Tmp2)
self.assertEqual(self.g.rules_count(), 1)
self.assertTrue(self.g.have_rule(Tmp1))
self.assertNotIn(Tmp1, self.g.rules())
self.assertIn(Tmp2, self.g.rules())
if __name__ == '__main__':
main()
|
from unittest import main, TestCase
from grammpy import Grammar, Nonterminal, Rule as _R
from ..grammar import *
class InactiveRulesTest(TestCase):
def __init__(self, *args):
super().__init__(*args)
self.g = Grammar()
def setUp(self):
g = Grammar()
g.add_term([0, 1, 2, 'a', 'b', 'c'])
g.add_nonterm([NFirst, NSecond, NThird, NFourth])
self.g = g
+ def test_countWithInactive(self):
+ class Tmp1(_R):
+ rule = ([NFirst], ['a', 0])
+ _active = False
+ self.g.add_rule(Tmp1)
+ self.assertEqual(self.g.rules_count(), 0)
+ self.assertTrue(self.g.have_rule(Tmp1))
+ self.assertNotIn(Tmp1, self.g.rules())
+ class Tmp2(_R):
+ rule = ([NSecond], ['a', 0, NFourth])
+ self.g.add_rule(Tmp2)
+ self.assertEqual(self.g.rules_count(), 1)
+ self.assertTrue(self.g.have_rule(Tmp1))
+ self.assertNotIn(Tmp1, self.g.rules())
+ self.assertIn(Tmp2, self.g.rules())
+
if __name__ == '__main__':
main()
|
afac07ce173af3e7db4a6ba6dab4786903e217b7
|
ocradmin/ocr/tools/plugins/cuneiform_wrapper.py
|
ocradmin/ocr/tools/plugins/cuneiform_wrapper.py
|
from generic_wrapper import *
def main_class():
return CuneiformWrapper
class CuneiformWrapper(GenericWrapper):
"""
Override certain methods of the OcropusWrapper to
use Cuneiform for recognition of individual lines.
"""
name = "cuneiform"
capabilities = ("line", "page")
binary = get_binary("cuneiform")
def get_command(self, outfile, image):
"""
Cuneiform command line. Simplified for now.
"""
return [self.binary, "-o", outfile, image]
|
import tempfile
import subprocess as sp
from ocradmin.ocr.tools import check_aborted, set_progress
from ocradmin.ocr.utils import HocrParser
from generic_wrapper import *
def main_class():
return CuneiformWrapper
class CuneiformWrapper(GenericWrapper):
"""
Override certain methods of the OcropusWrapper to
use Cuneiform for recognition of individual lines.
"""
name = "cuneiform"
capabilities = ("line", "page")
binary = get_binary("cuneiform")
def get_command(self, outfile, image):
"""
Cuneiform command line. Simplified for now.
"""
return [self.binary, "-o", outfile, image]
def convert(self, filepath, *args, **kwargs):
"""
Convert a full page.
"""
json = None
with tempfile.NamedTemporaryFile(delete=False) as tmp:
tmp.close()
args = [self.binary, "-f", "hocr", "-o", tmp.name, filepath]
self.logger.info(args)
proc = sp.Popen(args, stderr=sp.PIPE)
err = proc.stderr.read()
if proc.wait() != 0:
return "!!! %s CONVERSION ERROR %d: %s !!!" % (
os.path.basename(self.binary).upper(),
proc.returncode, err)
json = HocrParser().parsefile(tmp.name)
self.logger.info("%s" % json)
os.unlink(tmp.name)
set_progress(self.logger, kwargs["progress_func"], 100, 100)
return json
|
Allow Cuneiform to do full page conversions. Downsides: 1) it crashes on quite a lot of pages 2) there's no progress output
|
Allow Cuneiform to do full page conversions. Downsides: 1) it crashes on quite a lot of pages 2) there's no progress output
|
Python
|
apache-2.0
|
vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium
|
+ import tempfile
+ import subprocess as sp
+ from ocradmin.ocr.tools import check_aborted, set_progress
+ from ocradmin.ocr.utils import HocrParser
+ from generic_wrapper import *
- from generic_wrapper import *
def main_class():
return CuneiformWrapper
class CuneiformWrapper(GenericWrapper):
"""
Override certain methods of the OcropusWrapper to
use Cuneiform for recognition of individual lines.
"""
name = "cuneiform"
capabilities = ("line", "page")
binary = get_binary("cuneiform")
def get_command(self, outfile, image):
"""
Cuneiform command line. Simplified for now.
"""
return [self.binary, "-o", outfile, image]
+ def convert(self, filepath, *args, **kwargs):
+ """
+ Convert a full page.
+ """
+ json = None
+ with tempfile.NamedTemporaryFile(delete=False) as tmp:
+ tmp.close()
+ args = [self.binary, "-f", "hocr", "-o", tmp.name, filepath]
+ self.logger.info(args)
+ proc = sp.Popen(args, stderr=sp.PIPE)
+ err = proc.stderr.read()
+ if proc.wait() != 0:
+ return "!!! %s CONVERSION ERROR %d: %s !!!" % (
+ os.path.basename(self.binary).upper(),
+ proc.returncode, err)
+ json = HocrParser().parsefile(tmp.name)
+ self.logger.info("%s" % json)
+ os.unlink(tmp.name)
+ set_progress(self.logger, kwargs["progress_func"], 100, 100)
+ return json
|
Allow Cuneiform to do full page conversions. Downsides: 1) it crashes on quite a lot of pages 2) there's no progress output
|
## Code Before:
from generic_wrapper import *
def main_class():
return CuneiformWrapper
class CuneiformWrapper(GenericWrapper):
"""
Override certain methods of the OcropusWrapper to
use Cuneiform for recognition of individual lines.
"""
name = "cuneiform"
capabilities = ("line", "page")
binary = get_binary("cuneiform")
def get_command(self, outfile, image):
"""
Cuneiform command line. Simplified for now.
"""
return [self.binary, "-o", outfile, image]
## Instruction:
Allow Cuneiform to do full page conversions. Downsides: 1) it crashes on quite a lot of pages 2) there's no progress output
## Code After:
import tempfile
import subprocess as sp
from ocradmin.ocr.tools import check_aborted, set_progress
from ocradmin.ocr.utils import HocrParser
from generic_wrapper import *
def main_class():
return CuneiformWrapper
class CuneiformWrapper(GenericWrapper):
"""
Override certain methods of the OcropusWrapper to
use Cuneiform for recognition of individual lines.
"""
name = "cuneiform"
capabilities = ("line", "page")
binary = get_binary("cuneiform")
def get_command(self, outfile, image):
"""
Cuneiform command line. Simplified for now.
"""
return [self.binary, "-o", outfile, image]
def convert(self, filepath, *args, **kwargs):
"""
Convert a full page.
"""
json = None
with tempfile.NamedTemporaryFile(delete=False) as tmp:
tmp.close()
args = [self.binary, "-f", "hocr", "-o", tmp.name, filepath]
self.logger.info(args)
proc = sp.Popen(args, stderr=sp.PIPE)
err = proc.stderr.read()
if proc.wait() != 0:
return "!!! %s CONVERSION ERROR %d: %s !!!" % (
os.path.basename(self.binary).upper(),
proc.returncode, err)
json = HocrParser().parsefile(tmp.name)
self.logger.info("%s" % json)
os.unlink(tmp.name)
set_progress(self.logger, kwargs["progress_func"], 100, 100)
return json
|
+ import tempfile
+ import subprocess as sp
+ from ocradmin.ocr.tools import check_aborted, set_progress
+ from ocradmin.ocr.utils import HocrParser
+ from generic_wrapper import *
- from generic_wrapper import *
def main_class():
return CuneiformWrapper
class CuneiformWrapper(GenericWrapper):
"""
Override certain methods of the OcropusWrapper to
use Cuneiform for recognition of individual lines.
"""
name = "cuneiform"
capabilities = ("line", "page")
binary = get_binary("cuneiform")
def get_command(self, outfile, image):
"""
Cuneiform command line. Simplified for now.
"""
return [self.binary, "-o", outfile, image]
+ def convert(self, filepath, *args, **kwargs):
+ """
+ Convert a full page.
+ """
+ json = None
+ with tempfile.NamedTemporaryFile(delete=False) as tmp:
+ tmp.close()
+ args = [self.binary, "-f", "hocr", "-o", tmp.name, filepath]
+ self.logger.info(args)
+ proc = sp.Popen(args, stderr=sp.PIPE)
+ err = proc.stderr.read()
+ if proc.wait() != 0:
+ return "!!! %s CONVERSION ERROR %d: %s !!!" % (
+ os.path.basename(self.binary).upper(),
+ proc.returncode, err)
+ json = HocrParser().parsefile(tmp.name)
+ self.logger.info("%s" % json)
+ os.unlink(tmp.name)
+ set_progress(self.logger, kwargs["progress_func"], 100, 100)
+ return json
|
fdaabeaa3694103153c81a18971e6b55597cd66e
|
Sketches/JT/Jam/library/trunk/Kamaelia/Apps/Jam/Audio/Synth.py
|
Sketches/JT/Jam/library/trunk/Kamaelia/Apps/Jam/Audio/Synth.py
|
import Axon
from Kamaelia.Apps.Jam.Audio.Polyphony import Polyphoniser
from Kamaelia.Apps.Jam.Audio.Mixer import MonoMixer
class Synth(Axon.Component.component):
polyphony = 8
polyphoniser = Polyphoniser
def __init__(self, voiceGenerator, **argd):
super(Synth, self).__init__(**argd)
polyphoniser = self.polyphoniser(**argd).activate()
mixer = MonoMixer(channels=self.polyphony, **argd).activate()
self.link((self, "inbox"), (polyphoniser, "inbox"), passthrough=1)
self.link((mixer, "outbox"), (self, "outbox"), passthrough=2)
for index, voice in enumerate(voiceGenerator()):
voice = voice.activate()
self.link((polyphoniser, "voice%i" % index), (voice, "inbox"))
self.link((voice, "outbox"), (mixer, "in%i" % index))
def main(self):
while 1:
if not self.anyReady():
self.pause()
yield 1
|
import Axon
from Kamaelia.Apps.Jam.Audio.Polyphony import Polyphoniser
class Synth(Axon.Component.component):
polyphony = 8
polyphoniser = Polyphoniser
def __init__(self, voiceGenerator, **argd):
super(Synth, self).__init__(**argd)
polyphoniser = self.polyphoniser(**argd).activate()
self.link((self, "inbox"), (polyphoniser, "inbox"), passthrough=1)
for index, voice in enumerate(voiceGenerator()):
voice = voice.activate()
self.link((polyphoniser, "voice%i" % index), (voice, "inbox"))
def main(self):
while 1:
if not self.anyReady():
self.pause()
yield 1
|
Remove mixer section from synth code to reflect the components directly calling pygame mixer methods.
|
Remove mixer section from synth code to reflect the components directly calling pygame mixer methods.
|
Python
|
apache-2.0
|
sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia
|
import Axon
from Kamaelia.Apps.Jam.Audio.Polyphony import Polyphoniser
- from Kamaelia.Apps.Jam.Audio.Mixer import MonoMixer
class Synth(Axon.Component.component):
polyphony = 8
polyphoniser = Polyphoniser
def __init__(self, voiceGenerator, **argd):
super(Synth, self).__init__(**argd)
polyphoniser = self.polyphoniser(**argd).activate()
- mixer = MonoMixer(channels=self.polyphony, **argd).activate()
self.link((self, "inbox"), (polyphoniser, "inbox"), passthrough=1)
- self.link((mixer, "outbox"), (self, "outbox"), passthrough=2)
for index, voice in enumerate(voiceGenerator()):
voice = voice.activate()
self.link((polyphoniser, "voice%i" % index), (voice, "inbox"))
- self.link((voice, "outbox"), (mixer, "in%i" % index))
def main(self):
while 1:
if not self.anyReady():
self.pause()
yield 1
|
Remove mixer section from synth code to reflect the components directly calling pygame mixer methods.
|
## Code Before:
import Axon
from Kamaelia.Apps.Jam.Audio.Polyphony import Polyphoniser
from Kamaelia.Apps.Jam.Audio.Mixer import MonoMixer
class Synth(Axon.Component.component):
polyphony = 8
polyphoniser = Polyphoniser
def __init__(self, voiceGenerator, **argd):
super(Synth, self).__init__(**argd)
polyphoniser = self.polyphoniser(**argd).activate()
mixer = MonoMixer(channels=self.polyphony, **argd).activate()
self.link((self, "inbox"), (polyphoniser, "inbox"), passthrough=1)
self.link((mixer, "outbox"), (self, "outbox"), passthrough=2)
for index, voice in enumerate(voiceGenerator()):
voice = voice.activate()
self.link((polyphoniser, "voice%i" % index), (voice, "inbox"))
self.link((voice, "outbox"), (mixer, "in%i" % index))
def main(self):
while 1:
if not self.anyReady():
self.pause()
yield 1
## Instruction:
Remove mixer section from synth code to reflect the components directly calling pygame mixer methods.
## Code After:
import Axon
from Kamaelia.Apps.Jam.Audio.Polyphony import Polyphoniser
class Synth(Axon.Component.component):
polyphony = 8
polyphoniser = Polyphoniser
def __init__(self, voiceGenerator, **argd):
super(Synth, self).__init__(**argd)
polyphoniser = self.polyphoniser(**argd).activate()
self.link((self, "inbox"), (polyphoniser, "inbox"), passthrough=1)
for index, voice in enumerate(voiceGenerator()):
voice = voice.activate()
self.link((polyphoniser, "voice%i" % index), (voice, "inbox"))
def main(self):
while 1:
if not self.anyReady():
self.pause()
yield 1
|
import Axon
from Kamaelia.Apps.Jam.Audio.Polyphony import Polyphoniser
- from Kamaelia.Apps.Jam.Audio.Mixer import MonoMixer
class Synth(Axon.Component.component):
polyphony = 8
polyphoniser = Polyphoniser
def __init__(self, voiceGenerator, **argd):
super(Synth, self).__init__(**argd)
polyphoniser = self.polyphoniser(**argd).activate()
- mixer = MonoMixer(channels=self.polyphony, **argd).activate()
self.link((self, "inbox"), (polyphoniser, "inbox"), passthrough=1)
- self.link((mixer, "outbox"), (self, "outbox"), passthrough=2)
for index, voice in enumerate(voiceGenerator()):
voice = voice.activate()
self.link((polyphoniser, "voice%i" % index), (voice, "inbox"))
- self.link((voice, "outbox"), (mixer, "in%i" % index))
def main(self):
while 1:
if not self.anyReady():
self.pause()
yield 1
|
dcc2821cac0619fc2ca5f486ad30416f3c3cfda9
|
ce/expr/parser.py
|
ce/expr/parser.py
|
from ..semantics import mpq
from .common import OPERATORS, ADD_OP, MULTIPLY_OP
def try_to_number(s):
try:
return mpq(s)
except (ValueError, TypeError):
return s
def _parse_r(s):
s = s.strip()
bracket_level = 0
operator_pos = -1
for i, v in enumerate(s):
if v == '(':
bracket_level += 1
if v == ')':
bracket_level -= 1
if bracket_level == 1 and v in OPERATORS:
operator_pos = i
break
if operator_pos == -1:
return s
a1 = _parse_r(s[1:operator_pos].strip())
a2 = _parse_r(s[operator_pos + 1:-1].strip())
return Expr(s[operator_pos], a1, a2)
|
import ast
from ..semantics import mpq
from .common import OPERATORS, ADD_OP, MULTIPLY_OP
def try_to_number(s):
try:
return mpq(s)
except (ValueError, TypeError):
return s
OPERATOR_MAP = {
ast.Add: ADD_OP,
ast.Mult: MULTIPLY_OP,
}
def parse(s):
from .biop import Expr
def _parse_r(t):
try:
return t.n
except AttributeError:
pass
try:
return t.id
except AttributeError:
op = OPERATOR_MAP[t.op.__class__]
a1 = _parse_r(t.left)
a2 = _parse_r(t.right)
return Expr(op, a1, a2)
return _parse_r(ast.parse(s, mode='eval').body)
|
Replace parsing with Python's ast
|
Replace parsing with Python's ast
Allows greater flexibility and syntax checks
|
Python
|
mit
|
admk/soap
|
+
+ import ast
from ..semantics import mpq
from .common import OPERATORS, ADD_OP, MULTIPLY_OP
def try_to_number(s):
try:
return mpq(s)
except (ValueError, TypeError):
return s
+ OPERATOR_MAP = {
+ ast.Add: ADD_OP,
+ ast.Mult: MULTIPLY_OP,
+ }
- def _parse_r(s):
- s = s.strip()
- bracket_level = 0
- operator_pos = -1
- for i, v in enumerate(s):
- if v == '(':
- bracket_level += 1
- if v == ')':
- bracket_level -= 1
- if bracket_level == 1 and v in OPERATORS:
- operator_pos = i
- break
- if operator_pos == -1:
- return s
- a1 = _parse_r(s[1:operator_pos].strip())
- a2 = _parse_r(s[operator_pos + 1:-1].strip())
- return Expr(s[operator_pos], a1, a2)
+
+ def parse(s):
+ from .biop import Expr
+ def _parse_r(t):
+ try:
+ return t.n
+ except AttributeError:
+ pass
+ try:
+ return t.id
+ except AttributeError:
+ op = OPERATOR_MAP[t.op.__class__]
+ a1 = _parse_r(t.left)
+ a2 = _parse_r(t.right)
+ return Expr(op, a1, a2)
+ return _parse_r(ast.parse(s, mode='eval').body)
+
|
Replace parsing with Python's ast
|
## Code Before:
from ..semantics import mpq
from .common import OPERATORS, ADD_OP, MULTIPLY_OP
def try_to_number(s):
try:
return mpq(s)
except (ValueError, TypeError):
return s
def _parse_r(s):
s = s.strip()
bracket_level = 0
operator_pos = -1
for i, v in enumerate(s):
if v == '(':
bracket_level += 1
if v == ')':
bracket_level -= 1
if bracket_level == 1 and v in OPERATORS:
operator_pos = i
break
if operator_pos == -1:
return s
a1 = _parse_r(s[1:operator_pos].strip())
a2 = _parse_r(s[operator_pos + 1:-1].strip())
return Expr(s[operator_pos], a1, a2)
## Instruction:
Replace parsing with Python's ast
## Code After:
import ast
from ..semantics import mpq
from .common import OPERATORS, ADD_OP, MULTIPLY_OP
def try_to_number(s):
try:
return mpq(s)
except (ValueError, TypeError):
return s
OPERATOR_MAP = {
ast.Add: ADD_OP,
ast.Mult: MULTIPLY_OP,
}
def parse(s):
from .biop import Expr
def _parse_r(t):
try:
return t.n
except AttributeError:
pass
try:
return t.id
except AttributeError:
op = OPERATOR_MAP[t.op.__class__]
a1 = _parse_r(t.left)
a2 = _parse_r(t.right)
return Expr(op, a1, a2)
return _parse_r(ast.parse(s, mode='eval').body)
|
+
+ import ast
from ..semantics import mpq
from .common import OPERATORS, ADD_OP, MULTIPLY_OP
def try_to_number(s):
try:
return mpq(s)
except (ValueError, TypeError):
return s
+ OPERATOR_MAP = {
+ ast.Add: ADD_OP,
+ ast.Mult: MULTIPLY_OP,
+ }
+
+
- def _parse_r(s):
? - --
+ def parse(s):
+ from .biop import Expr
+ def _parse_r(t):
+ try:
- s = s.strip()
- bracket_level = 0
- operator_pos = -1
- for i, v in enumerate(s):
- if v == '(':
- bracket_level += 1
- if v == ')':
- bracket_level -= 1
- if bracket_level == 1 and v in OPERATORS:
- operator_pos = i
- break
- if operator_pos == -1:
- return s
? ^
+ return t.n
? ++++ ^^^
- a1 = _parse_r(s[1:operator_pos].strip())
- a2 = _parse_r(s[operator_pos + 1:-1].strip())
- return Expr(s[operator_pos], a1, a2)
+ except AttributeError:
+ pass
+ try:
+ return t.id
+ except AttributeError:
+ op = OPERATOR_MAP[t.op.__class__]
+ a1 = _parse_r(t.left)
+ a2 = _parse_r(t.right)
+ return Expr(op, a1, a2)
+ return _parse_r(ast.parse(s, mode='eval').body)
|
cc44afdca3ebcdaeed3555f161d3e0a1992c19eb
|
planet/api/__init__.py
|
planet/api/__init__.py
|
from .exceptions import (APIException, BadQuery, InvalidAPIKey)
from .exceptions import (NoPermission, MissingResource, OverQuota)
from .exceptions import (ServerError, RequestCancelled, TooManyRequests)
from .client import (ClientV1)
from .utils import write_to_file
from . import filters
__all__ = [
ClientV1, APIException, BadQuery, InvalidAPIKey,
NoPermission, MissingResource, OverQuota, ServerError, RequestCancelled,
TooManyRequests,
write_to_file,
filters
]
|
from .exceptions import (APIException, BadQuery, InvalidAPIKey)
from .exceptions import (NoPermission, MissingResource, OverQuota)
from .exceptions import (ServerError, RequestCancelled, TooManyRequests)
from .client import (ClientV1)
from .utils import write_to_file
from . import filters
from .__version__ import __version__ # NOQA
__all__ = [
ClientV1, APIException, BadQuery, InvalidAPIKey,
NoPermission, MissingResource, OverQuota, ServerError, RequestCancelled,
TooManyRequests,
write_to_file,
filters
]
|
Put api.__version__ back in after version shuffle
|
Put api.__version__ back in after version shuffle
|
Python
|
apache-2.0
|
planetlabs/planet-client-python,planetlabs/planet-client-python
|
from .exceptions import (APIException, BadQuery, InvalidAPIKey)
from .exceptions import (NoPermission, MissingResource, OverQuota)
from .exceptions import (ServerError, RequestCancelled, TooManyRequests)
from .client import (ClientV1)
from .utils import write_to_file
from . import filters
+ from .__version__ import __version__ # NOQA
__all__ = [
ClientV1, APIException, BadQuery, InvalidAPIKey,
NoPermission, MissingResource, OverQuota, ServerError, RequestCancelled,
TooManyRequests,
write_to_file,
filters
]
|
Put api.__version__ back in after version shuffle
|
## Code Before:
from .exceptions import (APIException, BadQuery, InvalidAPIKey)
from .exceptions import (NoPermission, MissingResource, OverQuota)
from .exceptions import (ServerError, RequestCancelled, TooManyRequests)
from .client import (ClientV1)
from .utils import write_to_file
from . import filters
__all__ = [
ClientV1, APIException, BadQuery, InvalidAPIKey,
NoPermission, MissingResource, OverQuota, ServerError, RequestCancelled,
TooManyRequests,
write_to_file,
filters
]
## Instruction:
Put api.__version__ back in after version shuffle
## Code After:
from .exceptions import (APIException, BadQuery, InvalidAPIKey)
from .exceptions import (NoPermission, MissingResource, OverQuota)
from .exceptions import (ServerError, RequestCancelled, TooManyRequests)
from .client import (ClientV1)
from .utils import write_to_file
from . import filters
from .__version__ import __version__ # NOQA
__all__ = [
ClientV1, APIException, BadQuery, InvalidAPIKey,
NoPermission, MissingResource, OverQuota, ServerError, RequestCancelled,
TooManyRequests,
write_to_file,
filters
]
|
from .exceptions import (APIException, BadQuery, InvalidAPIKey)
from .exceptions import (NoPermission, MissingResource, OverQuota)
from .exceptions import (ServerError, RequestCancelled, TooManyRequests)
from .client import (ClientV1)
from .utils import write_to_file
from . import filters
+ from .__version__ import __version__ # NOQA
__all__ = [
ClientV1, APIException, BadQuery, InvalidAPIKey,
NoPermission, MissingResource, OverQuota, ServerError, RequestCancelled,
TooManyRequests,
write_to_file,
filters
]
|
cb6d0ea6c05eb62fafe97ac13d5665cb00b2db3c
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(name='spherical_functions',
version='1.0',
description='Python/numba implementation of Wigner D Matrices, spin-weighted spherical harmonics, and associated functions',
author='Michael Boyle',
# author_email='',
url='https://github.com/moble/spherical_functions',
package_dir={'spherical_functions': ''},
packages=['spherical_functions',],
)
|
from distutils.core import setup
setup(name='spherical_functions',
version='1.0',
description='Python/numba implementation of Wigner D Matrices, spin-weighted spherical harmonics, and associated functions',
author='Michael Boyle',
# author_email='',
url='https://github.com/moble/spherical_functions',
packages=['spherical_functions',],
package_dir={'spherical_functions': ''},
package_data={'spherical_functions': ['Wigner_coefficients.npy',
'binomial_coefficients.npy',
'ladder_operator_coefficients.npy']},
)
|
Copy data files for numpy
|
Copy data files for numpy
|
Python
|
mit
|
moble/spherical_functions
|
from distutils.core import setup
setup(name='spherical_functions',
version='1.0',
description='Python/numba implementation of Wigner D Matrices, spin-weighted spherical harmonics, and associated functions',
author='Michael Boyle',
# author_email='',
url='https://github.com/moble/spherical_functions',
+ packages=['spherical_functions',],
package_dir={'spherical_functions': ''},
- packages=['spherical_functions',],
+ package_data={'spherical_functions': ['Wigner_coefficients.npy',
+ 'binomial_coefficients.npy',
+ 'ladder_operator_coefficients.npy']},
)
|
Copy data files for numpy
|
## Code Before:
from distutils.core import setup
setup(name='spherical_functions',
version='1.0',
description='Python/numba implementation of Wigner D Matrices, spin-weighted spherical harmonics, and associated functions',
author='Michael Boyle',
# author_email='',
url='https://github.com/moble/spherical_functions',
package_dir={'spherical_functions': ''},
packages=['spherical_functions',],
)
## Instruction:
Copy data files for numpy
## Code After:
from distutils.core import setup
setup(name='spherical_functions',
version='1.0',
description='Python/numba implementation of Wigner D Matrices, spin-weighted spherical harmonics, and associated functions',
author='Michael Boyle',
# author_email='',
url='https://github.com/moble/spherical_functions',
packages=['spherical_functions',],
package_dir={'spherical_functions': ''},
package_data={'spherical_functions': ['Wigner_coefficients.npy',
'binomial_coefficients.npy',
'ladder_operator_coefficients.npy']},
)
|
from distutils.core import setup
setup(name='spherical_functions',
version='1.0',
description='Python/numba implementation of Wigner D Matrices, spin-weighted spherical harmonics, and associated functions',
author='Michael Boyle',
# author_email='',
url='https://github.com/moble/spherical_functions',
+ packages=['spherical_functions',],
package_dir={'spherical_functions': ''},
- packages=['spherical_functions',],
+ package_data={'spherical_functions': ['Wigner_coefficients.npy',
+ 'binomial_coefficients.npy',
+ 'ladder_operator_coefficients.npy']},
)
|
1aa75af659daac62fdef423beac16aef1f057afb
|
test/testCore.py
|
test/testCore.py
|
import pyfits
import sys
def test_with_statement():
if sys.hexversion >= 0x02050000:
exec("""from __future__ import with_statement
with pyfits.open("ascii.fits") as f: pass""")
def test_naxisj_check():
hdulist = pyfits.open("o4sp040b0_raw.fits")
hdulist[1].header.update("NAXIS3", 500)
assert 'NAXIS3' in hdulist[1].header
hdulist.verify('fix')
assert 'NAXIS3' not in hdulist[1].header
|
import pyfits
import numpy as np
import sys
def test_with_statement():
if sys.hexversion >= 0x02050000:
exec("""from __future__ import with_statement
with pyfits.open("ascii.fits") as f: pass""")
def test_naxisj_check():
hdulist = pyfits.open("o4sp040b0_raw.fits")
hdulist[1].header.update("NAXIS3", 500)
assert 'NAXIS3' in hdulist[1].header
hdulist.verify('fix')
assert 'NAXIS3' not in hdulist[1].header
def test_byteswap():
p = pyfits.PrimaryHDU()
l = pyfits.HDUList()
n = np.zeros(3, dtype='i2')
n[0] = 1
n[1] = 60000
n[2] = 2
c = pyfits.Column(name='foo', format='i2', bscale=1, bzero=32768, array=n)
t = pyfits.new_table([c])
l.append(p)
l.append(t)
l.writeto('test.fits', clobber=True)
p = pyfits.open('test.fits')
assert p[1].data[1]['foo'] == 60000.0
|
Add test for byteswapping bug resolved in r514.
|
Add test for byteswapping bug resolved in r514.
git-svn-id: 5305e2c1a78737cf7dd5f8f44e9bbbd00348fde7@543 ed100bfc-0583-0410-97f2-c26b58777a21
|
Python
|
bsd-3-clause
|
embray/PyFITS,spacetelescope/PyFITS,embray/PyFITS,embray/PyFITS,spacetelescope/PyFITS,embray/PyFITS
|
import pyfits
+ import numpy as np
import sys
def test_with_statement():
if sys.hexversion >= 0x02050000:
exec("""from __future__ import with_statement
with pyfits.open("ascii.fits") as f: pass""")
def test_naxisj_check():
hdulist = pyfits.open("o4sp040b0_raw.fits")
hdulist[1].header.update("NAXIS3", 500)
assert 'NAXIS3' in hdulist[1].header
hdulist.verify('fix')
assert 'NAXIS3' not in hdulist[1].header
+ def test_byteswap():
+ p = pyfits.PrimaryHDU()
+ l = pyfits.HDUList()
+
+ n = np.zeros(3, dtype='i2')
+ n[0] = 1
+ n[1] = 60000
+ n[2] = 2
+
+ c = pyfits.Column(name='foo', format='i2', bscale=1, bzero=32768, array=n)
+ t = pyfits.new_table([c])
+
+ l.append(p)
+ l.append(t)
+
+ l.writeto('test.fits', clobber=True)
+
+ p = pyfits.open('test.fits')
+ assert p[1].data[1]['foo'] == 60000.0
+
|
Add test for byteswapping bug resolved in r514.
|
## Code Before:
import pyfits
import sys
def test_with_statement():
if sys.hexversion >= 0x02050000:
exec("""from __future__ import with_statement
with pyfits.open("ascii.fits") as f: pass""")
def test_naxisj_check():
hdulist = pyfits.open("o4sp040b0_raw.fits")
hdulist[1].header.update("NAXIS3", 500)
assert 'NAXIS3' in hdulist[1].header
hdulist.verify('fix')
assert 'NAXIS3' not in hdulist[1].header
## Instruction:
Add test for byteswapping bug resolved in r514.
## Code After:
import pyfits
import numpy as np
import sys
def test_with_statement():
if sys.hexversion >= 0x02050000:
exec("""from __future__ import with_statement
with pyfits.open("ascii.fits") as f: pass""")
def test_naxisj_check():
hdulist = pyfits.open("o4sp040b0_raw.fits")
hdulist[1].header.update("NAXIS3", 500)
assert 'NAXIS3' in hdulist[1].header
hdulist.verify('fix')
assert 'NAXIS3' not in hdulist[1].header
def test_byteswap():
p = pyfits.PrimaryHDU()
l = pyfits.HDUList()
n = np.zeros(3, dtype='i2')
n[0] = 1
n[1] = 60000
n[2] = 2
c = pyfits.Column(name='foo', format='i2', bscale=1, bzero=32768, array=n)
t = pyfits.new_table([c])
l.append(p)
l.append(t)
l.writeto('test.fits', clobber=True)
p = pyfits.open('test.fits')
assert p[1].data[1]['foo'] == 60000.0
|
import pyfits
+ import numpy as np
import sys
def test_with_statement():
if sys.hexversion >= 0x02050000:
exec("""from __future__ import with_statement
with pyfits.open("ascii.fits") as f: pass""")
def test_naxisj_check():
hdulist = pyfits.open("o4sp040b0_raw.fits")
hdulist[1].header.update("NAXIS3", 500)
assert 'NAXIS3' in hdulist[1].header
hdulist.verify('fix')
assert 'NAXIS3' not in hdulist[1].header
+
+ def test_byteswap():
+ p = pyfits.PrimaryHDU()
+ l = pyfits.HDUList()
+
+ n = np.zeros(3, dtype='i2')
+ n[0] = 1
+ n[1] = 60000
+ n[2] = 2
+
+ c = pyfits.Column(name='foo', format='i2', bscale=1, bzero=32768, array=n)
+ t = pyfits.new_table([c])
+
+ l.append(p)
+ l.append(t)
+
+ l.writeto('test.fits', clobber=True)
+
+ p = pyfits.open('test.fits')
+ assert p[1].data[1]['foo'] == 60000.0
|
1475a740f122f915127ed283ec25f0d48e2cc211
|
tests/integration/templatetags/test_currency_filters.py
|
tests/integration/templatetags/test_currency_filters.py
|
from decimal import Decimal as D
from django.utils import translation
from django.test import TestCase
from django import template
def render(template_string, ctx):
tpl = template.Template(template_string)
return tpl.render(template.Context(ctx))
class TestCurrencyFilter(TestCase):
def setUp(self):
self.template = template.Template(
"{% load currency_filters %}"
"{{ price|currency }}"
)
def test_renders_price_correctly(self):
out = self.template.render(template.Context({
'price': D('10.23'),
}))
self.assertTrue(u'£10.23' in out)
def test_handles_none_price_gracefully(self):
self.template.render(template.Context({
'price': None
}))
def test_handles_string_price_gracefully(self):
self.template.render(template.Context({
'price': ''
}))
@translation.override(None, deactivate=True)
def test_handles_no_translation(self):
self.template.render(template.Context({
'price': D('10.23'),
}))
|
from decimal import Decimal as D
from django.utils import translation
from django.test import TestCase
from django import template
def render(template_string, ctx):
tpl = template.Template(template_string)
return tpl.render(template.Context(ctx))
class TestCurrencyFilter(TestCase):
def setUp(self):
self.template = template.Template(
"{% load currency_filters %}"
"{{ price|currency }}"
)
def test_renders_price_correctly(self):
out = self.template.render(template.Context({
'price': D('10.23'),
}))
self.assertTrue(u'£10.23' in out)
def test_handles_none_price_gracefully(self):
self.template.render(template.Context({
'price': None
}))
def test_handles_string_price_gracefully(self):
self.template.render(template.Context({
'price': ''
}))
def test_handles_no_translation(self):
with translation.override(None, deactivate=True):
self.template.render(template.Context({
'price': D('10.23'),
}))
|
Use translation.override as a context manager instead of a decorator.
|
Use translation.override as a context manager instead of a decorator.
|
Python
|
bsd-3-clause
|
kapari/django-oscar,MatthewWilkes/django-oscar,WillisXChen/django-oscar,taedori81/django-oscar,solarissmoke/django-oscar,WillisXChen/django-oscar,WadeYuChen/django-oscar,MatthewWilkes/django-oscar,taedori81/django-oscar,django-oscar/django-oscar,anentropic/django-oscar,okfish/django-oscar,jlmadurga/django-oscar,saadatqadri/django-oscar,john-parton/django-oscar,solarissmoke/django-oscar,jlmadurga/django-oscar,Jannes123/django-oscar,itbabu/django-oscar,Bogh/django-oscar,anentropic/django-oscar,Jannes123/django-oscar,sonofatailor/django-oscar,sonofatailor/django-oscar,WillisXChen/django-oscar,sasha0/django-oscar,django-oscar/django-oscar,WadeYuChen/django-oscar,WillisXChen/django-oscar,sonofatailor/django-oscar,MatthewWilkes/django-oscar,dongguangming/django-oscar,okfish/django-oscar,kapari/django-oscar,jlmadurga/django-oscar,rocopartners/django-oscar,dongguangming/django-oscar,bschuon/django-oscar,ka7eh/django-oscar,Jannes123/django-oscar,Bogh/django-oscar,WadeYuChen/django-oscar,michaelkuty/django-oscar,spartonia/django-oscar,michaelkuty/django-oscar,michaelkuty/django-oscar,taedori81/django-oscar,sonofatailor/django-oscar,dongguangming/django-oscar,spartonia/django-oscar,django-oscar/django-oscar,ka7eh/django-oscar,dongguangming/django-oscar,Bogh/django-oscar,saadatqadri/django-oscar,okfish/django-oscar,saadatqadri/django-oscar,eddiep1101/django-oscar,anentropic/django-oscar,sasha0/django-oscar,john-parton/django-oscar,itbabu/django-oscar,eddiep1101/django-oscar,bschuon/django-oscar,faratro/django-oscar,faratro/django-oscar,ka7eh/django-oscar,bschuon/django-oscar,okfish/django-oscar,michaelkuty/django-oscar,itbabu/django-oscar,solarissmoke/django-oscar,kapari/django-oscar,WadeYuChen/django-oscar,WillisXChen/django-oscar,rocopartners/django-oscar,Bogh/django-oscar,faratro/django-oscar,itbabu/django-oscar,ka7eh/django-oscar,Jannes123/django-oscar,rocopartners/django-oscar,sasha0/django-oscar,bschuon/django-oscar,MatthewWilkes/django-oscar,faratro/django-oscar,anentrop
ic/django-oscar,spartonia/django-oscar,kapari/django-oscar,jlmadurga/django-oscar,sasha0/django-oscar,solarissmoke/django-oscar,john-parton/django-oscar,eddiep1101/django-oscar,eddiep1101/django-oscar,saadatqadri/django-oscar,rocopartners/django-oscar,spartonia/django-oscar,WillisXChen/django-oscar,django-oscar/django-oscar,taedori81/django-oscar,john-parton/django-oscar
|
from decimal import Decimal as D
from django.utils import translation
from django.test import TestCase
from django import template
def render(template_string, ctx):
tpl = template.Template(template_string)
return tpl.render(template.Context(ctx))
class TestCurrencyFilter(TestCase):
def setUp(self):
self.template = template.Template(
"{% load currency_filters %}"
"{{ price|currency }}"
)
def test_renders_price_correctly(self):
out = self.template.render(template.Context({
'price': D('10.23'),
}))
self.assertTrue(u'£10.23' in out)
def test_handles_none_price_gracefully(self):
self.template.render(template.Context({
'price': None
}))
def test_handles_string_price_gracefully(self):
self.template.render(template.Context({
'price': ''
}))
- @translation.override(None, deactivate=True)
def test_handles_no_translation(self):
+ with translation.override(None, deactivate=True):
- self.template.render(template.Context({
+ self.template.render(template.Context({
- 'price': D('10.23'),
+ 'price': D('10.23'),
- }))
+ }))
|
Use translation.override as a context manager instead of a decorator.
|
## Code Before:
from decimal import Decimal as D
from django.utils import translation
from django.test import TestCase
from django import template
def render(template_string, ctx):
tpl = template.Template(template_string)
return tpl.render(template.Context(ctx))
class TestCurrencyFilter(TestCase):
def setUp(self):
self.template = template.Template(
"{% load currency_filters %}"
"{{ price|currency }}"
)
def test_renders_price_correctly(self):
out = self.template.render(template.Context({
'price': D('10.23'),
}))
self.assertTrue(u'£10.23' in out)
def test_handles_none_price_gracefully(self):
self.template.render(template.Context({
'price': None
}))
def test_handles_string_price_gracefully(self):
self.template.render(template.Context({
'price': ''
}))
@translation.override(None, deactivate=True)
def test_handles_no_translation(self):
self.template.render(template.Context({
'price': D('10.23'),
}))
## Instruction:
Use translation.override as a context manager instead of a decorator.
## Code After:
from decimal import Decimal as D
from django.utils import translation
from django.test import TestCase
from django import template
def render(template_string, ctx):
tpl = template.Template(template_string)
return tpl.render(template.Context(ctx))
class TestCurrencyFilter(TestCase):
def setUp(self):
self.template = template.Template(
"{% load currency_filters %}"
"{{ price|currency }}"
)
def test_renders_price_correctly(self):
out = self.template.render(template.Context({
'price': D('10.23'),
}))
self.assertTrue(u'£10.23' in out)
def test_handles_none_price_gracefully(self):
self.template.render(template.Context({
'price': None
}))
def test_handles_string_price_gracefully(self):
self.template.render(template.Context({
'price': ''
}))
def test_handles_no_translation(self):
with translation.override(None, deactivate=True):
self.template.render(template.Context({
'price': D('10.23'),
}))
|
from decimal import Decimal as D
from django.utils import translation
from django.test import TestCase
from django import template
def render(template_string, ctx):
tpl = template.Template(template_string)
return tpl.render(template.Context(ctx))
class TestCurrencyFilter(TestCase):
def setUp(self):
self.template = template.Template(
"{% load currency_filters %}"
"{{ price|currency }}"
)
def test_renders_price_correctly(self):
out = self.template.render(template.Context({
'price': D('10.23'),
}))
self.assertTrue(u'£10.23' in out)
def test_handles_none_price_gracefully(self):
self.template.render(template.Context({
'price': None
}))
def test_handles_string_price_gracefully(self):
self.template.render(template.Context({
'price': ''
}))
- @translation.override(None, deactivate=True)
def test_handles_no_translation(self):
+ with translation.override(None, deactivate=True):
- self.template.render(template.Context({
+ self.template.render(template.Context({
? ++++
- 'price': D('10.23'),
+ 'price': D('10.23'),
? ++++
- }))
+ }))
? ++++
|
cea4e45dc95310993e3b23ceadf83cbda810f536
|
EasyEuler/commands/list.py
|
EasyEuler/commands/list.py
|
import click
from tabulate import tabulate
from EasyEuler import data
@click.command()
@click.option('--sort', '-s', type=click.Choice(['id', 'difficulty']),
default='id', help='Sort the list by problem attribute.')
def cli(sort):
""" Lists all available problems. """
problems = sorted(data.problems, key=lambda p: p[sort.lower()])
problem_list = [(problem['id'], problem['name'], problem['difficulty'])
for problem in problems]
problem_table = tabulate(problem_list, ['ID', 'Name', 'Difficulty'],
tablefmt='fancy_grid')
click.echo_via_pager(problem_table)
|
import click
from tabulate import tabulate
from EasyEuler import data
@click.command()
@click.option('--sort', '-s', type=click.Choice(['id', 'difficulty']),
default='id', help='Sort the list by problem attribute.')
def cli(sort):
""" Lists all available problems. """
problems = sorted(data.problems, key=lambda p: p[sort.lower()])
problem_list = [(problem['id'], problem['name'],
'%d%%' % problem['difficulty']) for problem in problems]
problem_table = tabulate(problem_list, ['ID', 'Name', 'Difficulty'],
tablefmt='fancy_grid')
click.echo_via_pager(problem_table)
|
Add percentage sign to difficulty
|
Add percentage sign to difficulty
|
Python
|
mit
|
Encrylize/EasyEuler
|
import click
from tabulate import tabulate
from EasyEuler import data
@click.command()
@click.option('--sort', '-s', type=click.Choice(['id', 'difficulty']),
default='id', help='Sort the list by problem attribute.')
def cli(sort):
""" Lists all available problems. """
problems = sorted(data.problems, key=lambda p: p[sort.lower()])
- problem_list = [(problem['id'], problem['name'], problem['difficulty'])
+ problem_list = [(problem['id'], problem['name'],
- for problem in problems]
+ '%d%%' % problem['difficulty']) for problem in problems]
problem_table = tabulate(problem_list, ['ID', 'Name', 'Difficulty'],
tablefmt='fancy_grid')
click.echo_via_pager(problem_table)
|
Add percentage sign to difficulty
|
## Code Before:
import click
from tabulate import tabulate
from EasyEuler import data
@click.command()
@click.option('--sort', '-s', type=click.Choice(['id', 'difficulty']),
default='id', help='Sort the list by problem attribute.')
def cli(sort):
""" Lists all available problems. """
problems = sorted(data.problems, key=lambda p: p[sort.lower()])
problem_list = [(problem['id'], problem['name'], problem['difficulty'])
for problem in problems]
problem_table = tabulate(problem_list, ['ID', 'Name', 'Difficulty'],
tablefmt='fancy_grid')
click.echo_via_pager(problem_table)
## Instruction:
Add percentage sign to difficulty
## Code After:
import click
from tabulate import tabulate
from EasyEuler import data
@click.command()
@click.option('--sort', '-s', type=click.Choice(['id', 'difficulty']),
default='id', help='Sort the list by problem attribute.')
def cli(sort):
""" Lists all available problems. """
problems = sorted(data.problems, key=lambda p: p[sort.lower()])
problem_list = [(problem['id'], problem['name'],
'%d%%' % problem['difficulty']) for problem in problems]
problem_table = tabulate(problem_list, ['ID', 'Name', 'Difficulty'],
tablefmt='fancy_grid')
click.echo_via_pager(problem_table)
|
import click
from tabulate import tabulate
from EasyEuler import data
@click.command()
@click.option('--sort', '-s', type=click.Choice(['id', 'difficulty']),
default='id', help='Sort the list by problem attribute.')
def cli(sort):
""" Lists all available problems. """
problems = sorted(data.problems, key=lambda p: p[sort.lower()])
- problem_list = [(problem['id'], problem['name'], problem['difficulty'])
? -----------------------
+ problem_list = [(problem['id'], problem['name'],
- for problem in problems]
+ '%d%%' % problem['difficulty']) for problem in problems]
problem_table = tabulate(problem_list, ['ID', 'Name', 'Difficulty'],
tablefmt='fancy_grid')
click.echo_via_pager(problem_table)
|
e3a3e729eb60f5a7e134da5b58bb52d672e1d8b2
|
sitenco/config/sphinx.py
|
sitenco/config/sphinx.py
|
import sys
import abc
import os.path
import subprocess
from . import vcs
from .. import DOCS_PATH
class Sphinx(vcs.VCS):
"""Abstract class for project folder tools."""
__metaclass__ = abc.ABCMeta
def __init__(self, path, branch='master', url=None):
path = os.path.join(DOCS_PATH, path)
super(Sphinx, self).__init__(path, branch, url)
class Git(Sphinx, vcs.Git):
"""Git tool."""
def update(self):
self._repository.fetch()
self._repository.reset('--hard', 'origin/' + self.branch)
subprocess.check_call(
[sys.executable, 'setup.py', 'build_sphinx', '-b', 'dirhtml'],
cwd=self.path)
|
import sys
import abc
import os.path
import subprocess
from . import vcs
from .. import DOCS_PATH
class Sphinx(vcs.VCS):
"""Abstract class for project folder tools."""
__metaclass__ = abc.ABCMeta
def __init__(self, path, branch='master', url=None):
path = os.path.join(DOCS_PATH, path)
super(Sphinx, self).__init__(path, branch, url)
class Git(Sphinx, vcs.Git):
"""Git tool."""
def update(self):
self._repository.fetch()
self._repository.reset('--hard', 'origin/' + self.branch)
subprocess.check_call(
['python3', 'setup.py', 'build_sphinx', '-b', 'dirhtml'],
cwd=self.path)
|
Use python interpreter instead of sys.executable
|
Use python interpreter instead of sys.executable
|
Python
|
bsd-3-clause
|
Kozea/sitenco
|
import sys
import abc
import os.path
import subprocess
from . import vcs
from .. import DOCS_PATH
class Sphinx(vcs.VCS):
"""Abstract class for project folder tools."""
__metaclass__ = abc.ABCMeta
def __init__(self, path, branch='master', url=None):
path = os.path.join(DOCS_PATH, path)
super(Sphinx, self).__init__(path, branch, url)
class Git(Sphinx, vcs.Git):
"""Git tool."""
def update(self):
self._repository.fetch()
self._repository.reset('--hard', 'origin/' + self.branch)
subprocess.check_call(
- [sys.executable, 'setup.py', 'build_sphinx', '-b', 'dirhtml'],
+ ['python3', 'setup.py', 'build_sphinx', '-b', 'dirhtml'],
cwd=self.path)
|
Use python interpreter instead of sys.executable
|
## Code Before:
import sys
import abc
import os.path
import subprocess
from . import vcs
from .. import DOCS_PATH
class Sphinx(vcs.VCS):
"""Abstract class for project folder tools."""
__metaclass__ = abc.ABCMeta
def __init__(self, path, branch='master', url=None):
path = os.path.join(DOCS_PATH, path)
super(Sphinx, self).__init__(path, branch, url)
class Git(Sphinx, vcs.Git):
"""Git tool."""
def update(self):
self._repository.fetch()
self._repository.reset('--hard', 'origin/' + self.branch)
subprocess.check_call(
[sys.executable, 'setup.py', 'build_sphinx', '-b', 'dirhtml'],
cwd=self.path)
## Instruction:
Use python interpreter instead of sys.executable
## Code After:
import sys
import abc
import os.path
import subprocess
from . import vcs
from .. import DOCS_PATH
class Sphinx(vcs.VCS):
"""Abstract class for project folder tools."""
__metaclass__ = abc.ABCMeta
def __init__(self, path, branch='master', url=None):
path = os.path.join(DOCS_PATH, path)
super(Sphinx, self).__init__(path, branch, url)
class Git(Sphinx, vcs.Git):
"""Git tool."""
def update(self):
self._repository.fetch()
self._repository.reset('--hard', 'origin/' + self.branch)
subprocess.check_call(
['python3', 'setup.py', 'build_sphinx', '-b', 'dirhtml'],
cwd=self.path)
|
import sys
import abc
import os.path
import subprocess
from . import vcs
from .. import DOCS_PATH
class Sphinx(vcs.VCS):
"""Abstract class for project folder tools."""
__metaclass__ = abc.ABCMeta
def __init__(self, path, branch='master', url=None):
path = os.path.join(DOCS_PATH, path)
super(Sphinx, self).__init__(path, branch, url)
class Git(Sphinx, vcs.Git):
"""Git tool."""
def update(self):
self._repository.fetch()
self._repository.reset('--hard', 'origin/' + self.branch)
subprocess.check_call(
- [sys.executable, 'setup.py', 'build_sphinx', '-b', 'dirhtml'],
? ^ ------- ^^^^
+ ['python3', 'setup.py', 'build_sphinx', '-b', 'dirhtml'],
? ^^ ^^^^^
cwd=self.path)
|
69eafa95df4bdeb143d40c321f0a312d06efff1f
|
skimage/segmentation/__init__.py
|
skimage/segmentation/__init__.py
|
from .random_walker_segmentation import random_walker
from ._felzenszwalb import felzenszwalb
from ._slic import slic
from ._quickshift import quickshift
from .boundaries import find_boundaries, visualize_boundaries, mark_boundaries
from ._clear_border import clear_border
from ._join import join_segmentations, relabel_from_one
|
from .random_walker_segmentation import random_walker
from ._felzenszwalb import felzenszwalb
from ._slic import slic
from ._quickshift import quickshift
from .boundaries import find_boundaries, visualize_boundaries, mark_boundaries
from ._clear_border import clear_border
from ._join import join_segmentations, relabel_from_one
__all__ = ['random_walker',
'felzenszwalb',
'slic',
'quickshift',
'find_boundaries',
'visualize_boundaries',
'mark_boundaries',
'clear_border',
'join_segmentations',
'relabel_from_one']
|
Add __all__ to segmentation package
|
Add __all__ to segmentation package
|
Python
|
bsd-3-clause
|
Midafi/scikit-image,pratapvardhan/scikit-image,michaelaye/scikit-image,bsipocz/scikit-image,blink1073/scikit-image,robintw/scikit-image,oew1v07/scikit-image,Britefury/scikit-image,chriscrosscutler/scikit-image,Hiyorimi/scikit-image,vighneshbirodkar/scikit-image,paalge/scikit-image,newville/scikit-image,ajaybhat/scikit-image,emon10005/scikit-image,chintak/scikit-image,Hiyorimi/scikit-image,bennlich/scikit-image,warmspringwinds/scikit-image,pratapvardhan/scikit-image,vighneshbirodkar/scikit-image,dpshelio/scikit-image,jwiggins/scikit-image,warmspringwinds/scikit-image,youprofit/scikit-image,michaelaye/scikit-image,WarrenWeckesser/scikits-image,paalge/scikit-image,chintak/scikit-image,bsipocz/scikit-image,almarklein/scikit-image,ofgulban/scikit-image,rjeli/scikit-image,rjeli/scikit-image,chintak/scikit-image,ofgulban/scikit-image,ClinicalGraphics/scikit-image,michaelpacer/scikit-image,paalge/scikit-image,robintw/scikit-image,ofgulban/scikit-image,SamHames/scikit-image,Britefury/scikit-image,newville/scikit-image,juliusbierk/scikit-image,almarklein/scikit-image,SamHames/scikit-image,almarklein/scikit-image,emon10005/scikit-image,keflavich/scikit-image,youprofit/scikit-image,juliusbierk/scikit-image,bennlich/scikit-image,blink1073/scikit-image,michaelpacer/scikit-image,SamHames/scikit-image,SamHames/scikit-image,ClinicalGraphics/scikit-image,dpshelio/scikit-image,GaZ3ll3/scikit-image,almarklein/scikit-image,oew1v07/scikit-image,Midafi/scikit-image,chintak/scikit-image,jwiggins/scikit-image,ajaybhat/scikit-image,rjeli/scikit-image,WarrenWeckesser/scikits-image,vighneshbirodkar/scikit-image,GaZ3ll3/scikit-image,chriscrosscutler/scikit-image,keflavich/scikit-image
|
from .random_walker_segmentation import random_walker
from ._felzenszwalb import felzenszwalb
from ._slic import slic
from ._quickshift import quickshift
from .boundaries import find_boundaries, visualize_boundaries, mark_boundaries
from ._clear_border import clear_border
from ._join import join_segmentations, relabel_from_one
+
+ __all__ = ['random_walker',
+ 'felzenszwalb',
+ 'slic',
+ 'quickshift',
+ 'find_boundaries',
+ 'visualize_boundaries',
+ 'mark_boundaries',
+ 'clear_border',
+ 'join_segmentations',
+ 'relabel_from_one']
+
|
Add __all__ to segmentation package
|
## Code Before:
from .random_walker_segmentation import random_walker
from ._felzenszwalb import felzenszwalb
from ._slic import slic
from ._quickshift import quickshift
from .boundaries import find_boundaries, visualize_boundaries, mark_boundaries
from ._clear_border import clear_border
from ._join import join_segmentations, relabel_from_one
## Instruction:
Add __all__ to segmentation package
## Code After:
from .random_walker_segmentation import random_walker
from ._felzenszwalb import felzenszwalb
from ._slic import slic
from ._quickshift import quickshift
from .boundaries import find_boundaries, visualize_boundaries, mark_boundaries
from ._clear_border import clear_border
from ._join import join_segmentations, relabel_from_one
__all__ = ['random_walker',
'felzenszwalb',
'slic',
'quickshift',
'find_boundaries',
'visualize_boundaries',
'mark_boundaries',
'clear_border',
'join_segmentations',
'relabel_from_one']
|
from .random_walker_segmentation import random_walker
from ._felzenszwalb import felzenszwalb
from ._slic import slic
from ._quickshift import quickshift
from .boundaries import find_boundaries, visualize_boundaries, mark_boundaries
from ._clear_border import clear_border
from ._join import join_segmentations, relabel_from_one
+
+
+ __all__ = ['random_walker',
+ 'felzenszwalb',
+ 'slic',
+ 'quickshift',
+ 'find_boundaries',
+ 'visualize_boundaries',
+ 'mark_boundaries',
+ 'clear_border',
+ 'join_segmentations',
+ 'relabel_from_one']
|
e3369232014adf29f78975ff102f8e3aec51b81a
|
corgi/pandas_utils.py
|
corgi/pandas_utils.py
|
import math
import numpy as np
def remove_single_value_columns(df):
drop_ix = df.apply(pd.Series.value_counts,
normalize=True,
axis=0).max() == 1
drop_cols = df.columns[drop_ix]
df = df.drop(drop_cols, axis=1)
return df
def sample(df, sample_percent=2e-2):
sample_n = math.floor(len(df) * sample_percent)
rows = np.random.choice(df.shape[0], sample_n)
return df.ix[rows]
def sample_columns(df, sample_percent=0.5):
df = sample(df.T, sample_percent).T
return df
|
import math
import numpy as np
def remove_single_value_columns(df):
drop_ix = df.apply(pd.Series.value_counts,
normalize=True,
axis=0).max() == 1
drop_cols = df.columns[drop_ix]
df = df.drop(drop_cols, axis=1)
return df
|
Remove pandas sample utils, these are built into pandas
|
Remove pandas sample utils, these are built into pandas
|
Python
|
mit
|
log0ymxm/corgi
|
import math
import numpy as np
def remove_single_value_columns(df):
drop_ix = df.apply(pd.Series.value_counts,
normalize=True,
axis=0).max() == 1
drop_cols = df.columns[drop_ix]
df = df.drop(drop_cols, axis=1)
return df
-
- def sample(df, sample_percent=2e-2):
- sample_n = math.floor(len(df) * sample_percent)
- rows = np.random.choice(df.shape[0], sample_n)
- return df.ix[rows]
-
-
- def sample_columns(df, sample_percent=0.5):
- df = sample(df.T, sample_percent).T
- return df
-
|
Remove pandas sample utils, these are built into pandas
|
## Code Before:
import math
import numpy as np
def remove_single_value_columns(df):
drop_ix = df.apply(pd.Series.value_counts,
normalize=True,
axis=0).max() == 1
drop_cols = df.columns[drop_ix]
df = df.drop(drop_cols, axis=1)
return df
def sample(df, sample_percent=2e-2):
sample_n = math.floor(len(df) * sample_percent)
rows = np.random.choice(df.shape[0], sample_n)
return df.ix[rows]
def sample_columns(df, sample_percent=0.5):
df = sample(df.T, sample_percent).T
return df
## Instruction:
Remove pandas sample utils, these are built into pandas
## Code After:
import math
import numpy as np
def remove_single_value_columns(df):
drop_ix = df.apply(pd.Series.value_counts,
normalize=True,
axis=0).max() == 1
drop_cols = df.columns[drop_ix]
df = df.drop(drop_cols, axis=1)
return df
|
import math
import numpy as np
def remove_single_value_columns(df):
drop_ix = df.apply(pd.Series.value_counts,
normalize=True,
axis=0).max() == 1
drop_cols = df.columns[drop_ix]
df = df.drop(drop_cols, axis=1)
return df
-
-
- def sample(df, sample_percent=2e-2):
- sample_n = math.floor(len(df) * sample_percent)
- rows = np.random.choice(df.shape[0], sample_n)
- return df.ix[rows]
-
-
- def sample_columns(df, sample_percent=0.5):
- df = sample(df.T, sample_percent).T
- return df
|
16c8f23cd6ad9f9a10592bb40d1a18eb2c673d34
|
common.py
|
common.py
|
import mechanize
import os
class McGillException(Exception):
pass
urls = {
'login': 'twbkwbis.P_WWWLogin',
'transcript': 'bzsktran.P_Display_Form?user_type=S&tran_type=V'
}
_base_url = 'https://banweb.mcgill.ca/pban1/%s'
urls = {k: _base_url % v for k,v in urls.items()}
browser = mechanize.Browser()
def login(sid=None, pin=None):
if sid is None:
sid = os.environ.get('MCGILL_SID', None)
if pin is None:
pin = os.environ.get('MCGILL_PIN', None)
if sid is None or pin is None:
raise McGillException('McGill ID or PIN not provided.')
browser.open(urls['login'])
browser.select_form('loginform')
browser['sid'] = sid
browser['PIN'] = pin
response = browser.submit()
if 'Authorization Failure' in response.read():
raise McGillException('Invalid McGill ID or PIN.')
|
import mechanize
import os
class error(Exception):
pass
urls = {
'login': 'twbkwbis.P_WWWLogin',
'transcript': 'bzsktran.P_Display_Form?user_type=S&tran_type=V'
}
_base_url = 'https://banweb.mcgill.ca/pban1/%s'
urls = {k: _base_url % v for k,v in urls.items()}
browser = mechanize.Browser()
def login(sid=None, pin=None):
if sid is None:
sid = os.environ.get('MCGILL_SID', None)
if pin is None:
pin = os.environ.get('MCGILL_PIN', None)
if sid is None or pin is None:
raise error('McGill ID or PIN not provided.')
browser.open(urls['login'])
browser.select_form('loginform')
browser['sid'] = sid
browser['PIN'] = pin
response = browser.submit()
if 'Authorization Failure' in response.read():
raise error('Invalid McGill ID or PIN.')
|
Rename McGillException to error (mcgill.error)
|
Rename McGillException to error (mcgill.error)
|
Python
|
mit
|
isbadawi/minerva
|
import mechanize
import os
- class McGillException(Exception):
+ class error(Exception):
pass
urls = {
'login': 'twbkwbis.P_WWWLogin',
'transcript': 'bzsktran.P_Display_Form?user_type=S&tran_type=V'
}
_base_url = 'https://banweb.mcgill.ca/pban1/%s'
urls = {k: _base_url % v for k,v in urls.items()}
browser = mechanize.Browser()
def login(sid=None, pin=None):
if sid is None:
sid = os.environ.get('MCGILL_SID', None)
if pin is None:
pin = os.environ.get('MCGILL_PIN', None)
if sid is None or pin is None:
- raise McGillException('McGill ID or PIN not provided.')
+ raise error('McGill ID or PIN not provided.')
browser.open(urls['login'])
browser.select_form('loginform')
browser['sid'] = sid
browser['PIN'] = pin
response = browser.submit()
if 'Authorization Failure' in response.read():
- raise McGillException('Invalid McGill ID or PIN.')
+ raise error('Invalid McGill ID or PIN.')
|
Rename McGillException to error (mcgill.error)
|
## Code Before:
import mechanize
import os
class McGillException(Exception):
pass
urls = {
'login': 'twbkwbis.P_WWWLogin',
'transcript': 'bzsktran.P_Display_Form?user_type=S&tran_type=V'
}
_base_url = 'https://banweb.mcgill.ca/pban1/%s'
urls = {k: _base_url % v for k,v in urls.items()}
browser = mechanize.Browser()
def login(sid=None, pin=None):
if sid is None:
sid = os.environ.get('MCGILL_SID', None)
if pin is None:
pin = os.environ.get('MCGILL_PIN', None)
if sid is None or pin is None:
raise McGillException('McGill ID or PIN not provided.')
browser.open(urls['login'])
browser.select_form('loginform')
browser['sid'] = sid
browser['PIN'] = pin
response = browser.submit()
if 'Authorization Failure' in response.read():
raise McGillException('Invalid McGill ID or PIN.')
## Instruction:
Rename McGillException to error (mcgill.error)
## Code After:
import mechanize
import os
class error(Exception):
pass
urls = {
'login': 'twbkwbis.P_WWWLogin',
'transcript': 'bzsktran.P_Display_Form?user_type=S&tran_type=V'
}
_base_url = 'https://banweb.mcgill.ca/pban1/%s'
urls = {k: _base_url % v for k,v in urls.items()}
browser = mechanize.Browser()
def login(sid=None, pin=None):
if sid is None:
sid = os.environ.get('MCGILL_SID', None)
if pin is None:
pin = os.environ.get('MCGILL_PIN', None)
if sid is None or pin is None:
raise error('McGill ID or PIN not provided.')
browser.open(urls['login'])
browser.select_form('loginform')
browser['sid'] = sid
browser['PIN'] = pin
response = browser.submit()
if 'Authorization Failure' in response.read():
raise error('Invalid McGill ID or PIN.')
|
import mechanize
import os
- class McGillException(Exception):
+ class error(Exception):
pass
urls = {
'login': 'twbkwbis.P_WWWLogin',
'transcript': 'bzsktran.P_Display_Form?user_type=S&tran_type=V'
}
_base_url = 'https://banweb.mcgill.ca/pban1/%s'
urls = {k: _base_url % v for k,v in urls.items()}
browser = mechanize.Browser()
def login(sid=None, pin=None):
if sid is None:
sid = os.environ.get('MCGILL_SID', None)
if pin is None:
pin = os.environ.get('MCGILL_PIN', None)
if sid is None or pin is None:
- raise McGillException('McGill ID or PIN not provided.')
? --------- ^^^ ^
+ raise error('McGill ID or PIN not provided.')
? ^^ ^
browser.open(urls['login'])
browser.select_form('loginform')
browser['sid'] = sid
browser['PIN'] = pin
response = browser.submit()
if 'Authorization Failure' in response.read():
- raise McGillException('Invalid McGill ID or PIN.')
? --------- ^^^ ^
+ raise error('Invalid McGill ID or PIN.')
? ^^ ^
|
d5a578e6b72fae3c92827895055ed32baf8aa806
|
coney/response_codes.py
|
coney/response_codes.py
|
class ResponseCodes(object):
SUCCESS = 0
USER_CODE_START = 1
USER_CODE_END = 0x7fffffff
RESERVED_CODE_START = 0x80000000
MALFORMED_RESPONSE = RESERVED_CODE_START
REQUEST_ENCODING_FAILURE = RESERVED_CODE_START + 1
REMOTE_UNHANDLED_EXCEPTION = RESERVED_CODE_START + 2
CALL_REPLY_TIMEOUT = RESERVED_CODE_START + 3
RESERVED_CODE_END = 0xffffffff
_desc = {
SUCCESS: 'Success',
MALFORMED_RESPONSE: 'Response message was malformed',
REQUEST_ENCODING_FAILURE: 'The data in the request could not be encoded',
REMOTE_UNHANDLED_EXCEPTION: 'An unhandled exception occurred while processing the remote call',
CALL_REPLY_TIMEOUT: 'The request did not receive a reply within the call timeout',
}
@staticmethod
def describe(code):
try:
return ResponseCodes._desc[code]
except KeyError:
if ResponseCodes.USER_CODE_START >= code <= ResponseCodes.USER_CODE_END:
return 'RPC endpoint specific error response'
else:
return 'Unknown response code'
|
class ResponseCodes(object):
SUCCESS = 0
USER_CODE_START = 1
USER_CODE_END = 0x7fffffff
RESERVED_CODE_START = 0x80000000
MALFORMED_RESPONSE = RESERVED_CODE_START
MALFORMED_REQUEST = RESERVED_CODE_START + 1
REQUEST_ENCODING_FAILURE = RESERVED_CODE_START + 2
REMOTE_UNHANDLED_EXCEPTION = RESERVED_CODE_START + 3
CALL_REPLY_TIMEOUT = RESERVED_CODE_START + 4
METHOD_NOT_FOUND = RESERVED_CODE_START + 5
VERSION_NOT_FOUND = RESERVED_CODE_START + 6
UNEXPECTED_DISPATCH_EXCEPTION = RESERVED_CODE_START + 7
RESERVED_CODE_END = 0xffffffff
_desc = {
SUCCESS: 'Success',
MALFORMED_RESPONSE: 'Response message was malformed',
MALFORMED_REQUEST: 'Request message was malformed',
REQUEST_ENCODING_FAILURE: 'The data in the request could not be encoded',
REMOTE_UNHANDLED_EXCEPTION: 'An unhandled exception occurred while processing the remote call',
CALL_REPLY_TIMEOUT: 'The request did not receive a reply within the call timeout',
METHOD_NOT_FOUND: 'The requested method is not supported by the server',
VERSION_NOT_FOUND: 'The requested method version is not supported by the server',
UNEXPECTED_DISPATCH_EXCEPTION: 'An unexpected exception occurred during message dispatch'
}
@staticmethod
def describe(code):
try:
return ResponseCodes._desc[code]
except KeyError:
if ResponseCodes.USER_CODE_START >= code <= ResponseCodes.USER_CODE_END:
return 'RPC endpoint specific error response'
else:
return 'Unknown response code'
|
Add additional codes used by server implementation
|
Add additional codes used by server implementation
|
Python
|
mit
|
cbigler/jackrabbit
|
class ResponseCodes(object):
SUCCESS = 0
USER_CODE_START = 1
USER_CODE_END = 0x7fffffff
RESERVED_CODE_START = 0x80000000
MALFORMED_RESPONSE = RESERVED_CODE_START
+ MALFORMED_REQUEST = RESERVED_CODE_START + 1
- REQUEST_ENCODING_FAILURE = RESERVED_CODE_START + 1
+ REQUEST_ENCODING_FAILURE = RESERVED_CODE_START + 2
- REMOTE_UNHANDLED_EXCEPTION = RESERVED_CODE_START + 2
+ REMOTE_UNHANDLED_EXCEPTION = RESERVED_CODE_START + 3
- CALL_REPLY_TIMEOUT = RESERVED_CODE_START + 3
+ CALL_REPLY_TIMEOUT = RESERVED_CODE_START + 4
+ METHOD_NOT_FOUND = RESERVED_CODE_START + 5
+ VERSION_NOT_FOUND = RESERVED_CODE_START + 6
+ UNEXPECTED_DISPATCH_EXCEPTION = RESERVED_CODE_START + 7
+
RESERVED_CODE_END = 0xffffffff
_desc = {
SUCCESS: 'Success',
MALFORMED_RESPONSE: 'Response message was malformed',
+ MALFORMED_REQUEST: 'Request message was malformed',
REQUEST_ENCODING_FAILURE: 'The data in the request could not be encoded',
REMOTE_UNHANDLED_EXCEPTION: 'An unhandled exception occurred while processing the remote call',
CALL_REPLY_TIMEOUT: 'The request did not receive a reply within the call timeout',
+ METHOD_NOT_FOUND: 'The requested method is not supported by the server',
+ VERSION_NOT_FOUND: 'The requested method version is not supported by the server',
+ UNEXPECTED_DISPATCH_EXCEPTION: 'An unexpected exception occurred during message dispatch'
}
@staticmethod
def describe(code):
try:
return ResponseCodes._desc[code]
except KeyError:
if ResponseCodes.USER_CODE_START >= code <= ResponseCodes.USER_CODE_END:
return 'RPC endpoint specific error response'
else:
return 'Unknown response code'
|
Add additional codes used by server implementation
|
## Code Before:
class ResponseCodes(object):
SUCCESS = 0
USER_CODE_START = 1
USER_CODE_END = 0x7fffffff
RESERVED_CODE_START = 0x80000000
MALFORMED_RESPONSE = RESERVED_CODE_START
REQUEST_ENCODING_FAILURE = RESERVED_CODE_START + 1
REMOTE_UNHANDLED_EXCEPTION = RESERVED_CODE_START + 2
CALL_REPLY_TIMEOUT = RESERVED_CODE_START + 3
RESERVED_CODE_END = 0xffffffff
_desc = {
SUCCESS: 'Success',
MALFORMED_RESPONSE: 'Response message was malformed',
REQUEST_ENCODING_FAILURE: 'The data in the request could not be encoded',
REMOTE_UNHANDLED_EXCEPTION: 'An unhandled exception occurred while processing the remote call',
CALL_REPLY_TIMEOUT: 'The request did not receive a reply within the call timeout',
}
@staticmethod
def describe(code):
try:
return ResponseCodes._desc[code]
except KeyError:
if ResponseCodes.USER_CODE_START >= code <= ResponseCodes.USER_CODE_END:
return 'RPC endpoint specific error response'
else:
return 'Unknown response code'
## Instruction:
Add additional codes used by server implementation
## Code After:
class ResponseCodes(object):
SUCCESS = 0
USER_CODE_START = 1
USER_CODE_END = 0x7fffffff
RESERVED_CODE_START = 0x80000000
MALFORMED_RESPONSE = RESERVED_CODE_START
MALFORMED_REQUEST = RESERVED_CODE_START + 1
REQUEST_ENCODING_FAILURE = RESERVED_CODE_START + 2
REMOTE_UNHANDLED_EXCEPTION = RESERVED_CODE_START + 3
CALL_REPLY_TIMEOUT = RESERVED_CODE_START + 4
METHOD_NOT_FOUND = RESERVED_CODE_START + 5
VERSION_NOT_FOUND = RESERVED_CODE_START + 6
UNEXPECTED_DISPATCH_EXCEPTION = RESERVED_CODE_START + 7
RESERVED_CODE_END = 0xffffffff
_desc = {
SUCCESS: 'Success',
MALFORMED_RESPONSE: 'Response message was malformed',
MALFORMED_REQUEST: 'Request message was malformed',
REQUEST_ENCODING_FAILURE: 'The data in the request could not be encoded',
REMOTE_UNHANDLED_EXCEPTION: 'An unhandled exception occurred while processing the remote call',
CALL_REPLY_TIMEOUT: 'The request did not receive a reply within the call timeout',
METHOD_NOT_FOUND: 'The requested method is not supported by the server',
VERSION_NOT_FOUND: 'The requested method version is not supported by the server',
UNEXPECTED_DISPATCH_EXCEPTION: 'An unexpected exception occurred during message dispatch'
}
@staticmethod
def describe(code):
try:
return ResponseCodes._desc[code]
except KeyError:
if ResponseCodes.USER_CODE_START >= code <= ResponseCodes.USER_CODE_END:
return 'RPC endpoint specific error response'
else:
return 'Unknown response code'
|
class ResponseCodes(object):
SUCCESS = 0
USER_CODE_START = 1
USER_CODE_END = 0x7fffffff
RESERVED_CODE_START = 0x80000000
MALFORMED_RESPONSE = RESERVED_CODE_START
+ MALFORMED_REQUEST = RESERVED_CODE_START + 1
- REQUEST_ENCODING_FAILURE = RESERVED_CODE_START + 1
? ^
+ REQUEST_ENCODING_FAILURE = RESERVED_CODE_START + 2
? ^
- REMOTE_UNHANDLED_EXCEPTION = RESERVED_CODE_START + 2
? ^
+ REMOTE_UNHANDLED_EXCEPTION = RESERVED_CODE_START + 3
? ^
- CALL_REPLY_TIMEOUT = RESERVED_CODE_START + 3
? ^
+ CALL_REPLY_TIMEOUT = RESERVED_CODE_START + 4
? ^
+ METHOD_NOT_FOUND = RESERVED_CODE_START + 5
+ VERSION_NOT_FOUND = RESERVED_CODE_START + 6
+ UNEXPECTED_DISPATCH_EXCEPTION = RESERVED_CODE_START + 7
+
RESERVED_CODE_END = 0xffffffff
_desc = {
SUCCESS: 'Success',
MALFORMED_RESPONSE: 'Response message was malformed',
+ MALFORMED_REQUEST: 'Request message was malformed',
REQUEST_ENCODING_FAILURE: 'The data in the request could not be encoded',
REMOTE_UNHANDLED_EXCEPTION: 'An unhandled exception occurred while processing the remote call',
CALL_REPLY_TIMEOUT: 'The request did not receive a reply within the call timeout',
+ METHOD_NOT_FOUND: 'The requested method is not supported by the server',
+ VERSION_NOT_FOUND: 'The requested method version is not supported by the server',
+ UNEXPECTED_DISPATCH_EXCEPTION: 'An unexpected exception occurred during message dispatch'
}
@staticmethod
def describe(code):
try:
return ResponseCodes._desc[code]
except KeyError:
if ResponseCodes.USER_CODE_START >= code <= ResponseCodes.USER_CODE_END:
return 'RPC endpoint specific error response'
else:
return 'Unknown response code'
|
42221c7b858951376ba59385fa42cac11d542fdd
|
plugin/script/sphinxexampleae.py
|
plugin/script/sphinxexampleae.py
|
def process( node_name, handle ):
handle.editorTemplate( beginScrollLayout=True )
handle.editorTemplate( beginLayout="Float Attributes" )
handle.editorTemplate( "floatAttr", addControl=True )
handle.editorTemplate( endLayout=True )
handle.editorTemplate( addExtraControls=True )
handle.editorTemplate( endScrollLayout=True )
handle.editorTemplate( suppress="caching" )
handle.editorTemplate( suppress="nodeState" )
def ae_template( node_name ):
from maya import cmds
maya_handle = MayaHandle( cmds )
process( node_name, maya_handle )
|
string_attr_help = """
This is the *annotation* for the stringAttr attribute
"""
def process( node_name, handle ):
handle.editorTemplate( beginScrollLayout=True )
handle.editorTemplate( beginLayout="Float Attributes" )
handle.editorTemplate( "floatAttr", addControl=True, annotation=float_attr_help )
handle.editorTemplate( endLayout=True )
handle.editorTemplate( beginLayout="String Attributes" )
handle.editorTemplate( "stringAttr", addControl=True, annotation=string_attr_help )
handle.editorTemplate( endLayout=True )
handle.editorTemplate( addExtraControls=True )
handle.editorTemplate( endScrollLayout=True )
handle.editorTemplate( suppress="caching" )
handle.editorTemplate( suppress="nodeState" )
def ae_template( node_name ):
from maya import cmds
maya_handle = MayaHandle( cmds )
process( node_name, maya_handle )
|
Add another attribute and some annotations
|
Add another attribute and some annotations
We write the annotations in rst for the moment.
|
Python
|
bsd-3-clause
|
michaeljones/sphinx-maya-node
|
+
+ string_attr_help = """
+ This is the *annotation* for the stringAttr attribute
+ """
def process( node_name, handle ):
handle.editorTemplate( beginScrollLayout=True )
handle.editorTemplate( beginLayout="Float Attributes" )
- handle.editorTemplate( "floatAttr", addControl=True )
+ handle.editorTemplate( "floatAttr", addControl=True, annotation=float_attr_help )
+ handle.editorTemplate( endLayout=True )
+
+ handle.editorTemplate( beginLayout="String Attributes" )
+ handle.editorTemplate( "stringAttr", addControl=True, annotation=string_attr_help )
handle.editorTemplate( endLayout=True )
handle.editorTemplate( addExtraControls=True )
handle.editorTemplate( endScrollLayout=True )
handle.editorTemplate( suppress="caching" )
handle.editorTemplate( suppress="nodeState" )
def ae_template( node_name ):
from maya import cmds
maya_handle = MayaHandle( cmds )
process( node_name, maya_handle )
|
Add another attribute and some annotations
|
## Code Before:
def process( node_name, handle ):
handle.editorTemplate( beginScrollLayout=True )
handle.editorTemplate( beginLayout="Float Attributes" )
handle.editorTemplate( "floatAttr", addControl=True )
handle.editorTemplate( endLayout=True )
handle.editorTemplate( addExtraControls=True )
handle.editorTemplate( endScrollLayout=True )
handle.editorTemplate( suppress="caching" )
handle.editorTemplate( suppress="nodeState" )
def ae_template( node_name ):
from maya import cmds
maya_handle = MayaHandle( cmds )
process( node_name, maya_handle )
## Instruction:
Add another attribute and some annotations
## Code After:
string_attr_help = """
This is the *annotation* for the stringAttr attribute
"""
def process( node_name, handle ):
handle.editorTemplate( beginScrollLayout=True )
handle.editorTemplate( beginLayout="Float Attributes" )
handle.editorTemplate( "floatAttr", addControl=True, annotation=float_attr_help )
handle.editorTemplate( endLayout=True )
handle.editorTemplate( beginLayout="String Attributes" )
handle.editorTemplate( "stringAttr", addControl=True, annotation=string_attr_help )
handle.editorTemplate( endLayout=True )
handle.editorTemplate( addExtraControls=True )
handle.editorTemplate( endScrollLayout=True )
handle.editorTemplate( suppress="caching" )
handle.editorTemplate( suppress="nodeState" )
def ae_template( node_name ):
from maya import cmds
maya_handle = MayaHandle( cmds )
process( node_name, maya_handle )
|
+
+ string_attr_help = """
+ This is the *annotation* for the stringAttr attribute
+ """
def process( node_name, handle ):
handle.editorTemplate( beginScrollLayout=True )
handle.editorTemplate( beginLayout="Float Attributes" )
- handle.editorTemplate( "floatAttr", addControl=True )
+ handle.editorTemplate( "floatAttr", addControl=True, annotation=float_attr_help )
? ++++++++++++++++++++++++++++
+ handle.editorTemplate( endLayout=True )
+
+ handle.editorTemplate( beginLayout="String Attributes" )
+ handle.editorTemplate( "stringAttr", addControl=True, annotation=string_attr_help )
handle.editorTemplate( endLayout=True )
handle.editorTemplate( addExtraControls=True )
handle.editorTemplate( endScrollLayout=True )
handle.editorTemplate( suppress="caching" )
handle.editorTemplate( suppress="nodeState" )
def ae_template( node_name ):
from maya import cmds
maya_handle = MayaHandle( cmds )
process( node_name, maya_handle )
|
43a54b9d8e753f721619aa5fcecec39eb4ca6eff
|
django_amber/utils.py
|
django_amber/utils.py
|
from multiprocessing import Process
from time import sleep
from socket import socket
import requests
from django.core.management import call_command
from django.core.management.commands.runserver import Command as RunserverCommand
default_port = RunserverCommand.default_port
def run_runserver_in_process(port=default_port):
p = Process(
target=call_command,
args=('runserver', port),
kwargs={'use_reloader': False},
)
p.start()
wait_for_server(port)
return p
def wait_for_server(port=default_port):
get_with_retries('http://localhost:{}/'.format(port))
def get_with_retries(url, num_retries=5):
for i in range(num_retries):
try:
return requests.get(url)
except requests.exceptions.ConnectionError:
pass
sleep(0.1 * 2 ** i)
requests.get(url)
def get_free_port():
s = socket()
s.bind(('', 0))
port = s.getsockname()[1]
s.close()
return str(port)
|
from multiprocessing import Process
from time import sleep
from socket import socket
import traceback
import requests
from django.core.management import call_command
from django.core.management.commands.runserver import Command as RunserverCommand
default_port = RunserverCommand.default_port
def run_runserver_in_process(port=default_port):
p = Process(
target=call_command,
args=('runserver', port),
kwargs={'use_reloader': False},
)
p.start()
wait_for_server(port)
return p
def wait_for_server(port=default_port):
get_with_retries('http://localhost:{}/'.format(port))
def get_with_retries(url, num_retries=5):
for i in range(num_retries):
try:
rsp = requests.get(url)
rsp.raise_for_status()
except requests.exceptions.RequestException as e:
print('get_with_retries', i)
traceback.print_exc()
sleep(0.2 * 2 ** i)
requests.get(url)
def get_free_port():
s = socket()
s.bind(('', 0))
port = s.getsockname()[1]
s.close()
return str(port)
|
Add logging and increase timeout
|
Add logging and increase timeout
|
Python
|
mit
|
PyconUK/2017.pyconuk.org,PyconUK/2017.pyconuk.org,PyconUK/2017.pyconuk.org
|
from multiprocessing import Process
from time import sleep
from socket import socket
+ import traceback
import requests
from django.core.management import call_command
from django.core.management.commands.runserver import Command as RunserverCommand
default_port = RunserverCommand.default_port
def run_runserver_in_process(port=default_port):
p = Process(
target=call_command,
args=('runserver', port),
kwargs={'use_reloader': False},
)
p.start()
wait_for_server(port)
return p
def wait_for_server(port=default_port):
get_with_retries('http://localhost:{}/'.format(port))
def get_with_retries(url, num_retries=5):
for i in range(num_retries):
try:
- return requests.get(url)
+ rsp = requests.get(url)
+ rsp.raise_for_status()
- except requests.exceptions.ConnectionError:
+ except requests.exceptions.RequestException as e:
- pass
+ print('get_with_retries', i)
+ traceback.print_exc()
- sleep(0.1 * 2 ** i)
+ sleep(0.2 * 2 ** i)
requests.get(url)
def get_free_port():
s = socket()
s.bind(('', 0))
port = s.getsockname()[1]
s.close()
return str(port)
|
Add logging and increase timeout
|
## Code Before:
from multiprocessing import Process
from time import sleep
from socket import socket
import requests
from django.core.management import call_command
from django.core.management.commands.runserver import Command as RunserverCommand
default_port = RunserverCommand.default_port
def run_runserver_in_process(port=default_port):
p = Process(
target=call_command,
args=('runserver', port),
kwargs={'use_reloader': False},
)
p.start()
wait_for_server(port)
return p
def wait_for_server(port=default_port):
get_with_retries('http://localhost:{}/'.format(port))
def get_with_retries(url, num_retries=5):
for i in range(num_retries):
try:
return requests.get(url)
except requests.exceptions.ConnectionError:
pass
sleep(0.1 * 2 ** i)
requests.get(url)
def get_free_port():
s = socket()
s.bind(('', 0))
port = s.getsockname()[1]
s.close()
return str(port)
## Instruction:
Add logging and increase timeout
## Code After:
from multiprocessing import Process
from time import sleep
from socket import socket
import traceback
import requests
from django.core.management import call_command
from django.core.management.commands.runserver import Command as RunserverCommand
default_port = RunserverCommand.default_port
def run_runserver_in_process(port=default_port):
p = Process(
target=call_command,
args=('runserver', port),
kwargs={'use_reloader': False},
)
p.start()
wait_for_server(port)
return p
def wait_for_server(port=default_port):
get_with_retries('http://localhost:{}/'.format(port))
def get_with_retries(url, num_retries=5):
for i in range(num_retries):
try:
rsp = requests.get(url)
rsp.raise_for_status()
except requests.exceptions.RequestException as e:
print('get_with_retries', i)
traceback.print_exc()
sleep(0.2 * 2 ** i)
requests.get(url)
def get_free_port():
s = socket()
s.bind(('', 0))
port = s.getsockname()[1]
s.close()
return str(port)
|
from multiprocessing import Process
from time import sleep
from socket import socket
+ import traceback
import requests
from django.core.management import call_command
from django.core.management.commands.runserver import Command as RunserverCommand
default_port = RunserverCommand.default_port
def run_runserver_in_process(port=default_port):
p = Process(
target=call_command,
args=('runserver', port),
kwargs={'use_reloader': False},
)
p.start()
wait_for_server(port)
return p
def wait_for_server(port=default_port):
get_with_retries('http://localhost:{}/'.format(port))
def get_with_retries(url, num_retries=5):
for i in range(num_retries):
try:
- return requests.get(url)
? ^^^^^
+ rsp = requests.get(url)
? ^^^^
+ rsp.raise_for_status()
- except requests.exceptions.ConnectionError:
? ^^^^ ^^^^^
+ except requests.exceptions.RequestException as e:
? ^ +++++++ ++ ^^^^^
- pass
+ print('get_with_retries', i)
+ traceback.print_exc()
- sleep(0.1 * 2 ** i)
? ^
+ sleep(0.2 * 2 ** i)
? ^
requests.get(url)
def get_free_port():
s = socket()
s.bind(('', 0))
port = s.getsockname()[1]
s.close()
return str(port)
|
fb59c2c7c01da9f4040c6b9c818d1fe2fc7993bb
|
get_weather_data.py
|
get_weather_data.py
|
import pandas as pd
import constants as const
import utils
def main():
engine = utils.get_db_engine()
today = utils.get_current_time()
resp = utils.get_uri_content(uri=const.DARK_SKY_URI,
content_type='json')
for key in resp.keys():
if isinstance(resp.get(key), dict) and 'data' in resp.get(key):
for n, i in enumerate(resp.get(key)['data']):
resp.get(key)['data'][n]['currentTime'] = today
resp['currently']['lat'] = resp['latitude']
resp['currently']['lng'] = resp['longitude']
daily_df = pd.DataFrame(resp['daily']['data'])
tables = ['current_weather', 'daily_weather', 'hourly_weather', 'minutely_weather']
for table in tables:
daily_df.to_sql(table, con=engine, if_exists='append', index=False)
if __name__ == '__main__':
main()
|
import pandas as pd
import constants as const
import utils
def main():
engine = utils.get_db_engine()
today = utils.get_current_time()
resp = utils.get_uri_content(uri=const.DARK_SKY_URI,
content_type='json')
for key in resp.keys():
if isinstance(resp.get(key), dict) and 'data' in resp.get(key):
for n, i in enumerate(resp.get(key)['data']):
resp.get(key)['data'][n]['currentTime'] = today
resp['currently']['lat'] = resp['latitude']
resp['currently']['lng'] = resp['longitude']
current_df = pd.DataFrame([resp['currently']])
daily_df = pd.DataFrame(resp['daily']['data'])
hourly_df = pd.DataFrame(resp['hourly']['data'])
minutely_df = pd.DataFrame(resp['minutely']['data'])
tables = ['current_weather', 'daily_weather', 'hourly_weather', 'minutely_weather']
data_to_import = [current_df, daily_df, hourly_df, minutely_df]
for data, table in zip(data_to_import, tables):
data.to_sql(table, con=engine, if_exists='append', index=False)
if __name__ == '__main__':
main()
|
Fix bug with data frames
|
Fix bug with data frames
|
Python
|
mit
|
tmthyjames/Achoo,tmthyjames/Achoo,tmthyjames/Achoo,tmthyjames/Achoo,tmthyjames/Achoo
|
import pandas as pd
import constants as const
import utils
def main():
engine = utils.get_db_engine()
today = utils.get_current_time()
resp = utils.get_uri_content(uri=const.DARK_SKY_URI,
content_type='json')
for key in resp.keys():
if isinstance(resp.get(key), dict) and 'data' in resp.get(key):
for n, i in enumerate(resp.get(key)['data']):
resp.get(key)['data'][n]['currentTime'] = today
resp['currently']['lat'] = resp['latitude']
resp['currently']['lng'] = resp['longitude']
+ current_df = pd.DataFrame([resp['currently']])
daily_df = pd.DataFrame(resp['daily']['data'])
+ hourly_df = pd.DataFrame(resp['hourly']['data'])
+ minutely_df = pd.DataFrame(resp['minutely']['data'])
tables = ['current_weather', 'daily_weather', 'hourly_weather', 'minutely_weather']
- for table in tables:
+ data_to_import = [current_df, daily_df, hourly_df, minutely_df]
+ for data, table in zip(data_to_import, tables):
- daily_df.to_sql(table, con=engine, if_exists='append', index=False)
+ data.to_sql(table, con=engine, if_exists='append', index=False)
if __name__ == '__main__':
main()
|
Fix bug with data frames
|
## Code Before:
import pandas as pd
import constants as const
import utils
def main():
engine = utils.get_db_engine()
today = utils.get_current_time()
resp = utils.get_uri_content(uri=const.DARK_SKY_URI,
content_type='json')
for key in resp.keys():
if isinstance(resp.get(key), dict) and 'data' in resp.get(key):
for n, i in enumerate(resp.get(key)['data']):
resp.get(key)['data'][n]['currentTime'] = today
resp['currently']['lat'] = resp['latitude']
resp['currently']['lng'] = resp['longitude']
daily_df = pd.DataFrame(resp['daily']['data'])
tables = ['current_weather', 'daily_weather', 'hourly_weather', 'minutely_weather']
for table in tables:
daily_df.to_sql(table, con=engine, if_exists='append', index=False)
if __name__ == '__main__':
main()
## Instruction:
Fix bug with data frames
## Code After:
import pandas as pd
import constants as const
import utils
def main():
engine = utils.get_db_engine()
today = utils.get_current_time()
resp = utils.get_uri_content(uri=const.DARK_SKY_URI,
content_type='json')
for key in resp.keys():
if isinstance(resp.get(key), dict) and 'data' in resp.get(key):
for n, i in enumerate(resp.get(key)['data']):
resp.get(key)['data'][n]['currentTime'] = today
resp['currently']['lat'] = resp['latitude']
resp['currently']['lng'] = resp['longitude']
current_df = pd.DataFrame([resp['currently']])
daily_df = pd.DataFrame(resp['daily']['data'])
hourly_df = pd.DataFrame(resp['hourly']['data'])
minutely_df = pd.DataFrame(resp['minutely']['data'])
tables = ['current_weather', 'daily_weather', 'hourly_weather', 'minutely_weather']
data_to_import = [current_df, daily_df, hourly_df, minutely_df]
for data, table in zip(data_to_import, tables):
data.to_sql(table, con=engine, if_exists='append', index=False)
if __name__ == '__main__':
main()
|
import pandas as pd
import constants as const
import utils
def main():
engine = utils.get_db_engine()
today = utils.get_current_time()
resp = utils.get_uri_content(uri=const.DARK_SKY_URI,
content_type='json')
for key in resp.keys():
if isinstance(resp.get(key), dict) and 'data' in resp.get(key):
for n, i in enumerate(resp.get(key)['data']):
resp.get(key)['data'][n]['currentTime'] = today
resp['currently']['lat'] = resp['latitude']
resp['currently']['lng'] = resp['longitude']
+ current_df = pd.DataFrame([resp['currently']])
daily_df = pd.DataFrame(resp['daily']['data'])
+ hourly_df = pd.DataFrame(resp['hourly']['data'])
+ minutely_df = pd.DataFrame(resp['minutely']['data'])
tables = ['current_weather', 'daily_weather', 'hourly_weather', 'minutely_weather']
- for table in tables:
+ data_to_import = [current_df, daily_df, hourly_df, minutely_df]
+ for data, table in zip(data_to_import, tables):
- daily_df.to_sql(table, con=engine, if_exists='append', index=False)
? ^^^^^^
+ data.to_sql(table, con=engine, if_exists='append', index=False)
? ^^
if __name__ == '__main__':
main()
|
65d6563d22c8500402b23c09d3b8991b79c94dee
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name='meterplugin',
version='0.2.3',
url='https://github.com/boundary/meter-plugin-sdk-python',
author='David Gwartney',
author_email='[email protected]',
packages=['meterplugin', ],
entry_points={
'console_scripts': [
'plugin-runner = meterplugin.plugin_runner:main',
'post-extract = meterplugin.post_extract:main',
],
},
package_data={'meterplugin': ['templates/*']},
license='LICENSE',
description='TrueSight Pulse Meter Plugin SDK for Python',
long_description=open('README.txt').read(),
install_requires=['tinyrpc', 'tspapi',],
setup_requires=['tinyrpc', 'tspapi', ],
)
|
from distutils.core import setup
setup(
name='meterplugin',
version='0.2.3',
url='https://github.com/boundary/meter-plugin-sdk-python',
author='David Gwartney',
author_email='[email protected]',
packages=['meterplugin', ],
entry_points={
'console_scripts': [
'plugin-runner = meterplugin.plugin_runner:main',
'post-extract = meterplugin.post_extract:main',
],
},
package_data={'meterplugin': ['templates/*']},
license='LICENSE',
description='TrueSight Pulse Meter Plugin SDK for Python',
long_description=open('README.txt').read(),
install_requires=[
'tinyrpc >= 0.5',
'tspapi >= 0.3.6',],
)
|
Add versions to install requirements
|
Add versions to install requirements
|
Python
|
apache-2.0
|
jdgwartney/meter-plugin-sdk-python,jdgwartney/meter-plugin-sdk-python
|
from distutils.core import setup
setup(
name='meterplugin',
version='0.2.3',
url='https://github.com/boundary/meter-plugin-sdk-python',
author='David Gwartney',
author_email='[email protected]',
packages=['meterplugin', ],
entry_points={
'console_scripts': [
'plugin-runner = meterplugin.plugin_runner:main',
'post-extract = meterplugin.post_extract:main',
],
},
package_data={'meterplugin': ['templates/*']},
license='LICENSE',
description='TrueSight Pulse Meter Plugin SDK for Python',
long_description=open('README.txt').read(),
- install_requires=['tinyrpc', 'tspapi',],
- setup_requires=['tinyrpc', 'tspapi', ],
+ install_requires=[
+ 'tinyrpc >= 0.5',
+ 'tspapi >= 0.3.6',],
)
|
Add versions to install requirements
|
## Code Before:
from distutils.core import setup
setup(
name='meterplugin',
version='0.2.3',
url='https://github.com/boundary/meter-plugin-sdk-python',
author='David Gwartney',
author_email='[email protected]',
packages=['meterplugin', ],
entry_points={
'console_scripts': [
'plugin-runner = meterplugin.plugin_runner:main',
'post-extract = meterplugin.post_extract:main',
],
},
package_data={'meterplugin': ['templates/*']},
license='LICENSE',
description='TrueSight Pulse Meter Plugin SDK for Python',
long_description=open('README.txt').read(),
install_requires=['tinyrpc', 'tspapi',],
setup_requires=['tinyrpc', 'tspapi', ],
)
## Instruction:
Add versions to install requirements
## Code After:
from distutils.core import setup
setup(
name='meterplugin',
version='0.2.3',
url='https://github.com/boundary/meter-plugin-sdk-python',
author='David Gwartney',
author_email='[email protected]',
packages=['meterplugin', ],
entry_points={
'console_scripts': [
'plugin-runner = meterplugin.plugin_runner:main',
'post-extract = meterplugin.post_extract:main',
],
},
package_data={'meterplugin': ['templates/*']},
license='LICENSE',
description='TrueSight Pulse Meter Plugin SDK for Python',
long_description=open('README.txt').read(),
install_requires=[
'tinyrpc >= 0.5',
'tspapi >= 0.3.6',],
)
|
from distutils.core import setup
setup(
name='meterplugin',
version='0.2.3',
url='https://github.com/boundary/meter-plugin-sdk-python',
author='David Gwartney',
author_email='[email protected]',
packages=['meterplugin', ],
entry_points={
'console_scripts': [
'plugin-runner = meterplugin.plugin_runner:main',
'post-extract = meterplugin.post_extract:main',
],
},
package_data={'meterplugin': ['templates/*']},
license='LICENSE',
description='TrueSight Pulse Meter Plugin SDK for Python',
long_description=open('README.txt').read(),
- install_requires=['tinyrpc', 'tspapi',],
- setup_requires=['tinyrpc', 'tspapi', ],
+ install_requires=[
+ 'tinyrpc >= 0.5',
+ 'tspapi >= 0.3.6',],
)
|
574b4d95a48f4df676ed5f23f0c83a9df2bc241d
|
pydux/log_middleware.py
|
pydux/log_middleware.py
|
def log_middleware(store):
"""log all actions to console as they are dispatched"""
def wrapper(next_):
def log_dispatch(action):
print('Dispatch Action:', action)
return next_(action)
return log_dispatch
return wrapper
|
from __future__ import print_function
"""
logging middleware example
"""
def log_middleware(store):
"""log all actions to console as they are dispatched"""
def wrapper(next_):
def log_dispatch(action):
print('Dispatch Action:', action)
return next_(action)
return log_dispatch
return wrapper
|
Use from __future__ import for print function
|
Use from __future__ import for print function
|
Python
|
mit
|
usrlocalben/pydux
|
-
+ from __future__ import print_function
+ """
+ logging middleware example
+ """
def log_middleware(store):
"""log all actions to console as they are dispatched"""
def wrapper(next_):
def log_dispatch(action):
print('Dispatch Action:', action)
return next_(action)
return log_dispatch
return wrapper
|
Use from __future__ import for print function
|
## Code Before:
def log_middleware(store):
"""log all actions to console as they are dispatched"""
def wrapper(next_):
def log_dispatch(action):
print('Dispatch Action:', action)
return next_(action)
return log_dispatch
return wrapper
## Instruction:
Use from __future__ import for print function
## Code After:
from __future__ import print_function
"""
logging middleware example
"""
def log_middleware(store):
"""log all actions to console as they are dispatched"""
def wrapper(next_):
def log_dispatch(action):
print('Dispatch Action:', action)
return next_(action)
return log_dispatch
return wrapper
|
-
+ from __future__ import print_function
+ """
+ logging middleware example
+ """
def log_middleware(store):
"""log all actions to console as they are dispatched"""
def wrapper(next_):
def log_dispatch(action):
print('Dispatch Action:', action)
return next_(action)
return log_dispatch
return wrapper
|
32376bf577af51ed43819aa92e89231886e6b619
|
tests/functional/test_new_resolver_errors.py
|
tests/functional/test_new_resolver_errors.py
|
from tests.lib import create_basic_wheel_for_package
def test_new_resolver_conflict_requirements_file(tmpdir, script):
create_basic_wheel_for_package(script, "base", "1.0")
create_basic_wheel_for_package(script, "base", "2.0")
create_basic_wheel_for_package(
script, "pkga", "1.0", depends=["base==1.0"],
)
create_basic_wheel_for_package(
script, "pkgb", "1.0", depends=["base==2.0"],
)
req_file = tmpdir.joinpath("requirements.txt")
req_file.write_text("pkga\npkgb")
result = script.pip(
"install",
"--no-cache-dir", "--no-index",
"--find-links", script.scratch_path,
"-r", req_file,
expect_error=True,
)
message = "package versions have conflicting dependencies"
assert message in result.stderr, str(result)
|
from tests.lib import create_basic_wheel_for_package
def test_new_resolver_conflict_requirements_file(tmpdir, script):
create_basic_wheel_for_package(script, "base", "1.0")
create_basic_wheel_for_package(script, "base", "2.0")
create_basic_wheel_for_package(
script, "pkga", "1.0", depends=["base==1.0"],
)
create_basic_wheel_for_package(
script, "pkgb", "1.0", depends=["base==2.0"],
)
req_file = tmpdir.joinpath("requirements.txt")
req_file.write_text("pkga\npkgb")
result = script.pip(
"install",
"--no-cache-dir", "--no-index",
"--find-links", script.scratch_path,
"-r", req_file,
expect_error=True,
)
message = "package versions have conflicting dependencies"
assert message in result.stderr, str(result)
def test_new_resolver_conflict_constraints_file(tmpdir, script):
create_basic_wheel_for_package(script, "pkg", "1.0")
constrats_file = tmpdir.joinpath("constraints.txt")
constrats_file.write_text("pkg!=1.0")
result = script.pip(
"install",
"--no-cache-dir", "--no-index",
"--find-links", script.scratch_path,
"-c", constrats_file,
"pkg==1.0",
expect_error=True,
)
assert "ResolutionImpossible" in result.stderr, str(result)
message = "The user requested (constraint) pkg!=1.0"
assert message in result.stdout, str(result)
|
Test for constraint in message
|
Test for constraint in message
|
Python
|
mit
|
pypa/pip,pradyunsg/pip,sbidoul/pip,sbidoul/pip,pypa/pip,pfmoore/pip,pfmoore/pip,pradyunsg/pip
|
from tests.lib import create_basic_wheel_for_package
def test_new_resolver_conflict_requirements_file(tmpdir, script):
create_basic_wheel_for_package(script, "base", "1.0")
create_basic_wheel_for_package(script, "base", "2.0")
create_basic_wheel_for_package(
script, "pkga", "1.0", depends=["base==1.0"],
)
create_basic_wheel_for_package(
script, "pkgb", "1.0", depends=["base==2.0"],
)
req_file = tmpdir.joinpath("requirements.txt")
req_file.write_text("pkga\npkgb")
result = script.pip(
"install",
"--no-cache-dir", "--no-index",
"--find-links", script.scratch_path,
"-r", req_file,
expect_error=True,
)
message = "package versions have conflicting dependencies"
assert message in result.stderr, str(result)
+
+ def test_new_resolver_conflict_constraints_file(tmpdir, script):
+ create_basic_wheel_for_package(script, "pkg", "1.0")
+
+ constrats_file = tmpdir.joinpath("constraints.txt")
+ constrats_file.write_text("pkg!=1.0")
+
+ result = script.pip(
+ "install",
+ "--no-cache-dir", "--no-index",
+ "--find-links", script.scratch_path,
+ "-c", constrats_file,
+ "pkg==1.0",
+ expect_error=True,
+ )
+
+ assert "ResolutionImpossible" in result.stderr, str(result)
+
+ message = "The user requested (constraint) pkg!=1.0"
+ assert message in result.stdout, str(result)
+
|
Test for constraint in message
|
## Code Before:
from tests.lib import create_basic_wheel_for_package
def test_new_resolver_conflict_requirements_file(tmpdir, script):
create_basic_wheel_for_package(script, "base", "1.0")
create_basic_wheel_for_package(script, "base", "2.0")
create_basic_wheel_for_package(
script, "pkga", "1.0", depends=["base==1.0"],
)
create_basic_wheel_for_package(
script, "pkgb", "1.0", depends=["base==2.0"],
)
req_file = tmpdir.joinpath("requirements.txt")
req_file.write_text("pkga\npkgb")
result = script.pip(
"install",
"--no-cache-dir", "--no-index",
"--find-links", script.scratch_path,
"-r", req_file,
expect_error=True,
)
message = "package versions have conflicting dependencies"
assert message in result.stderr, str(result)
## Instruction:
Test for constraint in message
## Code After:
from tests.lib import create_basic_wheel_for_package
def test_new_resolver_conflict_requirements_file(tmpdir, script):
create_basic_wheel_for_package(script, "base", "1.0")
create_basic_wheel_for_package(script, "base", "2.0")
create_basic_wheel_for_package(
script, "pkga", "1.0", depends=["base==1.0"],
)
create_basic_wheel_for_package(
script, "pkgb", "1.0", depends=["base==2.0"],
)
req_file = tmpdir.joinpath("requirements.txt")
req_file.write_text("pkga\npkgb")
result = script.pip(
"install",
"--no-cache-dir", "--no-index",
"--find-links", script.scratch_path,
"-r", req_file,
expect_error=True,
)
message = "package versions have conflicting dependencies"
assert message in result.stderr, str(result)
def test_new_resolver_conflict_constraints_file(tmpdir, script):
create_basic_wheel_for_package(script, "pkg", "1.0")
constrats_file = tmpdir.joinpath("constraints.txt")
constrats_file.write_text("pkg!=1.0")
result = script.pip(
"install",
"--no-cache-dir", "--no-index",
"--find-links", script.scratch_path,
"-c", constrats_file,
"pkg==1.0",
expect_error=True,
)
assert "ResolutionImpossible" in result.stderr, str(result)
message = "The user requested (constraint) pkg!=1.0"
assert message in result.stdout, str(result)
|
from tests.lib import create_basic_wheel_for_package
def test_new_resolver_conflict_requirements_file(tmpdir, script):
create_basic_wheel_for_package(script, "base", "1.0")
create_basic_wheel_for_package(script, "base", "2.0")
create_basic_wheel_for_package(
script, "pkga", "1.0", depends=["base==1.0"],
)
create_basic_wheel_for_package(
script, "pkgb", "1.0", depends=["base==2.0"],
)
req_file = tmpdir.joinpath("requirements.txt")
req_file.write_text("pkga\npkgb")
result = script.pip(
"install",
"--no-cache-dir", "--no-index",
"--find-links", script.scratch_path,
"-r", req_file,
expect_error=True,
)
message = "package versions have conflicting dependencies"
assert message in result.stderr, str(result)
+
+
+ def test_new_resolver_conflict_constraints_file(tmpdir, script):
+ create_basic_wheel_for_package(script, "pkg", "1.0")
+
+ constrats_file = tmpdir.joinpath("constraints.txt")
+ constrats_file.write_text("pkg!=1.0")
+
+ result = script.pip(
+ "install",
+ "--no-cache-dir", "--no-index",
+ "--find-links", script.scratch_path,
+ "-c", constrats_file,
+ "pkg==1.0",
+ expect_error=True,
+ )
+
+ assert "ResolutionImpossible" in result.stderr, str(result)
+
+ message = "The user requested (constraint) pkg!=1.0"
+ assert message in result.stdout, str(result)
|
d5e5ddbd1e1108f327a8d4c27cc18925cf7a3e1a
|
src/sentry/api/endpoints/project_stats.py
|
src/sentry/api/endpoints/project_stats.py
|
from __future__ import absolute_import
from rest_framework.response import Response
from sentry.app import tsdb
from sentry.api.base import BaseStatsEndpoint
from sentry.api.permissions import assert_perm
from sentry.models import Project
class ProjectStatsEndpoint(BaseStatsEndpoint):
def get(self, request, project_id):
project = Project.objects.get_from_cache(
id=project_id,
)
assert_perm(project, request.user, request.auth)
data = tsdb.get_range(
model=tsdb.models.project,
keys=[project.id],
**self._parse_args(request)
)[project.id]
return Response(data)
|
from __future__ import absolute_import
from rest_framework.response import Response
from sentry.app import tsdb
from sentry.api.base import BaseStatsEndpoint, DocSection
from sentry.api.permissions import assert_perm
from sentry.models import Project
class ProjectStatsEndpoint(BaseStatsEndpoint):
doc_section = DocSection.PROJECTS
def get(self, request, project_id):
"""
Retrieve event counts for a project
**Draft:** This endpoint may change in the future without notice.
Return a set of points representing a normalized timestamp and the
number of events seen in the period.
{method} {path}?since=1421092384.822244&until=1434052399.443363
Query ranges are limited to Sentry's configured time-series resolutions.
Parameters:
- since: a timestamp to set the start of the query
- until: a timestamp to set the end of the query
- resolution: an explicit resolution to search for
**Note:** resolution should not be used unless you're familiar with Sentry
internals as it's restricted to pre-defined values.
"""
project = Project.objects.get_from_cache(
id=project_id,
)
assert_perm(project, request.user, request.auth)
data = tsdb.get_range(
model=tsdb.models.project,
keys=[project.id],
**self._parse_args(request)
)[project.id]
return Response(data)
|
Add project stats to docs
|
Add project stats to docs
|
Python
|
bsd-3-clause
|
looker/sentry,kevinlondon/sentry,pauloschilling/sentry,1tush/sentry,daevaorn/sentry,wong2/sentry,fuziontech/sentry,gencer/sentry,imankulov/sentry,felixbuenemann/sentry,ifduyue/sentry,gg7/sentry,1tush/sentry,camilonova/sentry,hongliang5623/sentry,boneyao/sentry,camilonova/sentry,songyi199111/sentry,llonchj/sentry,mvaled/sentry,jokey2k/sentry,imankulov/sentry,jokey2k/sentry,Natim/sentry,TedaLIEz/sentry,ifduyue/sentry,BayanGroup/sentry,daevaorn/sentry,beeftornado/sentry,jokey2k/sentry,JTCunning/sentry,gg7/sentry,kevinastone/sentry,mvaled/sentry,jean/sentry,drcapulet/sentry,daevaorn/sentry,hongliang5623/sentry,alexm92/sentry,songyi199111/sentry,BuildingLink/sentry,korealerts1/sentry,mvaled/sentry,JamesMura/sentry,wujuguang/sentry,pauloschilling/sentry,songyi199111/sentry,jean/sentry,gencer/sentry,ewdurbin/sentry,llonchj/sentry,fuziontech/sentry,kevinlondon/sentry,vperron/sentry,nicholasserra/sentry,jean/sentry,JTCunning/sentry,hongliang5623/sentry,kevinastone/sentry,alexm92/sentry,Natim/sentry,drcapulet/sentry,boneyao/sentry,ngonzalvez/sentry,looker/sentry,zenefits/sentry,wong2/sentry,BayanGroup/sentry,gencer/sentry,mvaled/sentry,fuziontech/sentry,JTCunning/sentry,1tush/sentry,BuildingLink/sentry,vperron/sentry,fotinakis/sentry,beeftornado/sentry,felixbuenemann/sentry,nicholasserra/sentry,gencer/sentry,korealerts1/sentry,Kryz/sentry,felixbuenemann/sentry,daevaorn/sentry,JamesMura/sentry,zenefits/sentry,JackDanger/sentry,BuildingLink/sentry,ifduyue/sentry,zenefits/sentry,mvaled/sentry,kevinastone/sentry,wujuguang/sentry,nicholasserra/sentry,JackDanger/sentry,Natim/sentry,argonemyth/sentry,TedaLIEz/sentry,gencer/sentry,alexm92/sentry,BayanGroup/sentry,korealerts1/sentry,BuildingLink/sentry,imankulov/sentry,JamesMura/sentry,looker/sentry,mitsuhiko/sentry,drcapulet/sentry,looker/sentry,TedaLIEz/sentry,fotinakis/sentry,beeftornado/sentry,mitsuhiko/sentry,ifduyue/sentry,wong2/sentry,boneyao/sentry,jean/sentry,kevinlondon/sentry,camilonova/sentry,jean/sentry,JamesMura/sentry,J
amesMura/sentry,argonemyth/sentry,Kryz/sentry,zenefits/sentry,ewdurbin/sentry,argonemyth/sentry,BuildingLink/sentry,gg7/sentry,ewdurbin/sentry,wujuguang/sentry,JackDanger/sentry,looker/sentry,Kryz/sentry,mvaled/sentry,ngonzalvez/sentry,ngonzalvez/sentry,ifduyue/sentry,zenefits/sentry,fotinakis/sentry,pauloschilling/sentry,fotinakis/sentry,llonchj/sentry,vperron/sentry
|
from __future__ import absolute_import
from rest_framework.response import Response
from sentry.app import tsdb
- from sentry.api.base import BaseStatsEndpoint
+ from sentry.api.base import BaseStatsEndpoint, DocSection
from sentry.api.permissions import assert_perm
from sentry.models import Project
class ProjectStatsEndpoint(BaseStatsEndpoint):
+ doc_section = DocSection.PROJECTS
+
def get(self, request, project_id):
+ """
+ Retrieve event counts for a project
+
+ **Draft:** This endpoint may change in the future without notice.
+
+ Return a set of points representing a normalized timestamp and the
+ number of events seen in the period.
+
+ {method} {path}?since=1421092384.822244&until=1434052399.443363
+
+ Query ranges are limited to Sentry's configured time-series resolutions.
+
+ Parameters:
+
+ - since: a timestamp to set the start of the query
+ - until: a timestamp to set the end of the query
+ - resolution: an explicit resolution to search for
+
+ **Note:** resolution should not be used unless you're familiar with Sentry
+ internals as it's restricted to pre-defined values.
+ """
project = Project.objects.get_from_cache(
id=project_id,
)
assert_perm(project, request.user, request.auth)
data = tsdb.get_range(
model=tsdb.models.project,
keys=[project.id],
**self._parse_args(request)
)[project.id]
return Response(data)
|
Add project stats to docs
|
## Code Before:
from __future__ import absolute_import
from rest_framework.response import Response
from sentry.app import tsdb
from sentry.api.base import BaseStatsEndpoint
from sentry.api.permissions import assert_perm
from sentry.models import Project
class ProjectStatsEndpoint(BaseStatsEndpoint):
def get(self, request, project_id):
project = Project.objects.get_from_cache(
id=project_id,
)
assert_perm(project, request.user, request.auth)
data = tsdb.get_range(
model=tsdb.models.project,
keys=[project.id],
**self._parse_args(request)
)[project.id]
return Response(data)
## Instruction:
Add project stats to docs
## Code After:
from __future__ import absolute_import
from rest_framework.response import Response
from sentry.app import tsdb
from sentry.api.base import BaseStatsEndpoint, DocSection
from sentry.api.permissions import assert_perm
from sentry.models import Project
class ProjectStatsEndpoint(BaseStatsEndpoint):
doc_section = DocSection.PROJECTS
def get(self, request, project_id):
"""
Retrieve event counts for a project
**Draft:** This endpoint may change in the future without notice.
Return a set of points representing a normalized timestamp and the
number of events seen in the period.
{method} {path}?since=1421092384.822244&until=1434052399.443363
Query ranges are limited to Sentry's configured time-series resolutions.
Parameters:
- since: a timestamp to set the start of the query
- until: a timestamp to set the end of the query
- resolution: an explicit resolution to search for
**Note:** resolution should not be used unless you're familiar with Sentry
internals as it's restricted to pre-defined values.
"""
project = Project.objects.get_from_cache(
id=project_id,
)
assert_perm(project, request.user, request.auth)
data = tsdb.get_range(
model=tsdb.models.project,
keys=[project.id],
**self._parse_args(request)
)[project.id]
return Response(data)
|
from __future__ import absolute_import
from rest_framework.response import Response
from sentry.app import tsdb
- from sentry.api.base import BaseStatsEndpoint
+ from sentry.api.base import BaseStatsEndpoint, DocSection
? ++++++++++++
from sentry.api.permissions import assert_perm
from sentry.models import Project
class ProjectStatsEndpoint(BaseStatsEndpoint):
+ doc_section = DocSection.PROJECTS
+
def get(self, request, project_id):
+ """
+ Retrieve event counts for a project
+
+ **Draft:** This endpoint may change in the future without notice.
+
+ Return a set of points representing a normalized timestamp and the
+ number of events seen in the period.
+
+ {method} {path}?since=1421092384.822244&until=1434052399.443363
+
+ Query ranges are limited to Sentry's configured time-series resolutions.
+
+ Parameters:
+
+ - since: a timestamp to set the start of the query
+ - until: a timestamp to set the end of the query
+ - resolution: an explicit resolution to search for
+
+ **Note:** resolution should not be used unless you're familiar with Sentry
+ internals as it's restricted to pre-defined values.
+ """
project = Project.objects.get_from_cache(
id=project_id,
)
assert_perm(project, request.user, request.auth)
data = tsdb.get_range(
model=tsdb.models.project,
keys=[project.id],
**self._parse_args(request)
)[project.id]
return Response(data)
|
7435d508ae95c69dcb596e74f62bfb030011201f
|
tests/general/test_required_folders.py
|
tests/general/test_required_folders.py
|
""" Test that the All Mail folder is enabled for Gmail. """
import pytest
from inbox.auth.gmail import GmailAuthHandler
from inbox.basicauth import GmailSettingError
from inbox.crispin import GmailCrispinClient
class AccountStub(object):
id = 0
email_address = '[email protected]'
access_token = None
imap_endpoint = None
def new_token(self):
return ('foo', 22)
def validate_token(self, new_token):
return True
class ConnectionStub(object):
def logout(self):
pass
def get_auth_handler(monkeypatch, folders):
g = GmailAuthHandler('gmail')
def mock_connect(a):
return ConnectionStub()
g.connect_account = mock_connect
monkeypatch.setattr(GmailCrispinClient, 'folder_names',
lambda x: folders)
return g
def test_all_mail_missing(monkeypatch):
"""
Test that validate_folders throws a GmailSettingError if All Mail
is not in the list of folders.
"""
g = get_auth_handler(monkeypatch, {'inbox': 'INBOX'})
with pytest.raises(GmailSettingError):
g.verify_account(AccountStub())
def test_all_mail_present(monkeypatch):
"""
Test that the validate_folders passes if All Mail is present.
"""
g = get_auth_handler(monkeypatch, {'all': 'ALL', 'inbox': 'INBOX',
'trash': 'TRASH'})
assert g.verify_account(AccountStub())
|
""" Test that the All Mail folder is enabled for Gmail. """
import pytest
from inbox.auth.gmail import GmailAuthHandler
from inbox.basicauth import GmailSettingError
from inbox.crispin import GmailCrispinClient
class AccountStub(object):
id = 0
email_address = '[email protected]'
access_token = None
imap_endpoint = None
sync_state = 'running'
def new_token(self):
return ('foo', 22)
def validate_token(self, new_token):
return True
class ConnectionStub(object):
def logout(self):
pass
def get_auth_handler(monkeypatch, folders):
g = GmailAuthHandler('gmail')
def mock_connect(a):
return ConnectionStub()
g.connect_account = mock_connect
monkeypatch.setattr(GmailCrispinClient, 'folder_names',
lambda x: folders)
return g
def test_all_mail_missing(monkeypatch):
"""
Test that validate_folders throws a GmailSettingError if All Mail
is not in the list of folders.
"""
g = get_auth_handler(monkeypatch, {'inbox': 'INBOX'})
with pytest.raises(GmailSettingError):
g.verify_account(AccountStub())
def test_all_mail_present(monkeypatch):
"""
Test that the validate_folders passes if All Mail is present.
"""
g = get_auth_handler(monkeypatch, {'all': 'ALL', 'inbox': 'INBOX',
'trash': 'TRASH'})
assert g.verify_account(AccountStub())
|
Update mock Account in tests.
|
Update mock Account in tests.
|
Python
|
agpl-3.0
|
jobscore/sync-engine,jobscore/sync-engine,nylas/sync-engine,closeio/nylas,jobscore/sync-engine,nylas/sync-engine,closeio/nylas,jobscore/sync-engine,closeio/nylas,nylas/sync-engine,nylas/sync-engine,closeio/nylas
|
""" Test that the All Mail folder is enabled for Gmail. """
import pytest
from inbox.auth.gmail import GmailAuthHandler
from inbox.basicauth import GmailSettingError
from inbox.crispin import GmailCrispinClient
class AccountStub(object):
id = 0
email_address = '[email protected]'
access_token = None
imap_endpoint = None
+ sync_state = 'running'
def new_token(self):
return ('foo', 22)
def validate_token(self, new_token):
return True
class ConnectionStub(object):
def logout(self):
pass
def get_auth_handler(monkeypatch, folders):
g = GmailAuthHandler('gmail')
def mock_connect(a):
return ConnectionStub()
g.connect_account = mock_connect
monkeypatch.setattr(GmailCrispinClient, 'folder_names',
lambda x: folders)
return g
def test_all_mail_missing(monkeypatch):
"""
Test that validate_folders throws a GmailSettingError if All Mail
is not in the list of folders.
"""
g = get_auth_handler(monkeypatch, {'inbox': 'INBOX'})
with pytest.raises(GmailSettingError):
g.verify_account(AccountStub())
def test_all_mail_present(monkeypatch):
"""
Test that the validate_folders passes if All Mail is present.
"""
g = get_auth_handler(monkeypatch, {'all': 'ALL', 'inbox': 'INBOX',
'trash': 'TRASH'})
assert g.verify_account(AccountStub())
|
Update mock Account in tests.
|
## Code Before:
""" Test that the All Mail folder is enabled for Gmail. """
import pytest
from inbox.auth.gmail import GmailAuthHandler
from inbox.basicauth import GmailSettingError
from inbox.crispin import GmailCrispinClient
class AccountStub(object):
id = 0
email_address = '[email protected]'
access_token = None
imap_endpoint = None
def new_token(self):
return ('foo', 22)
def validate_token(self, new_token):
return True
class ConnectionStub(object):
def logout(self):
pass
def get_auth_handler(monkeypatch, folders):
g = GmailAuthHandler('gmail')
def mock_connect(a):
return ConnectionStub()
g.connect_account = mock_connect
monkeypatch.setattr(GmailCrispinClient, 'folder_names',
lambda x: folders)
return g
def test_all_mail_missing(monkeypatch):
"""
Test that validate_folders throws a GmailSettingError if All Mail
is not in the list of folders.
"""
g = get_auth_handler(monkeypatch, {'inbox': 'INBOX'})
with pytest.raises(GmailSettingError):
g.verify_account(AccountStub())
def test_all_mail_present(monkeypatch):
"""
Test that the validate_folders passes if All Mail is present.
"""
g = get_auth_handler(monkeypatch, {'all': 'ALL', 'inbox': 'INBOX',
'trash': 'TRASH'})
assert g.verify_account(AccountStub())
## Instruction:
Update mock Account in tests.
## Code After:
""" Test that the All Mail folder is enabled for Gmail. """
import pytest
from inbox.auth.gmail import GmailAuthHandler
from inbox.basicauth import GmailSettingError
from inbox.crispin import GmailCrispinClient
class AccountStub(object):
id = 0
email_address = '[email protected]'
access_token = None
imap_endpoint = None
sync_state = 'running'
def new_token(self):
return ('foo', 22)
def validate_token(self, new_token):
return True
class ConnectionStub(object):
def logout(self):
pass
def get_auth_handler(monkeypatch, folders):
g = GmailAuthHandler('gmail')
def mock_connect(a):
return ConnectionStub()
g.connect_account = mock_connect
monkeypatch.setattr(GmailCrispinClient, 'folder_names',
lambda x: folders)
return g
def test_all_mail_missing(monkeypatch):
"""
Test that validate_folders throws a GmailSettingError if All Mail
is not in the list of folders.
"""
g = get_auth_handler(monkeypatch, {'inbox': 'INBOX'})
with pytest.raises(GmailSettingError):
g.verify_account(AccountStub())
def test_all_mail_present(monkeypatch):
"""
Test that the validate_folders passes if All Mail is present.
"""
g = get_auth_handler(monkeypatch, {'all': 'ALL', 'inbox': 'INBOX',
'trash': 'TRASH'})
assert g.verify_account(AccountStub())
|
""" Test that the All Mail folder is enabled for Gmail. """
import pytest
from inbox.auth.gmail import GmailAuthHandler
from inbox.basicauth import GmailSettingError
from inbox.crispin import GmailCrispinClient
class AccountStub(object):
id = 0
email_address = '[email protected]'
access_token = None
imap_endpoint = None
+ sync_state = 'running'
def new_token(self):
return ('foo', 22)
def validate_token(self, new_token):
return True
class ConnectionStub(object):
def logout(self):
pass
def get_auth_handler(monkeypatch, folders):
g = GmailAuthHandler('gmail')
def mock_connect(a):
return ConnectionStub()
g.connect_account = mock_connect
monkeypatch.setattr(GmailCrispinClient, 'folder_names',
lambda x: folders)
return g
def test_all_mail_missing(monkeypatch):
"""
Test that validate_folders throws a GmailSettingError if All Mail
is not in the list of folders.
"""
g = get_auth_handler(monkeypatch, {'inbox': 'INBOX'})
with pytest.raises(GmailSettingError):
g.verify_account(AccountStub())
def test_all_mail_present(monkeypatch):
"""
Test that the validate_folders passes if All Mail is present.
"""
g = get_auth_handler(monkeypatch, {'all': 'ALL', 'inbox': 'INBOX',
'trash': 'TRASH'})
assert g.verify_account(AccountStub())
|
a11c839988b71e9f769cb5ba856474205b7aeefb
|
jsonschema/tests/fuzz_validate.py
|
jsonschema/tests/fuzz_validate.py
|
import sys
from hypothesis import given, strategies
import jsonschema
PRIM = strategies.one_of(
strategies.booleans(),
strategies.integers(),
strategies.floats(allow_nan=False, allow_infinity=False),
strategies.text(),
)
DICT = strategies.recursive(
base=strategies.one_of(
strategies.booleans(),
strategies.dictionaries(strategies.text(), PRIM),
),
extend=lambda inner: strategies.dictionaries(strategies.text(), inner),
)
@given(obj1=DICT, obj2=DICT)
def test_schemas(obj1, obj2):
try:
jsonschema.validate(instance=obj1, schema=obj2)
except jsonschema.exceptions.ValidationError:
pass
except jsonschema.exceptions.SchemaError:
pass
def main():
atheris.Setup(
sys.argv,
test_schemas.hypothesis.fuzz_one_input,
enable_python_coverage=True,
)
atheris.Fuzz()
if __name__ == "__main__":
import atheris
main()
|
import sys
from hypothesis import given, strategies
import jsonschema
PRIM = strategies.one_of(
strategies.booleans(),
strategies.integers(),
strategies.floats(allow_nan=False, allow_infinity=False),
strategies.text(),
)
DICT = strategies.recursive(
base=strategies.one_of(
strategies.booleans(),
strategies.dictionaries(strategies.text(), PRIM),
),
extend=lambda inner: strategies.dictionaries(strategies.text(), inner),
)
@given(obj1=DICT, obj2=DICT)
def test_schemas(obj1, obj2):
try:
jsonschema.validate(instance=obj1, schema=obj2)
except jsonschema.exceptions.ValidationError:
pass
except jsonschema.exceptions.SchemaError:
pass
def main():
atheris.instrument_all()
atheris.Setup(
sys.argv,
test_schemas.hypothesis.fuzz_one_input,
enable_python_coverage=True,
)
atheris.Fuzz()
if __name__ == "__main__":
import atheris
main()
|
Fix fuzzer to include instrumentation
|
Fix fuzzer to include instrumentation
|
Python
|
mit
|
python-jsonschema/jsonschema
|
import sys
from hypothesis import given, strategies
import jsonschema
PRIM = strategies.one_of(
strategies.booleans(),
strategies.integers(),
strategies.floats(allow_nan=False, allow_infinity=False),
strategies.text(),
)
DICT = strategies.recursive(
base=strategies.one_of(
strategies.booleans(),
strategies.dictionaries(strategies.text(), PRIM),
),
extend=lambda inner: strategies.dictionaries(strategies.text(), inner),
)
@given(obj1=DICT, obj2=DICT)
def test_schemas(obj1, obj2):
try:
jsonschema.validate(instance=obj1, schema=obj2)
except jsonschema.exceptions.ValidationError:
pass
except jsonschema.exceptions.SchemaError:
pass
def main():
+ atheris.instrument_all()
atheris.Setup(
sys.argv,
test_schemas.hypothesis.fuzz_one_input,
enable_python_coverage=True,
)
atheris.Fuzz()
if __name__ == "__main__":
import atheris
main()
|
Fix fuzzer to include instrumentation
|
## Code Before:
import sys
from hypothesis import given, strategies
import jsonschema
PRIM = strategies.one_of(
strategies.booleans(),
strategies.integers(),
strategies.floats(allow_nan=False, allow_infinity=False),
strategies.text(),
)
DICT = strategies.recursive(
base=strategies.one_of(
strategies.booleans(),
strategies.dictionaries(strategies.text(), PRIM),
),
extend=lambda inner: strategies.dictionaries(strategies.text(), inner),
)
@given(obj1=DICT, obj2=DICT)
def test_schemas(obj1, obj2):
try:
jsonschema.validate(instance=obj1, schema=obj2)
except jsonschema.exceptions.ValidationError:
pass
except jsonschema.exceptions.SchemaError:
pass
def main():
atheris.Setup(
sys.argv,
test_schemas.hypothesis.fuzz_one_input,
enable_python_coverage=True,
)
atheris.Fuzz()
if __name__ == "__main__":
import atheris
main()
## Instruction:
Fix fuzzer to include instrumentation
## Code After:
import sys
from hypothesis import given, strategies
import jsonschema
PRIM = strategies.one_of(
strategies.booleans(),
strategies.integers(),
strategies.floats(allow_nan=False, allow_infinity=False),
strategies.text(),
)
DICT = strategies.recursive(
base=strategies.one_of(
strategies.booleans(),
strategies.dictionaries(strategies.text(), PRIM),
),
extend=lambda inner: strategies.dictionaries(strategies.text(), inner),
)
@given(obj1=DICT, obj2=DICT)
def test_schemas(obj1, obj2):
try:
jsonschema.validate(instance=obj1, schema=obj2)
except jsonschema.exceptions.ValidationError:
pass
except jsonschema.exceptions.SchemaError:
pass
def main():
atheris.instrument_all()
atheris.Setup(
sys.argv,
test_schemas.hypothesis.fuzz_one_input,
enable_python_coverage=True,
)
atheris.Fuzz()
if __name__ == "__main__":
import atheris
main()
|
import sys
from hypothesis import given, strategies
import jsonschema
PRIM = strategies.one_of(
strategies.booleans(),
strategies.integers(),
strategies.floats(allow_nan=False, allow_infinity=False),
strategies.text(),
)
DICT = strategies.recursive(
base=strategies.one_of(
strategies.booleans(),
strategies.dictionaries(strategies.text(), PRIM),
),
extend=lambda inner: strategies.dictionaries(strategies.text(), inner),
)
@given(obj1=DICT, obj2=DICT)
def test_schemas(obj1, obj2):
try:
jsonschema.validate(instance=obj1, schema=obj2)
except jsonschema.exceptions.ValidationError:
pass
except jsonschema.exceptions.SchemaError:
pass
def main():
+ atheris.instrument_all()
atheris.Setup(
sys.argv,
test_schemas.hypothesis.fuzz_one_input,
enable_python_coverage=True,
)
atheris.Fuzz()
if __name__ == "__main__":
import atheris
main()
|
6b04211b42e76f6428fbaac361059fad4bef70de
|
txircd/modules/conn_join.py
|
txircd/modules/conn_join.py
|
from txircd.channel import IRCChannel
from txircd.modbase import Module
class Autojoin(Module):
def joinOnConnect(self, user):
if "client_join_on_connect" in self.ircd.servconfig:
for channel in self.ircd.servconfig["client_join_on_connect"]:
user.join(self.ircd.channels[channel] if channel in self.ircd.channels else IRCChannel(self.ircd, channel))
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.conn_join = None
def spawn(self):
self.conn_join = Autojoin().hook(self.ircd)
return {
"actions": {
"register": self.conn_join.joinOnConnect
}
}
def cleanup(self):
self.ircd.actions["register"].remove(self.conn_join.joinOnConnect)
|
from txircd.channel import IRCChannel
from txircd.modbase import Module
class Autojoin(Module):
def joinOnConnect(self, user):
if "client_join_on_connect" in self.ircd.servconfig:
for channel in self.ircd.servconfig["client_join_on_connect"]:
user.join(self.ircd.channels[channel] if channel in self.ircd.channels else IRCChannel(self.ircd, channel))
return True
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.conn_join = None
def spawn(self):
self.conn_join = Autojoin().hook(self.ircd)
return {
"actions": {
"register": [self.conn_join.joinOnConnect]
}
}
def cleanup(self):
self.ircd.actions["register"].remove(self.conn_join.joinOnConnect)
|
Fix once again nobody being allowed to connect
|
Fix once again nobody being allowed to connect
|
Python
|
bsd-3-clause
|
Heufneutje/txircd,DesertBus/txircd,ElementalAlchemist/txircd
|
from txircd.channel import IRCChannel
from txircd.modbase import Module
class Autojoin(Module):
def joinOnConnect(self, user):
if "client_join_on_connect" in self.ircd.servconfig:
for channel in self.ircd.servconfig["client_join_on_connect"]:
user.join(self.ircd.channels[channel] if channel in self.ircd.channels else IRCChannel(self.ircd, channel))
+ return True
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.conn_join = None
def spawn(self):
self.conn_join = Autojoin().hook(self.ircd)
return {
"actions": {
- "register": self.conn_join.joinOnConnect
+ "register": [self.conn_join.joinOnConnect]
}
}
def cleanup(self):
self.ircd.actions["register"].remove(self.conn_join.joinOnConnect)
|
Fix once again nobody being allowed to connect
|
## Code Before:
from txircd.channel import IRCChannel
from txircd.modbase import Module
class Autojoin(Module):
def joinOnConnect(self, user):
if "client_join_on_connect" in self.ircd.servconfig:
for channel in self.ircd.servconfig["client_join_on_connect"]:
user.join(self.ircd.channels[channel] if channel in self.ircd.channels else IRCChannel(self.ircd, channel))
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.conn_join = None
def spawn(self):
self.conn_join = Autojoin().hook(self.ircd)
return {
"actions": {
"register": self.conn_join.joinOnConnect
}
}
def cleanup(self):
self.ircd.actions["register"].remove(self.conn_join.joinOnConnect)
## Instruction:
Fix once again nobody being allowed to connect
## Code After:
from txircd.channel import IRCChannel
from txircd.modbase import Module
class Autojoin(Module):
def joinOnConnect(self, user):
if "client_join_on_connect" in self.ircd.servconfig:
for channel in self.ircd.servconfig["client_join_on_connect"]:
user.join(self.ircd.channels[channel] if channel in self.ircd.channels else IRCChannel(self.ircd, channel))
return True
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.conn_join = None
def spawn(self):
self.conn_join = Autojoin().hook(self.ircd)
return {
"actions": {
"register": [self.conn_join.joinOnConnect]
}
}
def cleanup(self):
self.ircd.actions["register"].remove(self.conn_join.joinOnConnect)
|
from txircd.channel import IRCChannel
from txircd.modbase import Module
class Autojoin(Module):
def joinOnConnect(self, user):
if "client_join_on_connect" in self.ircd.servconfig:
for channel in self.ircd.servconfig["client_join_on_connect"]:
user.join(self.ircd.channels[channel] if channel in self.ircd.channels else IRCChannel(self.ircd, channel))
+ return True
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.conn_join = None
def spawn(self):
self.conn_join = Autojoin().hook(self.ircd)
return {
"actions": {
- "register": self.conn_join.joinOnConnect
+ "register": [self.conn_join.joinOnConnect]
? + +
}
}
def cleanup(self):
self.ircd.actions["register"].remove(self.conn_join.joinOnConnect)
|
31ee84042a12fc65be539de896daf755b342d9a0
|
junction/proposals/permissions.py
|
junction/proposals/permissions.py
|
from django.core.exceptions import PermissionDenied
from junction.conferences.models import ConferenceProposalReviewer
from .models import ProposalSectionReviewer
def is_proposal_author(user, proposal):
return user.is_authenticated() and proposal.author == user
def is_proposal_reviewer(user, conference):
is_reviewer = ConferenceProposalReviewer.objects.filter(
reviewer=user, conference=conference, active=True).exists()
return user.is_authenticated() and is_reviewer
def is_proposal_section_reviewer(user, conference, proposal):
return user.is_authenticated() and ProposalSectionReviewer.objects.filter(
conference_reviewer__reviewer=user,
conference_reviewer__conference=conference,
proposal_section=proposal.proposal_section,
active=True).exists()
def is_proposal_author_or_proposal_reviewer(user, conference, proposal):
return is_proposal_author(user, proposal) or \
is_proposal_reviewer(user, conference)
def is_proposal_author_or_proposal_section_reviewer(user,
conference, proposal):
return is_proposal_author(user, proposal) or \
is_proposal_section_reviewer(user, conference, proposal)
def is_proposal_author_or_permisson_denied(user, proposal):
if is_proposal_author(user, proposal):
return True
raise PermissionDenied
|
from django.core.exceptions import PermissionDenied
from junction.conferences.models import ConferenceProposalReviewer
from .models import ProposalSectionReviewer
def is_proposal_author(user, proposal):
return user.is_authenticated() and proposal.author == user
def is_proposal_reviewer(user, conference):
authenticated = user.is_authenticated()
is_reviewer = ConferenceProposalReviewer.objects.filter(
reviewer=user, conference=conference, active=True).exists()
return authenticated and is_reviewer
def is_proposal_section_reviewer(user, conference, proposal):
return user.is_authenticated() and ProposalSectionReviewer.objects.filter(
conference_reviewer__reviewer=user,
conference_reviewer__conference=conference,
proposal_section=proposal.proposal_section,
active=True).exists()
def is_proposal_author_or_proposal_reviewer(user, conference, proposal):
return is_proposal_author(user, proposal) or \
is_proposal_reviewer(user, conference)
def is_proposal_author_or_proposal_section_reviewer(user,
conference, proposal):
return is_proposal_author(user, proposal) or \
is_proposal_section_reviewer(user, conference, proposal)
def is_proposal_author_or_permisson_denied(user, proposal):
if is_proposal_author(user, proposal):
return True
raise PermissionDenied
|
Move check for authentication to top
|
Move check for authentication to top
|
Python
|
mit
|
ChillarAnand/junction,pythonindia/junction,ChillarAnand/junction,ChillarAnand/junction,pythonindia/junction,ChillarAnand/junction,pythonindia/junction,pythonindia/junction
|
from django.core.exceptions import PermissionDenied
from junction.conferences.models import ConferenceProposalReviewer
from .models import ProposalSectionReviewer
def is_proposal_author(user, proposal):
return user.is_authenticated() and proposal.author == user
def is_proposal_reviewer(user, conference):
+ authenticated = user.is_authenticated()
is_reviewer = ConferenceProposalReviewer.objects.filter(
reviewer=user, conference=conference, active=True).exists()
- return user.is_authenticated() and is_reviewer
+ return authenticated and is_reviewer
def is_proposal_section_reviewer(user, conference, proposal):
return user.is_authenticated() and ProposalSectionReviewer.objects.filter(
conference_reviewer__reviewer=user,
conference_reviewer__conference=conference,
proposal_section=proposal.proposal_section,
active=True).exists()
def is_proposal_author_or_proposal_reviewer(user, conference, proposal):
return is_proposal_author(user, proposal) or \
is_proposal_reviewer(user, conference)
def is_proposal_author_or_proposal_section_reviewer(user,
conference, proposal):
return is_proposal_author(user, proposal) or \
is_proposal_section_reviewer(user, conference, proposal)
def is_proposal_author_or_permisson_denied(user, proposal):
if is_proposal_author(user, proposal):
return True
raise PermissionDenied
|
Move check for authentication to top
|
## Code Before:
from django.core.exceptions import PermissionDenied
from junction.conferences.models import ConferenceProposalReviewer
from .models import ProposalSectionReviewer
def is_proposal_author(user, proposal):
return user.is_authenticated() and proposal.author == user
def is_proposal_reviewer(user, conference):
is_reviewer = ConferenceProposalReviewer.objects.filter(
reviewer=user, conference=conference, active=True).exists()
return user.is_authenticated() and is_reviewer
def is_proposal_section_reviewer(user, conference, proposal):
return user.is_authenticated() and ProposalSectionReviewer.objects.filter(
conference_reviewer__reviewer=user,
conference_reviewer__conference=conference,
proposal_section=proposal.proposal_section,
active=True).exists()
def is_proposal_author_or_proposal_reviewer(user, conference, proposal):
return is_proposal_author(user, proposal) or \
is_proposal_reviewer(user, conference)
def is_proposal_author_or_proposal_section_reviewer(user,
conference, proposal):
return is_proposal_author(user, proposal) or \
is_proposal_section_reviewer(user, conference, proposal)
def is_proposal_author_or_permisson_denied(user, proposal):
if is_proposal_author(user, proposal):
return True
raise PermissionDenied
## Instruction:
Move check for authentication to top
## Code After:
from django.core.exceptions import PermissionDenied
from junction.conferences.models import ConferenceProposalReviewer
from .models import ProposalSectionReviewer
def is_proposal_author(user, proposal):
return user.is_authenticated() and proposal.author == user
def is_proposal_reviewer(user, conference):
authenticated = user.is_authenticated()
is_reviewer = ConferenceProposalReviewer.objects.filter(
reviewer=user, conference=conference, active=True).exists()
return authenticated and is_reviewer
def is_proposal_section_reviewer(user, conference, proposal):
return user.is_authenticated() and ProposalSectionReviewer.objects.filter(
conference_reviewer__reviewer=user,
conference_reviewer__conference=conference,
proposal_section=proposal.proposal_section,
active=True).exists()
def is_proposal_author_or_proposal_reviewer(user, conference, proposal):
return is_proposal_author(user, proposal) or \
is_proposal_reviewer(user, conference)
def is_proposal_author_or_proposal_section_reviewer(user,
conference, proposal):
return is_proposal_author(user, proposal) or \
is_proposal_section_reviewer(user, conference, proposal)
def is_proposal_author_or_permisson_denied(user, proposal):
if is_proposal_author(user, proposal):
return True
raise PermissionDenied
|
from django.core.exceptions import PermissionDenied
from junction.conferences.models import ConferenceProposalReviewer
from .models import ProposalSectionReviewer
def is_proposal_author(user, proposal):
return user.is_authenticated() and proposal.author == user
def is_proposal_reviewer(user, conference):
+ authenticated = user.is_authenticated()
is_reviewer = ConferenceProposalReviewer.objects.filter(
reviewer=user, conference=conference, active=True).exists()
- return user.is_authenticated() and is_reviewer
? -------- --
+ return authenticated and is_reviewer
def is_proposal_section_reviewer(user, conference, proposal):
return user.is_authenticated() and ProposalSectionReviewer.objects.filter(
conference_reviewer__reviewer=user,
conference_reviewer__conference=conference,
proposal_section=proposal.proposal_section,
active=True).exists()
def is_proposal_author_or_proposal_reviewer(user, conference, proposal):
return is_proposal_author(user, proposal) or \
is_proposal_reviewer(user, conference)
def is_proposal_author_or_proposal_section_reviewer(user,
conference, proposal):
return is_proposal_author(user, proposal) or \
is_proposal_section_reviewer(user, conference, proposal)
def is_proposal_author_or_permisson_denied(user, proposal):
if is_proposal_author(user, proposal):
return True
raise PermissionDenied
|
21afbaab7deb874703f4968ea1337b59120f0ad0
|
music-stream.py
|
music-stream.py
|
import urllib.request
import subprocess
LIMIT = 10
PLAYER = 'vlc'
url = 'http://streams.twitch.tv/kraken/streams?limit='+str(LIMIT)+'&offset=0&game=Music&broadcaster_language=&on_site=1'
with urllib.request.urlopen(url) as response:
html = response.read().decode('utf8')
i = 0
urls = []
for line in html.split(','):
if 'status' in line:
status = line.split('"')[-2]
status = ''.join(i for i in status if ord(i)<128) #filter non ascii characters
if 'display_name' in line:
name = line.split('"')[-2]
print(str(i) + ') ' + name + ' : ' + status)
i += 1
if 'url' in line:
url = line.split('"')[-2]
urls.append(url)
choice = LIMIT
while (choice >= LIMIT):
choice = int(input('Choose a stream\n'))
cmd = ['livestreamer', urls[choice], 'audio']
if PLAYER != 'vlc':
cmd.append('-p')
cmd.append(PLAYER)
subprocess.Popen(cmd, shell=False)
|
import urllib.request
import subprocess
LIMIT = 10
PLAYER = 'vlc'
STREAMS_URL = 'http://streams.twitch.tv/kraken/streams?limit='+str(LIMIT)+'&offset=0&game=Music&broadcaster_language=&on_site=1'
while True:
with urllib.request.urlopen(STREAMS_URL) as response:
html = response.read().decode('utf8')
i = 0
urls = []
for line in html.split(','):
if 'status' in line:
status = line.split('"')[-2]
status = ''.join(i for i in status if ord(i)<128) #filter non ascii characters
if 'display_name' in line:
name = line.split('"')[-2]
print(str(i) + ') ' + name + ' : ' + status)
i += 1
if 'url' in line:
url = line.split('"')[-2]
urls.append(url)
choice = LIMIT
while (choice >= LIMIT):
choice = int(input('Choose a stream\n'))
cmd = ['livestreamer', urls[choice], 'audio']
if PLAYER != 'vlc':
cmd.append('-p')
cmd.append(PLAYER)
subprocess.call(cmd, shell=False)
print('\n\n\n')
|
Refresh streams list when player is closed
|
Refresh streams list when player is closed
|
Python
|
mit
|
GaudyZircon/music-stream
|
import urllib.request
import subprocess
LIMIT = 10
PLAYER = 'vlc'
- url = 'http://streams.twitch.tv/kraken/streams?limit='+str(LIMIT)+'&offset=0&game=Music&broadcaster_language=&on_site=1'
+ STREAMS_URL = 'http://streams.twitch.tv/kraken/streams?limit='+str(LIMIT)+'&offset=0&game=Music&broadcaster_language=&on_site=1'
+ while True:
- with urllib.request.urlopen(url) as response:
+ with urllib.request.urlopen(STREAMS_URL) as response:
- html = response.read().decode('utf8')
+ html = response.read().decode('utf8')
- i = 0
+ i = 0
- urls = []
+ urls = []
- for line in html.split(','):
+ for line in html.split(','):
- if 'status' in line:
+ if 'status' in line:
- status = line.split('"')[-2]
+ status = line.split('"')[-2]
- status = ''.join(i for i in status if ord(i)<128) #filter non ascii characters
+ status = ''.join(i for i in status if ord(i)<128) #filter non ascii characters
- if 'display_name' in line:
+ if 'display_name' in line:
- name = line.split('"')[-2]
+ name = line.split('"')[-2]
- print(str(i) + ') ' + name + ' : ' + status)
+ print(str(i) + ') ' + name + ' : ' + status)
- i += 1
+ i += 1
- if 'url' in line:
+ if 'url' in line:
- url = line.split('"')[-2]
+ url = line.split('"')[-2]
- urls.append(url)
+ urls.append(url)
- choice = LIMIT
+ choice = LIMIT
- while (choice >= LIMIT):
+ while (choice >= LIMIT):
- choice = int(input('Choose a stream\n'))
+ choice = int(input('Choose a stream\n'))
- cmd = ['livestreamer', urls[choice], 'audio']
+ cmd = ['livestreamer', urls[choice], 'audio']
- if PLAYER != 'vlc':
+ if PLAYER != 'vlc':
- cmd.append('-p')
+ cmd.append('-p')
- cmd.append(PLAYER)
+ cmd.append(PLAYER)
- subprocess.Popen(cmd, shell=False)
+ subprocess.call(cmd, shell=False)
+ print('\n\n\n')
|
Refresh streams list when player is closed
|
## Code Before:
import urllib.request
import subprocess
LIMIT = 10
PLAYER = 'vlc'
url = 'http://streams.twitch.tv/kraken/streams?limit='+str(LIMIT)+'&offset=0&game=Music&broadcaster_language=&on_site=1'
with urllib.request.urlopen(url) as response:
html = response.read().decode('utf8')
i = 0
urls = []
for line in html.split(','):
if 'status' in line:
status = line.split('"')[-2]
status = ''.join(i for i in status if ord(i)<128) #filter non ascii characters
if 'display_name' in line:
name = line.split('"')[-2]
print(str(i) + ') ' + name + ' : ' + status)
i += 1
if 'url' in line:
url = line.split('"')[-2]
urls.append(url)
choice = LIMIT
while (choice >= LIMIT):
choice = int(input('Choose a stream\n'))
cmd = ['livestreamer', urls[choice], 'audio']
if PLAYER != 'vlc':
cmd.append('-p')
cmd.append(PLAYER)
subprocess.Popen(cmd, shell=False)
## Instruction:
Refresh streams list when player is closed
## Code After:
import urllib.request
import subprocess
LIMIT = 10
PLAYER = 'vlc'
STREAMS_URL = 'http://streams.twitch.tv/kraken/streams?limit='+str(LIMIT)+'&offset=0&game=Music&broadcaster_language=&on_site=1'
while True:
with urllib.request.urlopen(STREAMS_URL) as response:
html = response.read().decode('utf8')
i = 0
urls = []
for line in html.split(','):
if 'status' in line:
status = line.split('"')[-2]
status = ''.join(i for i in status if ord(i)<128) #filter non ascii characters
if 'display_name' in line:
name = line.split('"')[-2]
print(str(i) + ') ' + name + ' : ' + status)
i += 1
if 'url' in line:
url = line.split('"')[-2]
urls.append(url)
choice = LIMIT
while (choice >= LIMIT):
choice = int(input('Choose a stream\n'))
cmd = ['livestreamer', urls[choice], 'audio']
if PLAYER != 'vlc':
cmd.append('-p')
cmd.append(PLAYER)
subprocess.call(cmd, shell=False)
print('\n\n\n')
|
import urllib.request
import subprocess
LIMIT = 10
PLAYER = 'vlc'
- url = 'http://streams.twitch.tv/kraken/streams?limit='+str(LIMIT)+'&offset=0&game=Music&broadcaster_language=&on_site=1'
? ^^^
+ STREAMS_URL = 'http://streams.twitch.tv/kraken/streams?limit='+str(LIMIT)+'&offset=0&game=Music&broadcaster_language=&on_site=1'
? ^^^^^^^^^^^
+ while True:
- with urllib.request.urlopen(url) as response:
? ^^^
+ with urllib.request.urlopen(STREAMS_URL) as response:
? ++++ ^^^^^^^^^^^
- html = response.read().decode('utf8')
+ html = response.read().decode('utf8')
? ++++
- i = 0
+ i = 0
- urls = []
+ urls = []
? ++++
- for line in html.split(','):
+ for line in html.split(','):
? ++++
- if 'status' in line:
+ if 'status' in line:
? ++++
- status = line.split('"')[-2]
+ status = line.split('"')[-2]
? ++++
- status = ''.join(i for i in status if ord(i)<128) #filter non ascii characters
+ status = ''.join(i for i in status if ord(i)<128) #filter non ascii characters
? ++++
- if 'display_name' in line:
+ if 'display_name' in line:
? ++++
- name = line.split('"')[-2]
+ name = line.split('"')[-2]
? ++++
- print(str(i) + ') ' + name + ' : ' + status)
+ print(str(i) + ') ' + name + ' : ' + status)
? ++++
- i += 1
+ i += 1
? ++++
- if 'url' in line:
+ if 'url' in line:
? ++++
- url = line.split('"')[-2]
+ url = line.split('"')[-2]
? ++++
- urls.append(url)
+ urls.append(url)
? ++++
- choice = LIMIT
+ choice = LIMIT
? ++++
- while (choice >= LIMIT):
+ while (choice >= LIMIT):
? ++++
- choice = int(input('Choose a stream\n'))
+ choice = int(input('Choose a stream\n'))
? ++++
- cmd = ['livestreamer', urls[choice], 'audio']
+ cmd = ['livestreamer', urls[choice], 'audio']
? ++++
- if PLAYER != 'vlc':
+ if PLAYER != 'vlc':
? ++++
- cmd.append('-p')
+ cmd.append('-p')
? ++++
- cmd.append(PLAYER)
+ cmd.append(PLAYER)
? ++++
- subprocess.Popen(cmd, shell=False)
? ^^^^^
+ subprocess.call(cmd, shell=False)
? ++++ ^^^^
+ print('\n\n\n')
|
8be856ed565d9e961a4d24da74a13240e25f4ded
|
cio/plugins/base.py
|
cio/plugins/base.py
|
class BasePlugin(object):
ext = None
def load(self, content):
"""
Return plugin data for content string
"""
return content
def save(self, data):
"""
Persist external plugin resources and return content string for plugin data
"""
return data
def delete(self, data):
"""
Delete external plugin resources
"""
pass
def render(self, data):
"""
Render plugin
"""
return data
|
from cio.conf import settings
class BasePlugin(object):
ext = None
@property
def settings(self):
return settings.get(self.ext.upper(), {})
def load(self, content):
"""
Return plugin data for content string
"""
return content
def save(self, data):
"""
Persist external plugin resources and return content string for plugin data
"""
return data
def delete(self, data):
"""
Delete external plugin resources
"""
pass
def render(self, data):
"""
Render plugin
"""
return data
|
Add support for plugin settings
|
Add support for plugin settings
|
Python
|
bsd-3-clause
|
5monkeys/content-io
|
+ from cio.conf import settings
+
+
class BasePlugin(object):
ext = None
+
+ @property
+ def settings(self):
+ return settings.get(self.ext.upper(), {})
def load(self, content):
"""
Return plugin data for content string
"""
return content
def save(self, data):
"""
Persist external plugin resources and return content string for plugin data
"""
return data
def delete(self, data):
"""
Delete external plugin resources
"""
pass
def render(self, data):
"""
Render plugin
"""
return data
|
Add support for plugin settings
|
## Code Before:
class BasePlugin(object):
ext = None
def load(self, content):
"""
Return plugin data for content string
"""
return content
def save(self, data):
"""
Persist external plugin resources and return content string for plugin data
"""
return data
def delete(self, data):
"""
Delete external plugin resources
"""
pass
def render(self, data):
"""
Render plugin
"""
return data
## Instruction:
Add support for plugin settings
## Code After:
from cio.conf import settings
class BasePlugin(object):
ext = None
@property
def settings(self):
return settings.get(self.ext.upper(), {})
def load(self, content):
"""
Return plugin data for content string
"""
return content
def save(self, data):
"""
Persist external plugin resources and return content string for plugin data
"""
return data
def delete(self, data):
"""
Delete external plugin resources
"""
pass
def render(self, data):
"""
Render plugin
"""
return data
|
+ from cio.conf import settings
+
+
class BasePlugin(object):
ext = None
+
+ @property
+ def settings(self):
+ return settings.get(self.ext.upper(), {})
def load(self, content):
"""
Return plugin data for content string
"""
return content
def save(self, data):
"""
Persist external plugin resources and return content string for plugin data
"""
return data
def delete(self, data):
"""
Delete external plugin resources
"""
pass
def render(self, data):
"""
Render plugin
"""
return data
|
238427e684e6cb6f8c8cc6c80c8d24a51e908e24
|
bika/lims/upgrade/to3031.py
|
bika/lims/upgrade/to3031.py
|
from Products.CMFPlone.utils import _createObjectByType
from zExceptions import BadRequest
def upgrade(tool):
portal = aq_parent(aq_inner(tool))
setup = portal.portal_setup
setup.runImportStepFromProfile('profile-bika.lims:default', 'controlpanel')
try:
_createObjectByType("SubGroups", portal.bika_setup, "bika_subgroups",
title="Sub-groups")
obj = portal.bika_setup.bika_subgroups
obj.unmarkCreationFlag()
obj.reindexObject()
except BadRequest:
# folder already exists
pass
return True
|
from Acquisition import aq_parent, aq_inner
from Products.CMFCore.utils import getToolByName
from Products.CMFPlone.utils import _createObjectByType
from zExceptions import BadRequest
def upgrade(tool):
portal = aq_parent(aq_inner(tool))
at = getToolByName(portal, 'archetype_tool')
at.setCatalogsByType('SubGroup', ['bika_setup_catalog', ])
setup = portal.portal_setup
setup.runImportStepFromProfile('profile-bika.lims:default', 'controlpanel')
try:
_createObjectByType("SubGroups", portal.bika_setup, "bika_subgroups",
title="Sub-groups")
obj = portal.bika_setup.bika_subgroups
obj.unmarkCreationFlag()
obj.reindexObject()
except BadRequest:
# folder already exists
pass
return True
|
Include catalog setup in 3031 upgrade
|
Include catalog setup in 3031 upgrade
|
Python
|
agpl-3.0
|
veroc/Bika-LIMS,anneline/Bika-LIMS,labsanmartin/Bika-LIMS,rockfruit/bika.lims,veroc/Bika-LIMS,anneline/Bika-LIMS,veroc/Bika-LIMS,anneline/Bika-LIMS,DeBortoliWines/Bika-LIMS,rockfruit/bika.lims,labsanmartin/Bika-LIMS,labsanmartin/Bika-LIMS,DeBortoliWines/Bika-LIMS,DeBortoliWines/Bika-LIMS
|
+ from Acquisition import aq_parent, aq_inner
+ from Products.CMFCore.utils import getToolByName
from Products.CMFPlone.utils import _createObjectByType
from zExceptions import BadRequest
def upgrade(tool):
portal = aq_parent(aq_inner(tool))
+ at = getToolByName(portal, 'archetype_tool')
+ at.setCatalogsByType('SubGroup', ['bika_setup_catalog', ])
setup = portal.portal_setup
setup.runImportStepFromProfile('profile-bika.lims:default', 'controlpanel')
try:
_createObjectByType("SubGroups", portal.bika_setup, "bika_subgroups",
title="Sub-groups")
obj = portal.bika_setup.bika_subgroups
obj.unmarkCreationFlag()
obj.reindexObject()
except BadRequest:
# folder already exists
pass
return True
|
Include catalog setup in 3031 upgrade
|
## Code Before:
from Products.CMFPlone.utils import _createObjectByType
from zExceptions import BadRequest
def upgrade(tool):
portal = aq_parent(aq_inner(tool))
setup = portal.portal_setup
setup.runImportStepFromProfile('profile-bika.lims:default', 'controlpanel')
try:
_createObjectByType("SubGroups", portal.bika_setup, "bika_subgroups",
title="Sub-groups")
obj = portal.bika_setup.bika_subgroups
obj.unmarkCreationFlag()
obj.reindexObject()
except BadRequest:
# folder already exists
pass
return True
## Instruction:
Include catalog setup in 3031 upgrade
## Code After:
from Acquisition import aq_parent, aq_inner
from Products.CMFCore.utils import getToolByName
from Products.CMFPlone.utils import _createObjectByType
from zExceptions import BadRequest
def upgrade(tool):
portal = aq_parent(aq_inner(tool))
at = getToolByName(portal, 'archetype_tool')
at.setCatalogsByType('SubGroup', ['bika_setup_catalog', ])
setup = portal.portal_setup
setup.runImportStepFromProfile('profile-bika.lims:default', 'controlpanel')
try:
_createObjectByType("SubGroups", portal.bika_setup, "bika_subgroups",
title="Sub-groups")
obj = portal.bika_setup.bika_subgroups
obj.unmarkCreationFlag()
obj.reindexObject()
except BadRequest:
# folder already exists
pass
return True
|
+ from Acquisition import aq_parent, aq_inner
+ from Products.CMFCore.utils import getToolByName
from Products.CMFPlone.utils import _createObjectByType
from zExceptions import BadRequest
def upgrade(tool):
portal = aq_parent(aq_inner(tool))
+ at = getToolByName(portal, 'archetype_tool')
+ at.setCatalogsByType('SubGroup', ['bika_setup_catalog', ])
setup = portal.portal_setup
setup.runImportStepFromProfile('profile-bika.lims:default', 'controlpanel')
try:
_createObjectByType("SubGroups", portal.bika_setup, "bika_subgroups",
title="Sub-groups")
obj = portal.bika_setup.bika_subgroups
obj.unmarkCreationFlag()
obj.reindexObject()
except BadRequest:
# folder already exists
pass
return True
|
11022b79ded961bdd2e9a6bff0c4f4a03097084c
|
scripts/install_new_database.py
|
scripts/install_new_database.py
|
import os
import sys
_upper_dir = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..'))
if _upper_dir not in sys.path:
sys.path.append(_upper_dir)
import chdb
if __name__ == '__main__':
chdb.install_scratch_db()
|
import os
import sys
_upper_dir = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..'))
if _upper_dir not in sys.path:
sys.path.append(_upper_dir)
import chdb
def sanity_check():
sdb = chdb.init_scratch_db()
snippet_count = sdb.execute_with_retry_s(
'''SELECT COUNT(*) FROM snippets''')[0]
assert snippet_count > 100
article_count = sdb.execute_with_retry_s(
'''SELECT COUNT(*) FROM articles''')[0]
assert article_count > 100
if __name__ == '__main__':
sanity_check()
chdb.install_scratch_db()
|
Add a couple of sanity checks so we don't break the database.
|
Add a couple of sanity checks so we don't break the database.
Part of #139.
|
Python
|
mit
|
guilherme-pg/citationhunt,eggpi/citationhunt,guilherme-pg/citationhunt,guilherme-pg/citationhunt,guilherme-pg/citationhunt,eggpi/citationhunt,eggpi/citationhunt,eggpi/citationhunt
|
import os
import sys
_upper_dir = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..'))
if _upper_dir not in sys.path:
sys.path.append(_upper_dir)
import chdb
+ def sanity_check():
+ sdb = chdb.init_scratch_db()
+ snippet_count = sdb.execute_with_retry_s(
+ '''SELECT COUNT(*) FROM snippets''')[0]
+ assert snippet_count > 100
+
+ article_count = sdb.execute_with_retry_s(
+ '''SELECT COUNT(*) FROM articles''')[0]
+ assert article_count > 100
+
if __name__ == '__main__':
+ sanity_check()
chdb.install_scratch_db()
|
Add a couple of sanity checks so we don't break the database.
|
## Code Before:
import os
import sys
_upper_dir = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..'))
if _upper_dir not in sys.path:
sys.path.append(_upper_dir)
import chdb
if __name__ == '__main__':
chdb.install_scratch_db()
## Instruction:
Add a couple of sanity checks so we don't break the database.
## Code After:
import os
import sys
_upper_dir = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..'))
if _upper_dir not in sys.path:
sys.path.append(_upper_dir)
import chdb
def sanity_check():
sdb = chdb.init_scratch_db()
snippet_count = sdb.execute_with_retry_s(
'''SELECT COUNT(*) FROM snippets''')[0]
assert snippet_count > 100
article_count = sdb.execute_with_retry_s(
'''SELECT COUNT(*) FROM articles''')[0]
assert article_count > 100
if __name__ == '__main__':
sanity_check()
chdb.install_scratch_db()
|
import os
import sys
_upper_dir = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..'))
if _upper_dir not in sys.path:
sys.path.append(_upper_dir)
import chdb
+ def sanity_check():
+ sdb = chdb.init_scratch_db()
+ snippet_count = sdb.execute_with_retry_s(
+ '''SELECT COUNT(*) FROM snippets''')[0]
+ assert snippet_count > 100
+
+ article_count = sdb.execute_with_retry_s(
+ '''SELECT COUNT(*) FROM articles''')[0]
+ assert article_count > 100
+
if __name__ == '__main__':
+ sanity_check()
chdb.install_scratch_db()
|
69f7490b6ed28c28784148295dec2144344f4ed8
|
config.py
|
config.py
|
import os
if os.environ.get('DATABASE_URL') is None:
SQLALCHEMY_DATABASE_URI = 'sqlite:///meetup.db'
else:
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
SQLALCHEMY_TRACK_MODIFICATIONS = False # supress deprecation warning
ACCESS_TOKEN = os.environ['ACCESS_TOKEN']
PAGE_ID = os.environ['PAGE_ID']
APP_ID = os.environ['APP_ID']
VERIFY_TOKEN = os.environ['VERIFY_TOKEN']
|
import os
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
SQLALCHEMY_TRACK_MODIFICATIONS = False # suppress deprecation warning
ACCESS_TOKEN = os.environ['ACCESS_TOKEN']
PAGE_ID = os.environ['PAGE_ID']
APP_ID = os.environ['APP_ID']
VERIFY_TOKEN = os.environ['VERIFY_TOKEN']
|
Remove automatic fallback to SQLite
|
Remove automatic fallback to SQLite
It's better to be explicit if there's no DATABASE_URL.
|
Python
|
mit
|
Stark-Mountain/meetup-facebook-bot,Stark-Mountain/meetup-facebook-bot
|
import os
-
- if os.environ.get('DATABASE_URL') is None:
- SQLALCHEMY_DATABASE_URI = 'sqlite:///meetup.db'
- else:
- SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
+ SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
-
- SQLALCHEMY_TRACK_MODIFICATIONS = False # supress deprecation warning
+ SQLALCHEMY_TRACK_MODIFICATIONS = False # suppress deprecation warning
ACCESS_TOKEN = os.environ['ACCESS_TOKEN']
PAGE_ID = os.environ['PAGE_ID']
APP_ID = os.environ['APP_ID']
VERIFY_TOKEN = os.environ['VERIFY_TOKEN']
|
Remove automatic fallback to SQLite
|
## Code Before:
import os
if os.environ.get('DATABASE_URL') is None:
SQLALCHEMY_DATABASE_URI = 'sqlite:///meetup.db'
else:
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
SQLALCHEMY_TRACK_MODIFICATIONS = False # supress deprecation warning
ACCESS_TOKEN = os.environ['ACCESS_TOKEN']
PAGE_ID = os.environ['PAGE_ID']
APP_ID = os.environ['APP_ID']
VERIFY_TOKEN = os.environ['VERIFY_TOKEN']
## Instruction:
Remove automatic fallback to SQLite
## Code After:
import os
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
SQLALCHEMY_TRACK_MODIFICATIONS = False # suppress deprecation warning
ACCESS_TOKEN = os.environ['ACCESS_TOKEN']
PAGE_ID = os.environ['PAGE_ID']
APP_ID = os.environ['APP_ID']
VERIFY_TOKEN = os.environ['VERIFY_TOKEN']
|
import os
-
- if os.environ.get('DATABASE_URL') is None:
- SQLALCHEMY_DATABASE_URI = 'sqlite:///meetup.db'
- else:
- SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
? ----
+ SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
-
- SQLALCHEMY_TRACK_MODIFICATIONS = False # supress deprecation warning
+ SQLALCHEMY_TRACK_MODIFICATIONS = False # suppress deprecation warning
? +
ACCESS_TOKEN = os.environ['ACCESS_TOKEN']
PAGE_ID = os.environ['PAGE_ID']
APP_ID = os.environ['APP_ID']
VERIFY_TOKEN = os.environ['VERIFY_TOKEN']
|
e5bd4884fc7ea4389315d0d2b8ff248bbda9a905
|
custom/enikshay/integrations/utils.py
|
custom/enikshay/integrations/utils.py
|
from corehq.apps.locations.models import SQLLocation
from dimagi.utils.logging import notify_exception
def is_submission_from_test_location(person_case):
try:
phi_location = SQLLocation.objects.get(location_id=person_case.owner_id)
except SQLLocation.DoesNotExist:
message = ("Location with id {location_id} not found. This is the owner for person with id: {person_id}"
.format(location_id=person_case.owner_id, person_id=person_case.case_id))
notify_exception(None, message="[ENIKSHAY] {}".format(message))
return True
return phi_location.metadata.get('is_test', "yes") == "yes"
|
from corehq.apps.locations.models import SQLLocation
from custom.enikshay.exceptions import NikshayLocationNotFound
def is_submission_from_test_location(person_case):
try:
phi_location = SQLLocation.objects.get(location_id=person_case.owner_id)
except SQLLocation.DoesNotExist:
raise NikshayLocationNotFound(
"Location with id {location_id} not found. This is the owner for person with id: {person_id}"
.format(location_id=person_case.owner_id, person_id=person_case.case_id)
)
return phi_location.metadata.get('is_test', "yes") == "yes"
|
Revert "Fallback is test location"
|
Revert "Fallback is test location"
This reverts commit 2ba9865fa0f05e9ae244b2513e046c961540fca1.
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
from corehq.apps.locations.models import SQLLocation
- from dimagi.utils.logging import notify_exception
+ from custom.enikshay.exceptions import NikshayLocationNotFound
def is_submission_from_test_location(person_case):
try:
phi_location = SQLLocation.objects.get(location_id=person_case.owner_id)
except SQLLocation.DoesNotExist:
+ raise NikshayLocationNotFound(
- message = ("Location with id {location_id} not found. This is the owner for person with id: {person_id}"
+ "Location with id {location_id} not found. This is the owner for person with id: {person_id}"
- .format(location_id=person_case.owner_id, person_id=person_case.case_id))
+ .format(location_id=person_case.owner_id, person_id=person_case.case_id)
+ )
- notify_exception(None, message="[ENIKSHAY] {}".format(message))
- return True
-
return phi_location.metadata.get('is_test', "yes") == "yes"
|
Revert "Fallback is test location"
|
## Code Before:
from corehq.apps.locations.models import SQLLocation
from dimagi.utils.logging import notify_exception
def is_submission_from_test_location(person_case):
try:
phi_location = SQLLocation.objects.get(location_id=person_case.owner_id)
except SQLLocation.DoesNotExist:
message = ("Location with id {location_id} not found. This is the owner for person with id: {person_id}"
.format(location_id=person_case.owner_id, person_id=person_case.case_id))
notify_exception(None, message="[ENIKSHAY] {}".format(message))
return True
return phi_location.metadata.get('is_test', "yes") == "yes"
## Instruction:
Revert "Fallback is test location"
## Code After:
from corehq.apps.locations.models import SQLLocation
from custom.enikshay.exceptions import NikshayLocationNotFound
def is_submission_from_test_location(person_case):
try:
phi_location = SQLLocation.objects.get(location_id=person_case.owner_id)
except SQLLocation.DoesNotExist:
raise NikshayLocationNotFound(
"Location with id {location_id} not found. This is the owner for person with id: {person_id}"
.format(location_id=person_case.owner_id, person_id=person_case.case_id)
)
return phi_location.metadata.get('is_test', "yes") == "yes"
|
from corehq.apps.locations.models import SQLLocation
- from dimagi.utils.logging import notify_exception
+ from custom.enikshay.exceptions import NikshayLocationNotFound
def is_submission_from_test_location(person_case):
try:
phi_location = SQLLocation.objects.get(location_id=person_case.owner_id)
except SQLLocation.DoesNotExist:
+ raise NikshayLocationNotFound(
- message = ("Location with id {location_id} not found. This is the owner for person with id: {person_id}"
? ------- - ^
+ "Location with id {location_id} not found. This is the owner for person with id: {person_id}"
? ^^
- .format(location_id=person_case.owner_id, person_id=person_case.case_id))
? ------- -
+ .format(location_id=person_case.owner_id, person_id=person_case.case_id)
+ )
- notify_exception(None, message="[ENIKSHAY] {}".format(message))
- return True
-
return phi_location.metadata.get('is_test', "yes") == "yes"
|
d20039737d1e25f4462c4865347fa22411045677
|
budgetsupervisor/users/models.py
|
budgetsupervisor/users/models.py
|
from django.db import models
from django.contrib.auth.models import AbstractUser
from django.conf import settings
from django.db.models.signals import post_save
from saltedge.factory import get_saltedge_app
class User(AbstractUser):
pass
class ProfileManager(models.Manager):
def create_in_saltedge(self, profile):
app = get_saltedge_app()
url = "https://www.saltedge.com/api/v5/customers"
payload = json.dumps({"data": {"identifier": profile.user.id}})
response = app.post(url, payload)
data = response.json()
profile.external_id = data["data"]["id"]
profile.save()
class Profile(models.Model):
user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
external_id = models.BigIntegerField(blank=True, null=True)
objects = ProfileManager()
def __str__(self):
return str(self.user)
def create_user_profile(sender, instance, created, **kwargs):
if created:
Profile.objects.create(user=instance)
post_save.connect(create_user_profile, sender=settings.AUTH_USER_MODEL)
|
from django.db import models
from django.contrib.auth.models import AbstractUser
from django.conf import settings
from django.db.models.signals import post_save
from saltedge.factory import get_saltedge_app
class User(AbstractUser):
pass
class ProfileManager(models.Manager):
def create_in_saltedge(self, profile):
app = get_saltedge_app()
url = "https://www.saltedge.com/api/v5/customers"
payload = json.dumps({"data": {"identifier": profile.user.id}})
response = app.post(url, payload)
data = response.json()
profile.external_id = data["data"]["id"]
profile.save()
def remove_from_saltedge(self, profile):
pass
class Profile(models.Model):
user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
external_id = models.BigIntegerField(blank=True, null=True)
objects = ProfileManager()
def __str__(self):
return str(self.user)
def create_user_profile(sender, instance, created, **kwargs):
if created:
Profile.objects.create(user=instance)
post_save.connect(create_user_profile, sender=settings.AUTH_USER_MODEL)
|
Add placeholder for removing customer from saltedge
|
Add placeholder for removing customer from saltedge
|
Python
|
mit
|
ltowarek/budget-supervisor
|
from django.db import models
from django.contrib.auth.models import AbstractUser
from django.conf import settings
from django.db.models.signals import post_save
from saltedge.factory import get_saltedge_app
class User(AbstractUser):
pass
class ProfileManager(models.Manager):
def create_in_saltedge(self, profile):
app = get_saltedge_app()
url = "https://www.saltedge.com/api/v5/customers"
payload = json.dumps({"data": {"identifier": profile.user.id}})
response = app.post(url, payload)
data = response.json()
profile.external_id = data["data"]["id"]
profile.save()
+ def remove_from_saltedge(self, profile):
+ pass
+
class Profile(models.Model):
user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
external_id = models.BigIntegerField(blank=True, null=True)
objects = ProfileManager()
def __str__(self):
return str(self.user)
def create_user_profile(sender, instance, created, **kwargs):
if created:
Profile.objects.create(user=instance)
post_save.connect(create_user_profile, sender=settings.AUTH_USER_MODEL)
|
Add placeholder for removing customer from saltedge
|
## Code Before:
from django.db import models
from django.contrib.auth.models import AbstractUser
from django.conf import settings
from django.db.models.signals import post_save
from saltedge.factory import get_saltedge_app
class User(AbstractUser):
pass
class ProfileManager(models.Manager):
def create_in_saltedge(self, profile):
app = get_saltedge_app()
url = "https://www.saltedge.com/api/v5/customers"
payload = json.dumps({"data": {"identifier": profile.user.id}})
response = app.post(url, payload)
data = response.json()
profile.external_id = data["data"]["id"]
profile.save()
class Profile(models.Model):
user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
external_id = models.BigIntegerField(blank=True, null=True)
objects = ProfileManager()
def __str__(self):
return str(self.user)
def create_user_profile(sender, instance, created, **kwargs):
if created:
Profile.objects.create(user=instance)
post_save.connect(create_user_profile, sender=settings.AUTH_USER_MODEL)
## Instruction:
Add placeholder for removing customer from saltedge
## Code After:
from django.db import models
from django.contrib.auth.models import AbstractUser
from django.conf import settings
from django.db.models.signals import post_save
from saltedge.factory import get_saltedge_app
class User(AbstractUser):
pass
class ProfileManager(models.Manager):
def create_in_saltedge(self, profile):
app = get_saltedge_app()
url = "https://www.saltedge.com/api/v5/customers"
payload = json.dumps({"data": {"identifier": profile.user.id}})
response = app.post(url, payload)
data = response.json()
profile.external_id = data["data"]["id"]
profile.save()
def remove_from_saltedge(self, profile):
pass
class Profile(models.Model):
user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
external_id = models.BigIntegerField(blank=True, null=True)
objects = ProfileManager()
def __str__(self):
return str(self.user)
def create_user_profile(sender, instance, created, **kwargs):
if created:
Profile.objects.create(user=instance)
post_save.connect(create_user_profile, sender=settings.AUTH_USER_MODEL)
|
from django.db import models
from django.contrib.auth.models import AbstractUser
from django.conf import settings
from django.db.models.signals import post_save
from saltedge.factory import get_saltedge_app
class User(AbstractUser):
pass
class ProfileManager(models.Manager):
def create_in_saltedge(self, profile):
app = get_saltedge_app()
url = "https://www.saltedge.com/api/v5/customers"
payload = json.dumps({"data": {"identifier": profile.user.id}})
response = app.post(url, payload)
data = response.json()
profile.external_id = data["data"]["id"]
profile.save()
+ def remove_from_saltedge(self, profile):
+ pass
+
class Profile(models.Model):
user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
external_id = models.BigIntegerField(blank=True, null=True)
objects = ProfileManager()
def __str__(self):
return str(self.user)
def create_user_profile(sender, instance, created, **kwargs):
if created:
Profile.objects.create(user=instance)
post_save.connect(create_user_profile, sender=settings.AUTH_USER_MODEL)
|
206a59c838623aae5e0b0f91f8089ffc13e2cfd0
|
pipenv/vendor/pythonfinder/environment.py
|
pipenv/vendor/pythonfinder/environment.py
|
from __future__ import print_function, absolute_import
import os
import platform
import sys
def is_type_checking():
from typing import TYPE_CHECKING
return TYPE_CHECKING
PYENV_INSTALLED = bool(os.environ.get("PYENV_SHELL")) or bool(
os.environ.get("PYENV_ROOT")
)
ASDF_INSTALLED = bool(os.environ.get("ASDF_DIR"))
PYENV_ROOT = os.path.expanduser(
os.path.expandvars(os.environ.get("PYENV_ROOT", "~/.pyenv"))
)
ASDF_DATA_DIR = os.path.expanduser(
os.path.expandvars(os.environ.get("ASDF_DATA_DIR", "~/.asdf"))
)
IS_64BIT_OS = None
SYSTEM_ARCH = platform.architecture()[0]
if sys.maxsize > 2 ** 32:
IS_64BIT_OS = platform.machine() == "AMD64"
else:
IS_64BIT_OS = False
IGNORE_UNSUPPORTED = bool(os.environ.get("PYTHONFINDER_IGNORE_UNSUPPORTED", False))
MYPY_RUNNING = os.environ.get("MYPY_RUNNING", is_type_checking())
|
from __future__ import print_function, absolute_import
import os
import platform
import sys
def is_type_checking():
try:
from typing import TYPE_CHECKING
except ImportError:
return False
return TYPE_CHECKING
PYENV_INSTALLED = bool(os.environ.get("PYENV_SHELL")) or bool(
os.environ.get("PYENV_ROOT")
)
ASDF_INSTALLED = bool(os.environ.get("ASDF_DIR"))
PYENV_ROOT = os.path.expanduser(
os.path.expandvars(os.environ.get("PYENV_ROOT", "~/.pyenv"))
)
ASDF_DATA_DIR = os.path.expanduser(
os.path.expandvars(os.environ.get("ASDF_DATA_DIR", "~/.asdf"))
)
IS_64BIT_OS = None
SYSTEM_ARCH = platform.architecture()[0]
if sys.maxsize > 2 ** 32:
IS_64BIT_OS = platform.machine() == "AMD64"
else:
IS_64BIT_OS = False
IGNORE_UNSUPPORTED = bool(os.environ.get("PYTHONFINDER_IGNORE_UNSUPPORTED", False))
MYPY_RUNNING = os.environ.get("MYPY_RUNNING", is_type_checking())
|
Fix typing check for pythonfinder
|
Fix typing check for pythonfinder
Signed-off-by: Dan Ryan <[email protected]>
|
Python
|
mit
|
kennethreitz/pipenv
|
from __future__ import print_function, absolute_import
import os
import platform
import sys
def is_type_checking():
+ try:
- from typing import TYPE_CHECKING
+ from typing import TYPE_CHECKING
+ except ImportError:
+ return False
return TYPE_CHECKING
PYENV_INSTALLED = bool(os.environ.get("PYENV_SHELL")) or bool(
os.environ.get("PYENV_ROOT")
)
ASDF_INSTALLED = bool(os.environ.get("ASDF_DIR"))
PYENV_ROOT = os.path.expanduser(
os.path.expandvars(os.environ.get("PYENV_ROOT", "~/.pyenv"))
)
ASDF_DATA_DIR = os.path.expanduser(
os.path.expandvars(os.environ.get("ASDF_DATA_DIR", "~/.asdf"))
)
IS_64BIT_OS = None
SYSTEM_ARCH = platform.architecture()[0]
if sys.maxsize > 2 ** 32:
IS_64BIT_OS = platform.machine() == "AMD64"
else:
IS_64BIT_OS = False
IGNORE_UNSUPPORTED = bool(os.environ.get("PYTHONFINDER_IGNORE_UNSUPPORTED", False))
MYPY_RUNNING = os.environ.get("MYPY_RUNNING", is_type_checking())
|
Fix typing check for pythonfinder
|
## Code Before:
from __future__ import print_function, absolute_import
import os
import platform
import sys
def is_type_checking():
from typing import TYPE_CHECKING
return TYPE_CHECKING
PYENV_INSTALLED = bool(os.environ.get("PYENV_SHELL")) or bool(
os.environ.get("PYENV_ROOT")
)
ASDF_INSTALLED = bool(os.environ.get("ASDF_DIR"))
PYENV_ROOT = os.path.expanduser(
os.path.expandvars(os.environ.get("PYENV_ROOT", "~/.pyenv"))
)
ASDF_DATA_DIR = os.path.expanduser(
os.path.expandvars(os.environ.get("ASDF_DATA_DIR", "~/.asdf"))
)
IS_64BIT_OS = None
SYSTEM_ARCH = platform.architecture()[0]
if sys.maxsize > 2 ** 32:
IS_64BIT_OS = platform.machine() == "AMD64"
else:
IS_64BIT_OS = False
IGNORE_UNSUPPORTED = bool(os.environ.get("PYTHONFINDER_IGNORE_UNSUPPORTED", False))
MYPY_RUNNING = os.environ.get("MYPY_RUNNING", is_type_checking())
## Instruction:
Fix typing check for pythonfinder
## Code After:
from __future__ import print_function, absolute_import
import os
import platform
import sys
def is_type_checking():
try:
from typing import TYPE_CHECKING
except ImportError:
return False
return TYPE_CHECKING
PYENV_INSTALLED = bool(os.environ.get("PYENV_SHELL")) or bool(
os.environ.get("PYENV_ROOT")
)
ASDF_INSTALLED = bool(os.environ.get("ASDF_DIR"))
PYENV_ROOT = os.path.expanduser(
os.path.expandvars(os.environ.get("PYENV_ROOT", "~/.pyenv"))
)
ASDF_DATA_DIR = os.path.expanduser(
os.path.expandvars(os.environ.get("ASDF_DATA_DIR", "~/.asdf"))
)
IS_64BIT_OS = None
SYSTEM_ARCH = platform.architecture()[0]
if sys.maxsize > 2 ** 32:
IS_64BIT_OS = platform.machine() == "AMD64"
else:
IS_64BIT_OS = False
IGNORE_UNSUPPORTED = bool(os.environ.get("PYTHONFINDER_IGNORE_UNSUPPORTED", False))
MYPY_RUNNING = os.environ.get("MYPY_RUNNING", is_type_checking())
|
from __future__ import print_function, absolute_import
import os
import platform
import sys
def is_type_checking():
+ try:
- from typing import TYPE_CHECKING
+ from typing import TYPE_CHECKING
? ++++
+ except ImportError:
+ return False
return TYPE_CHECKING
PYENV_INSTALLED = bool(os.environ.get("PYENV_SHELL")) or bool(
os.environ.get("PYENV_ROOT")
)
ASDF_INSTALLED = bool(os.environ.get("ASDF_DIR"))
PYENV_ROOT = os.path.expanduser(
os.path.expandvars(os.environ.get("PYENV_ROOT", "~/.pyenv"))
)
ASDF_DATA_DIR = os.path.expanduser(
os.path.expandvars(os.environ.get("ASDF_DATA_DIR", "~/.asdf"))
)
IS_64BIT_OS = None
SYSTEM_ARCH = platform.architecture()[0]
if sys.maxsize > 2 ** 32:
IS_64BIT_OS = platform.machine() == "AMD64"
else:
IS_64BIT_OS = False
IGNORE_UNSUPPORTED = bool(os.environ.get("PYTHONFINDER_IGNORE_UNSUPPORTED", False))
MYPY_RUNNING = os.environ.get("MYPY_RUNNING", is_type_checking())
|
b51e0ff9407f8a609be580d8fcb9cad6cfd267d8
|
setup.py
|
setup.py
|
try:
from setuptools.core import setup
except ImportError:
from distutils.core import setup
PACKAGE = 'django-render-as'
VERSION = '1.1'
package_data = {
'render_as': [
'templates/avoid_clash_with_real_app/*.html',
'templates/render_as/*.html',
],
}
setup(
name=PACKAGE, version=VERSION,
description="Template rendering indirector based on object class",
packages=[
'render_as',
'render_as/templatetags',
],
package_data=package_data,
license='MIT',
author='James Aylett',
author_email='[email protected]',
install_requires=[
'Django~=1.10',
],
classifiers=[
'Intended Audience :: Developers',
'Framework :: Django',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
|
try:
from setuptools.core import setup
except ImportError:
from distutils.core import setup
PACKAGE = 'django-render-as'
VERSION = '1.2'
package_data = {
'render_as': [
'test_templates/avoid_clash_with_real_app/*.html',
'test_templates/render_as/*.html',
],
}
setup(
name=PACKAGE, version=VERSION,
description="Template rendering indirector based on object class",
packages=[
'render_as',
'render_as/templatetags',
],
package_data=package_data,
license='MIT',
author='James Aylett',
author_email='[email protected]',
install_requires=[
'Django~=1.10',
],
classifiers=[
'Intended Audience :: Developers',
'Framework :: Django',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
|
Include test templates in distributions.
|
Include test templates in distributions.
This probably wasn't working before, although apparently I didn't
notice. But then no one really runs tests for their 3PA, do they?
This is v1.2.
|
Python
|
mit
|
jaylett/django-render-as,jaylett/django-render-as
|
try:
from setuptools.core import setup
except ImportError:
from distutils.core import setup
PACKAGE = 'django-render-as'
- VERSION = '1.1'
+ VERSION = '1.2'
package_data = {
'render_as': [
- 'templates/avoid_clash_with_real_app/*.html',
+ 'test_templates/avoid_clash_with_real_app/*.html',
- 'templates/render_as/*.html',
+ 'test_templates/render_as/*.html',
],
}
setup(
name=PACKAGE, version=VERSION,
description="Template rendering indirector based on object class",
packages=[
'render_as',
'render_as/templatetags',
],
package_data=package_data,
license='MIT',
author='James Aylett',
author_email='[email protected]',
install_requires=[
'Django~=1.10',
],
classifiers=[
'Intended Audience :: Developers',
'Framework :: Django',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
|
Include test templates in distributions.
|
## Code Before:
try:
from setuptools.core import setup
except ImportError:
from distutils.core import setup
PACKAGE = 'django-render-as'
VERSION = '1.1'
package_data = {
'render_as': [
'templates/avoid_clash_with_real_app/*.html',
'templates/render_as/*.html',
],
}
setup(
name=PACKAGE, version=VERSION,
description="Template rendering indirector based on object class",
packages=[
'render_as',
'render_as/templatetags',
],
package_data=package_data,
license='MIT',
author='James Aylett',
author_email='[email protected]',
install_requires=[
'Django~=1.10',
],
classifiers=[
'Intended Audience :: Developers',
'Framework :: Django',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
## Instruction:
Include test templates in distributions.
## Code After:
try:
from setuptools.core import setup
except ImportError:
from distutils.core import setup
PACKAGE = 'django-render-as'
VERSION = '1.2'
package_data = {
'render_as': [
'test_templates/avoid_clash_with_real_app/*.html',
'test_templates/render_as/*.html',
],
}
setup(
name=PACKAGE, version=VERSION,
description="Template rendering indirector based on object class",
packages=[
'render_as',
'render_as/templatetags',
],
package_data=package_data,
license='MIT',
author='James Aylett',
author_email='[email protected]',
install_requires=[
'Django~=1.10',
],
classifiers=[
'Intended Audience :: Developers',
'Framework :: Django',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
|
try:
from setuptools.core import setup
except ImportError:
from distutils.core import setup
PACKAGE = 'django-render-as'
- VERSION = '1.1'
? ^
+ VERSION = '1.2'
? ^
package_data = {
'render_as': [
- 'templates/avoid_clash_with_real_app/*.html',
+ 'test_templates/avoid_clash_with_real_app/*.html',
? +++++
- 'templates/render_as/*.html',
+ 'test_templates/render_as/*.html',
? +++++
],
}
setup(
name=PACKAGE, version=VERSION,
description="Template rendering indirector based on object class",
packages=[
'render_as',
'render_as/templatetags',
],
package_data=package_data,
license='MIT',
author='James Aylett',
author_email='[email protected]',
install_requires=[
'Django~=1.10',
],
classifiers=[
'Intended Audience :: Developers',
'Framework :: Django',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
|
d12884572175cc74ea9e410909128e590a29d1d8
|
pygments/styles/igor.py
|
pygments/styles/igor.py
|
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic
class IgorStyle(Style):
default_style = ""
styles = {
Comment: 'italic #FF0000',
Keyword: '#0000FF',
Name.Function: '#C34E00',
Name.Class: '#007575',
String: '#009C00'
}
|
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic
class IgorStyle(Style):
"""
Pygments version of the official colors for Igor Pro procedures.
"""
default_style = ""
styles = {
Comment: 'italic #FF0000',
Keyword: '#0000FF',
Name.Function: '#C34E00',
Name.Decorator: '#CC00A3',
Name.Class: '#007575',
String: '#009C00'
}
|
Add class comment and a custom color for the decorator
|
Add class comment and a custom color for the decorator
|
Python
|
bsd-2-clause
|
spencerlyon2/pygments,spencerlyon2/pygments,spencerlyon2/pygments,spencerlyon2/pygments,spencerlyon2/pygments,spencerlyon2/pygments,spencerlyon2/pygments,spencerlyon2/pygments,spencerlyon2/pygments,spencerlyon2/pygments,spencerlyon2/pygments,spencerlyon2/pygments,spencerlyon2/pygments,spencerlyon2/pygments,spencerlyon2/pygments,spencerlyon2/pygments,spencerlyon2/pygments,spencerlyon2/pygments
|
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic
class IgorStyle(Style):
+ """
+ Pygments version of the official colors for Igor Pro procedures.
+ """
default_style = ""
styles = {
Comment: 'italic #FF0000',
Keyword: '#0000FF',
Name.Function: '#C34E00',
+ Name.Decorator: '#CC00A3',
Name.Class: '#007575',
String: '#009C00'
}
|
Add class comment and a custom color for the decorator
|
## Code Before:
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic
class IgorStyle(Style):
default_style = ""
styles = {
Comment: 'italic #FF0000',
Keyword: '#0000FF',
Name.Function: '#C34E00',
Name.Class: '#007575',
String: '#009C00'
}
## Instruction:
Add class comment and a custom color for the decorator
## Code After:
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic
class IgorStyle(Style):
"""
Pygments version of the official colors for Igor Pro procedures.
"""
default_style = ""
styles = {
Comment: 'italic #FF0000',
Keyword: '#0000FF',
Name.Function: '#C34E00',
Name.Decorator: '#CC00A3',
Name.Class: '#007575',
String: '#009C00'
}
|
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic
class IgorStyle(Style):
+ """
+ Pygments version of the official colors for Igor Pro procedures.
+ """
default_style = ""
styles = {
Comment: 'italic #FF0000',
Keyword: '#0000FF',
Name.Function: '#C34E00',
+ Name.Decorator: '#CC00A3',
Name.Class: '#007575',
String: '#009C00'
}
|
74ce850d7db766328e2931f5a8119b7e2e5b1ded
|
examples/basic_example.py
|
examples/basic_example.py
|
'''
A simple script using sparqllib and rdflib to retrieve a JSON representation
of some information about Barack Obama from dbpedia.
'''
from sparqllib import Query
from rdflib import BNode, Literal
from rdflib.namespace import FOAF
from pprint import pprint
if __name__ == "__main__":
# construct the query variables (the explict names are optional)
obama, relation, value = BNode("Obama"), BNode("relation"), BNode("value")
# construct the query itself, selecting the relation and value variables
q = Query(result_vars=[relation, value])
# get everyone with the name Barack Obama
q.add(subject=obama, relationship=FOAF.name,
object=Literal("Barack Obama", lang="en"))
# get every relation these people have to any object
q.add(subject=obama, relationship=relation, object=value)
# limit the results to the first 50 distince pairs
q.result_limit = 50
print(str(q))
print(pprint(q.execute()))
|
'''
A simple script using sparqllib and rdflib to retrieve a JSON representation
of some information about Barack Obama from dbpedia.
'''
from sparqllib import Query
from rdflib import BNode, Literal
from rdflib.namespace import FOAF
from pprint import pprint
def main():
# construct the query variables (the explict names are optional)
obama, relation, value = BNode("Obama"), BNode("relation"), BNode("value")
# construct the query itself, selecting the relation and value variables
q = Query(result_vars=[relation, value])
# get everyone with the name Barack Obama
q.add(subject=obama, relationship=FOAF.name,
object=Literal("Barack Obama", lang="en"))
# get every relation these people have to any object
q.add(subject=obama, relationship=relation, object=value)
# limit the results to the first 50 distince pairs
q.result_limit = 50
print(str(q))
print(pprint(q.execute()))
if __name__ == "__main__":
main()
|
Switch to main method in examples
|
Switch to main method in examples
|
Python
|
mit
|
ALSchwalm/sparqllib
|
'''
A simple script using sparqllib and rdflib to retrieve a JSON representation
of some information about Barack Obama from dbpedia.
'''
from sparqllib import Query
from rdflib import BNode, Literal
from rdflib.namespace import FOAF
from pprint import pprint
- if __name__ == "__main__":
+ def main():
# construct the query variables (the explict names are optional)
obama, relation, value = BNode("Obama"), BNode("relation"), BNode("value")
# construct the query itself, selecting the relation and value variables
q = Query(result_vars=[relation, value])
# get everyone with the name Barack Obama
q.add(subject=obama, relationship=FOAF.name,
object=Literal("Barack Obama", lang="en"))
# get every relation these people have to any object
q.add(subject=obama, relationship=relation, object=value)
# limit the results to the first 50 distince pairs
q.result_limit = 50
print(str(q))
print(pprint(q.execute()))
+ if __name__ == "__main__":
+ main()
+
|
Switch to main method in examples
|
## Code Before:
'''
A simple script using sparqllib and rdflib to retrieve a JSON representation
of some information about Barack Obama from dbpedia.
'''
from sparqllib import Query
from rdflib import BNode, Literal
from rdflib.namespace import FOAF
from pprint import pprint
if __name__ == "__main__":
# construct the query variables (the explict names are optional)
obama, relation, value = BNode("Obama"), BNode("relation"), BNode("value")
# construct the query itself, selecting the relation and value variables
q = Query(result_vars=[relation, value])
# get everyone with the name Barack Obama
q.add(subject=obama, relationship=FOAF.name,
object=Literal("Barack Obama", lang="en"))
# get every relation these people have to any object
q.add(subject=obama, relationship=relation, object=value)
# limit the results to the first 50 distince pairs
q.result_limit = 50
print(str(q))
print(pprint(q.execute()))
## Instruction:
Switch to main method in examples
## Code After:
'''
A simple script using sparqllib and rdflib to retrieve a JSON representation
of some information about Barack Obama from dbpedia.
'''
from sparqllib import Query
from rdflib import BNode, Literal
from rdflib.namespace import FOAF
from pprint import pprint
def main():
# construct the query variables (the explict names are optional)
obama, relation, value = BNode("Obama"), BNode("relation"), BNode("value")
# construct the query itself, selecting the relation and value variables
q = Query(result_vars=[relation, value])
# get everyone with the name Barack Obama
q.add(subject=obama, relationship=FOAF.name,
object=Literal("Barack Obama", lang="en"))
# get every relation these people have to any object
q.add(subject=obama, relationship=relation, object=value)
# limit the results to the first 50 distince pairs
q.result_limit = 50
print(str(q))
print(pprint(q.execute()))
if __name__ == "__main__":
main()
|
'''
A simple script using sparqllib and rdflib to retrieve a JSON representation
of some information about Barack Obama from dbpedia.
'''
from sparqllib import Query
from rdflib import BNode, Literal
from rdflib.namespace import FOAF
from pprint import pprint
- if __name__ == "__main__":
+ def main():
# construct the query variables (the explict names are optional)
obama, relation, value = BNode("Obama"), BNode("relation"), BNode("value")
# construct the query itself, selecting the relation and value variables
q = Query(result_vars=[relation, value])
# get everyone with the name Barack Obama
q.add(subject=obama, relationship=FOAF.name,
object=Literal("Barack Obama", lang="en"))
# get every relation these people have to any object
q.add(subject=obama, relationship=relation, object=value)
# limit the results to the first 50 distince pairs
q.result_limit = 50
print(str(q))
print(pprint(q.execute()))
+
+ if __name__ == "__main__":
+ main()
|
2f280e34762ad4910ff9e5041c2bf24f8283368c
|
src-backend/registration/tests/test_user.py
|
src-backend/registration/tests/test_user.py
|
from django.test import TestCase
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from rest_framework.authtoken.models import Token
class UserTest(TestCase):
def setUp(self):
self.test_user = User.objects.create_user('username', '[email protected]', 'password')
self.test_user.save()
def test_user_has_token(self):
try:
token = Token.objects.get(user=self.test_user)
self.assertFalse(token is None)
except ObjectDoesNotExist:
self.assertTrue(False)
|
from django.test import TestCase
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
from nose.tools import assert_false
class UserTest(TestCase):
def setUp(self):
self.test_user = User.objects.create_user('username', '[email protected]', 'password')
self.test_user.save()
def test_user_has_token(self):
token = Token.objects.get(user=self.test_user)
assert_false(token is None)
|
Use nose test tools for the user test
|
Use nose test tools for the user test
|
Python
|
bsd-3-clause
|
SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder
|
from django.test import TestCase
from django.contrib.auth.models import User
- from django.core.exceptions import ObjectDoesNotExist
from rest_framework.authtoken.models import Token
+ from nose.tools import assert_false
class UserTest(TestCase):
def setUp(self):
self.test_user = User.objects.create_user('username', '[email protected]', 'password')
self.test_user.save()
def test_user_has_token(self):
- try:
- token = Token.objects.get(user=self.test_user)
+ token = Token.objects.get(user=self.test_user)
- self.assertFalse(token is None)
+ assert_false(token is None)
- except ObjectDoesNotExist:
- self.assertTrue(False)
|
Use nose test tools for the user test
|
## Code Before:
from django.test import TestCase
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from rest_framework.authtoken.models import Token
class UserTest(TestCase):
def setUp(self):
self.test_user = User.objects.create_user('username', '[email protected]', 'password')
self.test_user.save()
def test_user_has_token(self):
try:
token = Token.objects.get(user=self.test_user)
self.assertFalse(token is None)
except ObjectDoesNotExist:
self.assertTrue(False)
## Instruction:
Use nose test tools for the user test
## Code After:
from django.test import TestCase
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
from nose.tools import assert_false
class UserTest(TestCase):
def setUp(self):
self.test_user = User.objects.create_user('username', '[email protected]', 'password')
self.test_user.save()
def test_user_has_token(self):
token = Token.objects.get(user=self.test_user)
assert_false(token is None)
|
from django.test import TestCase
from django.contrib.auth.models import User
- from django.core.exceptions import ObjectDoesNotExist
from rest_framework.authtoken.models import Token
+ from nose.tools import assert_false
class UserTest(TestCase):
def setUp(self):
self.test_user = User.objects.create_user('username', '[email protected]', 'password')
self.test_user.save()
def test_user_has_token(self):
- try:
- token = Token.objects.get(user=self.test_user)
? ----
+ token = Token.objects.get(user=self.test_user)
- self.assertFalse(token is None)
? --------- ^
+ assert_false(token is None)
? ^^
- except ObjectDoesNotExist:
- self.assertTrue(False)
|
65b3a7fa7ae7ee467d3e8ef32b9a00b99b095c3e
|
humblemedia/processing_spooler.py
|
humblemedia/processing_spooler.py
|
import uwsgi
import django
django.setup()
from resources.processing import BaseProcessor
from resources.models import Attachment
def content_processing(env):
print (env)
resource_id = int(env.get(b"id"))
processor_name = env.get(b"processor").decode()
att = Attachment.objects.get(id=resource_id)
for cls in BaseProcessor.__subclasses__():
if cls.__name__ == processor_name:
instance = cls(att)
instance.process()
break
return uwsgi.SPOOL_OK
uwsgi.spooler = content_processing
|
import uwsgi
import django
django.setup()
from resources.processing import BaseProcessor
from resources.models import Attachment
def content_processing(env):
print (env)
resource_id = int(env.get(b"id"))
processor_name = env.get(b"processor").decode()
try:
att = Attachment.objects.get(id=resource_id)
except Attachment.DoesNotExist:
return uwsgi.SPOOL_OK
for cls in BaseProcessor.__subclasses__():
if cls.__name__ == processor_name:
instance = cls(att)
instance.process()
break
return uwsgi.SPOOL_OK
uwsgi.spooler = content_processing
|
Return ok if db object does not exist in spooler
|
Return ok if db object does not exist in spooler
|
Python
|
mit
|
vladimiroff/humble-media,vladimiroff/humble-media
|
import uwsgi
import django
django.setup()
from resources.processing import BaseProcessor
from resources.models import Attachment
def content_processing(env):
print (env)
resource_id = int(env.get(b"id"))
processor_name = env.get(b"processor").decode()
+ try:
- att = Attachment.objects.get(id=resource_id)
+ att = Attachment.objects.get(id=resource_id)
+ except Attachment.DoesNotExist:
+ return uwsgi.SPOOL_OK
for cls in BaseProcessor.__subclasses__():
if cls.__name__ == processor_name:
instance = cls(att)
instance.process()
break
return uwsgi.SPOOL_OK
uwsgi.spooler = content_processing
|
Return ok if db object does not exist in spooler
|
## Code Before:
import uwsgi
import django
django.setup()
from resources.processing import BaseProcessor
from resources.models import Attachment
def content_processing(env):
print (env)
resource_id = int(env.get(b"id"))
processor_name = env.get(b"processor").decode()
att = Attachment.objects.get(id=resource_id)
for cls in BaseProcessor.__subclasses__():
if cls.__name__ == processor_name:
instance = cls(att)
instance.process()
break
return uwsgi.SPOOL_OK
uwsgi.spooler = content_processing
## Instruction:
Return ok if db object does not exist in spooler
## Code After:
import uwsgi
import django
django.setup()
from resources.processing import BaseProcessor
from resources.models import Attachment
def content_processing(env):
print (env)
resource_id = int(env.get(b"id"))
processor_name = env.get(b"processor").decode()
try:
att = Attachment.objects.get(id=resource_id)
except Attachment.DoesNotExist:
return uwsgi.SPOOL_OK
for cls in BaseProcessor.__subclasses__():
if cls.__name__ == processor_name:
instance = cls(att)
instance.process()
break
return uwsgi.SPOOL_OK
uwsgi.spooler = content_processing
|
import uwsgi
import django
django.setup()
from resources.processing import BaseProcessor
from resources.models import Attachment
def content_processing(env):
print (env)
resource_id = int(env.get(b"id"))
processor_name = env.get(b"processor").decode()
+ try:
- att = Attachment.objects.get(id=resource_id)
+ att = Attachment.objects.get(id=resource_id)
? ++++
+ except Attachment.DoesNotExist:
+ return uwsgi.SPOOL_OK
for cls in BaseProcessor.__subclasses__():
if cls.__name__ == processor_name:
instance = cls(att)
instance.process()
break
return uwsgi.SPOOL_OK
uwsgi.spooler = content_processing
|
c47f93796bfc4f9026e5451121de7a419ed88e96
|
lobster/cmssw/data/merge_cfg.py
|
lobster/cmssw/data/merge_cfg.py
|
import FWCore.ParameterSet.Config as cms
from FWCore.ParameterSet.VarParsing import VarParsing
import subprocess
import os
import sys
options = VarParsing('analysis')
options.register('output', mytype=VarParsing.varType.string)
options.parseArguments()
process = cms.Process("PickEvent")
process.source = cms.Source ("PoolSource",
fileNames = cms.untracked.vstring(''),
duplicateCheckMode = cms.untracked.string('noDuplicateCheck')
)
process.out = cms.OutputModule("PoolOutputModule",
fileName = cms.untracked.string(options.output)
)
process.end = cms.EndPath(process.out)
|
import FWCore.ParameterSet.Config as cms
from FWCore.ParameterSet.VarParsing import VarParsing
import subprocess
import os
import sys
options = VarParsing('analysis')
options.register('output', mytype=VarParsing.varType.string)
options.register('loginterval', 1000, mytype=VarParsing.varType.int)
options.parseArguments()
process = cms.Process("PickEvent")
process.load('FWCore.MessageService.MessageLogger_cfi')
process.MessageLogger.cerr.FwkReport.reportEvery = options.loginterval
process.source = cms.Source ("PoolSource",
fileNames = cms.untracked.vstring(''),
duplicateCheckMode = cms.untracked.string('noDuplicateCheck')
)
process.out = cms.OutputModule("PoolOutputModule",
fileName = cms.untracked.string(options.output)
)
process.end = cms.EndPath(process.out)
|
Trim down merge verbosity to avoid overly large log files.
|
Trim down merge verbosity to avoid overly large log files.
|
Python
|
mit
|
matz-e/lobster,matz-e/lobster,matz-e/lobster
|
import FWCore.ParameterSet.Config as cms
from FWCore.ParameterSet.VarParsing import VarParsing
import subprocess
import os
import sys
options = VarParsing('analysis')
options.register('output', mytype=VarParsing.varType.string)
+ options.register('loginterval', 1000, mytype=VarParsing.varType.int)
options.parseArguments()
process = cms.Process("PickEvent")
+
+ process.load('FWCore.MessageService.MessageLogger_cfi')
+ process.MessageLogger.cerr.FwkReport.reportEvery = options.loginterval
+
process.source = cms.Source ("PoolSource",
fileNames = cms.untracked.vstring(''),
duplicateCheckMode = cms.untracked.string('noDuplicateCheck')
)
process.out = cms.OutputModule("PoolOutputModule",
fileName = cms.untracked.string(options.output)
)
process.end = cms.EndPath(process.out)
|
Trim down merge verbosity to avoid overly large log files.
|
## Code Before:
import FWCore.ParameterSet.Config as cms
from FWCore.ParameterSet.VarParsing import VarParsing
import subprocess
import os
import sys
options = VarParsing('analysis')
options.register('output', mytype=VarParsing.varType.string)
options.parseArguments()
process = cms.Process("PickEvent")
process.source = cms.Source ("PoolSource",
fileNames = cms.untracked.vstring(''),
duplicateCheckMode = cms.untracked.string('noDuplicateCheck')
)
process.out = cms.OutputModule("PoolOutputModule",
fileName = cms.untracked.string(options.output)
)
process.end = cms.EndPath(process.out)
## Instruction:
Trim down merge verbosity to avoid overly large log files.
## Code After:
import FWCore.ParameterSet.Config as cms
from FWCore.ParameterSet.VarParsing import VarParsing
import subprocess
import os
import sys
options = VarParsing('analysis')
options.register('output', mytype=VarParsing.varType.string)
options.register('loginterval', 1000, mytype=VarParsing.varType.int)
options.parseArguments()
process = cms.Process("PickEvent")
process.load('FWCore.MessageService.MessageLogger_cfi')
process.MessageLogger.cerr.FwkReport.reportEvery = options.loginterval
process.source = cms.Source ("PoolSource",
fileNames = cms.untracked.vstring(''),
duplicateCheckMode = cms.untracked.string('noDuplicateCheck')
)
process.out = cms.OutputModule("PoolOutputModule",
fileName = cms.untracked.string(options.output)
)
process.end = cms.EndPath(process.out)
|
import FWCore.ParameterSet.Config as cms
from FWCore.ParameterSet.VarParsing import VarParsing
import subprocess
import os
import sys
options = VarParsing('analysis')
options.register('output', mytype=VarParsing.varType.string)
+ options.register('loginterval', 1000, mytype=VarParsing.varType.int)
options.parseArguments()
process = cms.Process("PickEvent")
+
+ process.load('FWCore.MessageService.MessageLogger_cfi')
+ process.MessageLogger.cerr.FwkReport.reportEvery = options.loginterval
+
process.source = cms.Source ("PoolSource",
fileNames = cms.untracked.vstring(''),
duplicateCheckMode = cms.untracked.string('noDuplicateCheck')
)
process.out = cms.OutputModule("PoolOutputModule",
fileName = cms.untracked.string(options.output)
)
process.end = cms.EndPath(process.out)
|
3298fff0ded49c21897a7387a7f3093c351ae04f
|
scripts/run_psql.py
|
scripts/run_psql.py
|
from acoustid.script import run_script
import subprocess
def main(script, opts, args):
subprocess.call(['psql'] + script.config.database.create_psql_args())
run_script(main)
|
from acoustid.script import run_script
import os
def main(script, opts, args):
os.execlp('psql', 'psql', *script.config.database.create_psql_args())
run_script(main)
|
Use os.exelp to launch psql
|
Use os.exelp to launch psql
|
Python
|
mit
|
lalinsky/acoustid-server,lalinsky/acoustid-server,lalinsky/acoustid-server,lalinsky/acoustid-server
|
from acoustid.script import run_script
- import subprocess
+ import os
def main(script, opts, args):
- subprocess.call(['psql'] + script.config.database.create_psql_args())
+ os.execlp('psql', 'psql', *script.config.database.create_psql_args())
run_script(main)
|
Use os.exelp to launch psql
|
## Code Before:
from acoustid.script import run_script
import subprocess
def main(script, opts, args):
subprocess.call(['psql'] + script.config.database.create_psql_args())
run_script(main)
## Instruction:
Use os.exelp to launch psql
## Code After:
from acoustid.script import run_script
import os
def main(script, opts, args):
os.execlp('psql', 'psql', *script.config.database.create_psql_args())
run_script(main)
|
from acoustid.script import run_script
- import subprocess
+ import os
def main(script, opts, args):
- subprocess.call(['psql'] + script.config.database.create_psql_args())
? ----- --- - ^ - ^ ^
+ os.execlp('psql', 'psql', *script.config.database.create_psql_args())
? +++ ^ ^ ^^^^^^^ +
run_script(main)
|
fa418ed5a6769a369d4b5cddfc6e215f551c57cf
|
events/cms_app.py
|
events/cms_app.py
|
from cms.app_base import CMSApp
from cms.apphook_pool import apphook_pool
from django.utils.translation import ugettext_lazy as _
class EventsApphook(CMSApp):
name = _("Events Apphook")
urls = ["events.urls"]
app_name = "events"
apphook_pool.register(EventsApphook)
|
from cms.app_base import CMSApp
from cms.apphook_pool import apphook_pool
from django.utils.translation import ugettext_lazy as _
class EventsApphook(CMSApp):
name = _("Events Apphook")
urls = ["events.urls"]
app_name = "events"
namespace = "events"
apphook_pool.register(EventsApphook)
|
Add namespace to support djangoCMS v3
|
Add namespace to support djangoCMS v3
|
Python
|
bsd-3-clause
|
theherk/django-theherk-events
|
from cms.app_base import CMSApp
from cms.apphook_pool import apphook_pool
from django.utils.translation import ugettext_lazy as _
class EventsApphook(CMSApp):
name = _("Events Apphook")
urls = ["events.urls"]
app_name = "events"
+ namespace = "events"
apphook_pool.register(EventsApphook)
|
Add namespace to support djangoCMS v3
|
## Code Before:
from cms.app_base import CMSApp
from cms.apphook_pool import apphook_pool
from django.utils.translation import ugettext_lazy as _
class EventsApphook(CMSApp):
name = _("Events Apphook")
urls = ["events.urls"]
app_name = "events"
apphook_pool.register(EventsApphook)
## Instruction:
Add namespace to support djangoCMS v3
## Code After:
from cms.app_base import CMSApp
from cms.apphook_pool import apphook_pool
from django.utils.translation import ugettext_lazy as _
class EventsApphook(CMSApp):
name = _("Events Apphook")
urls = ["events.urls"]
app_name = "events"
namespace = "events"
apphook_pool.register(EventsApphook)
|
from cms.app_base import CMSApp
from cms.apphook_pool import apphook_pool
from django.utils.translation import ugettext_lazy as _
class EventsApphook(CMSApp):
name = _("Events Apphook")
urls = ["events.urls"]
app_name = "events"
+ namespace = "events"
apphook_pool.register(EventsApphook)
|
b2f40d9b1ed9d78a4fdc1f73e64575a26d117d0c
|
nuitka/tools/release/msi_create/__main__.py
|
nuitka/tools/release/msi_create/__main__.py
|
from nuitka.tools.release.MSI import createMSIPackage
def main():
createMSIPackage()
if __name__ == "__main__":
main()
|
import os
import shutil
from nuitka.tools.Basics import goHome
from nuitka.tools.release.MSI import createMSIPackage
def main():
goHome()
msi_filename = createMSIPackage()
if not os.path.exists("msi"):
os.makedirs("msi")
shutil.move(msi_filename, "msi")
if __name__ == "__main__":
main()
|
Copy created MSI to dedicated folder.
|
Release: Copy created MSI to dedicated folder.
* The "dist" folder is erased each time and we determine the result
name from being the only MSI file.
|
Python
|
apache-2.0
|
kayhayen/Nuitka,kayhayen/Nuitka,kayhayen/Nuitka,kayhayen/Nuitka
|
+ import os
+ import shutil
+
+ from nuitka.tools.Basics import goHome
from nuitka.tools.release.MSI import createMSIPackage
def main():
- createMSIPackage()
+ goHome()
+
+ msi_filename = createMSIPackage()
+
+ if not os.path.exists("msi"):
+ os.makedirs("msi")
+
+ shutil.move(msi_filename, "msi")
+
if __name__ == "__main__":
main()
|
Copy created MSI to dedicated folder.
|
## Code Before:
from nuitka.tools.release.MSI import createMSIPackage
def main():
createMSIPackage()
if __name__ == "__main__":
main()
## Instruction:
Copy created MSI to dedicated folder.
## Code After:
import os
import shutil
from nuitka.tools.Basics import goHome
from nuitka.tools.release.MSI import createMSIPackage
def main():
goHome()
msi_filename = createMSIPackage()
if not os.path.exists("msi"):
os.makedirs("msi")
shutil.move(msi_filename, "msi")
if __name__ == "__main__":
main()
|
+ import os
+ import shutil
+
+ from nuitka.tools.Basics import goHome
from nuitka.tools.release.MSI import createMSIPackage
def main():
- createMSIPackage()
+ goHome()
+
+ msi_filename = createMSIPackage()
+
+ if not os.path.exists("msi"):
+ os.makedirs("msi")
+
+ shutil.move(msi_filename, "msi")
+
if __name__ == "__main__":
main()
|
3acd7d885e6c660c3acb0b584b7ed07c8a1a4df3
|
docs/source/_examples/myclient.py
|
docs/source/_examples/myclient.py
|
import socket, ssl
import h11
class MyHttpClient:
def __init__(self, host, port):
self.sock = socket.create_connection((host, port))
if port == 443:
self.sock = ssl.wrap_socket(self.sock)
self.conn = h11.Connection(our_role=h11.CLIENT)
def send(self, *events):
for event in events:
data = self.conn.send(event)
if data is None:
# event was a ConnectionClosed(), meaning that we won't be
# sending any more data:
self.sock.shutdown(socket.SHUT_WR)
else:
self.sock.sendall(data)
# max_bytes_per_recv intentionally set low for pedagogical purposes
def next_event(self, max_bytes_per_recv=200):
while True:
# If we already have a complete event buffered internally, just
# return that. Otherwise, read some data, add it to the internal
# buffer, and then try again.
event = self.conn.next_event()
if event is h11.NEED_DATA:
self.conn.receive_data(self.sock.recv(max_bytes_per_recv))
continue
return event
|
import socket, ssl
import h11
class MyHttpClient:
def __init__(self, host, port):
self.sock = socket.create_connection((host, port))
if port == 443:
ctx = ssl.create_default_context()
self.sock = ctx.wrap_socket(self.sock, server_hostname=host)
self.conn = h11.Connection(our_role=h11.CLIENT)
def send(self, *events):
for event in events:
data = self.conn.send(event)
if data is None:
# event was a ConnectionClosed(), meaning that we won't be
# sending any more data:
self.sock.shutdown(socket.SHUT_WR)
else:
self.sock.sendall(data)
# max_bytes_per_recv intentionally set low for pedagogical purposes
def next_event(self, max_bytes_per_recv=200):
while True:
# If we already have a complete event buffered internally, just
# return that. Otherwise, read some data, add it to the internal
# buffer, and then try again.
event = self.conn.next_event()
if event is h11.NEED_DATA:
self.conn.receive_data(self.sock.recv(max_bytes_per_recv))
continue
return event
|
Support SNI in the example client
|
Support SNI in the example client
Fixes: gh-36
|
Python
|
mit
|
python-hyper/h11,njsmith/h11
|
import socket, ssl
import h11
class MyHttpClient:
def __init__(self, host, port):
self.sock = socket.create_connection((host, port))
if port == 443:
+ ctx = ssl.create_default_context()
- self.sock = ssl.wrap_socket(self.sock)
+ self.sock = ctx.wrap_socket(self.sock, server_hostname=host)
self.conn = h11.Connection(our_role=h11.CLIENT)
def send(self, *events):
for event in events:
data = self.conn.send(event)
if data is None:
# event was a ConnectionClosed(), meaning that we won't be
# sending any more data:
self.sock.shutdown(socket.SHUT_WR)
else:
self.sock.sendall(data)
# max_bytes_per_recv intentionally set low for pedagogical purposes
def next_event(self, max_bytes_per_recv=200):
while True:
# If we already have a complete event buffered internally, just
# return that. Otherwise, read some data, add it to the internal
# buffer, and then try again.
event = self.conn.next_event()
if event is h11.NEED_DATA:
self.conn.receive_data(self.sock.recv(max_bytes_per_recv))
continue
return event
|
Support SNI in the example client
|
## Code Before:
import socket, ssl
import h11
class MyHttpClient:
def __init__(self, host, port):
self.sock = socket.create_connection((host, port))
if port == 443:
self.sock = ssl.wrap_socket(self.sock)
self.conn = h11.Connection(our_role=h11.CLIENT)
def send(self, *events):
for event in events:
data = self.conn.send(event)
if data is None:
# event was a ConnectionClosed(), meaning that we won't be
# sending any more data:
self.sock.shutdown(socket.SHUT_WR)
else:
self.sock.sendall(data)
# max_bytes_per_recv intentionally set low for pedagogical purposes
def next_event(self, max_bytes_per_recv=200):
while True:
# If we already have a complete event buffered internally, just
# return that. Otherwise, read some data, add it to the internal
# buffer, and then try again.
event = self.conn.next_event()
if event is h11.NEED_DATA:
self.conn.receive_data(self.sock.recv(max_bytes_per_recv))
continue
return event
## Instruction:
Support SNI in the example client
## Code After:
import socket, ssl
import h11
class MyHttpClient:
def __init__(self, host, port):
self.sock = socket.create_connection((host, port))
if port == 443:
ctx = ssl.create_default_context()
self.sock = ctx.wrap_socket(self.sock, server_hostname=host)
self.conn = h11.Connection(our_role=h11.CLIENT)
def send(self, *events):
for event in events:
data = self.conn.send(event)
if data is None:
# event was a ConnectionClosed(), meaning that we won't be
# sending any more data:
self.sock.shutdown(socket.SHUT_WR)
else:
self.sock.sendall(data)
# max_bytes_per_recv intentionally set low for pedagogical purposes
def next_event(self, max_bytes_per_recv=200):
while True:
# If we already have a complete event buffered internally, just
# return that. Otherwise, read some data, add it to the internal
# buffer, and then try again.
event = self.conn.next_event()
if event is h11.NEED_DATA:
self.conn.receive_data(self.sock.recv(max_bytes_per_recv))
continue
return event
|
import socket, ssl
import h11
class MyHttpClient:
def __init__(self, host, port):
self.sock = socket.create_connection((host, port))
if port == 443:
+ ctx = ssl.create_default_context()
- self.sock = ssl.wrap_socket(self.sock)
? ^^^
+ self.sock = ctx.wrap_socket(self.sock, server_hostname=host)
? ^^^ ++++++++++++++++++++++
self.conn = h11.Connection(our_role=h11.CLIENT)
def send(self, *events):
for event in events:
data = self.conn.send(event)
if data is None:
# event was a ConnectionClosed(), meaning that we won't be
# sending any more data:
self.sock.shutdown(socket.SHUT_WR)
else:
self.sock.sendall(data)
# max_bytes_per_recv intentionally set low for pedagogical purposes
def next_event(self, max_bytes_per_recv=200):
while True:
# If we already have a complete event buffered internally, just
# return that. Otherwise, read some data, add it to the internal
# buffer, and then try again.
event = self.conn.next_event()
if event is h11.NEED_DATA:
self.conn.receive_data(self.sock.recv(max_bytes_per_recv))
continue
return event
|
918b001cb6d9743d3d2ee1b2bab8f14c90e1adf7
|
src/ice/rom_finder.py
|
src/ice/rom_finder.py
|
from console import Console
from rom import ROM
from functools import reduce
class ROMFinder(object):
def __init__(self, filesystem):
self.filesystem = filesystem
def roms_for_console(self, console):
"""
@param console - A console object
@returns A list of ROM objects representing all of the valid ROMs for a
given console.
Valid ROMs are defined as ROMs for which `console`'s `is_valid_rom` method
returns True.
Returns an empty list if `console` is not enabled
"""
if not console.is_enabled():
return []
paths = self.filesystem.files_in_directory(console.roms_directory())
valid_rom_paths = filter(console.is_valid_rom, paths)
return map(lambda path: ROM(path, console), valid_rom_paths)
def roms_for_consoles(self, consoles):
"""
@param consoles - An iterable list of consoles
@returns A list of all of the ROMs for all of the consoles in `consoles`
Equivalent to calling `roms_for_console` on every element of `consoles`
and combining the results
"""
assert hasattr(
consoles, '__iter__'), "Expecting an iterable list of consoles"
def rom_collector(roms, console):
roms.extend(self.roms_for_console(console))
return roms
return reduce(rom_collector, consoles, [])
|
from console import Console
from rom import ROM
from functools import reduce
class ROMFinder(object):
def __init__(self, filesystem):
self.filesystem = filesystem
def roms_for_console(self, console):
"""
@param console - A console object
@returns A list of ROM objects representing all of the valid ROMs for a
given console.
Valid ROMs are defined as ROMs for which `console`'s `is_valid_rom` method
returns True.
Returns an empty list if `console` is not enabled
"""
if not console.is_enabled():
return []
paths = self.filesystem.files_in_directory(console.roms_directory())
valid_rom_paths = filter(console.is_valid_rom, paths)
return map(lambda path: ROM(path, console), valid_rom_paths)
def roms_for_consoles(self, consoles):
"""
@param consoles - An iterable list of consoles
@returns A list of all of the ROMs for all of the consoles in `consoles`
Equivalent to calling `roms_for_console` on every element of `consoles`
and combining the results
"""
return reduce(lambda roms, console: roms + self.roms_for_console(console), consoles, [])
|
Replace 'list.extend' call with '+' operator
|
[Cleanup] Replace 'list.extend' call with '+' operator
I knew there had to be an easier way for merging lists other than `extend`. Turns out the plus operator does exactly what I need.
|
Python
|
mit
|
rdoyle1978/Ice,scottrice/Ice
|
from console import Console
from rom import ROM
from functools import reduce
class ROMFinder(object):
def __init__(self, filesystem):
self.filesystem = filesystem
def roms_for_console(self, console):
"""
@param console - A console object
@returns A list of ROM objects representing all of the valid ROMs for a
given console.
Valid ROMs are defined as ROMs for which `console`'s `is_valid_rom` method
returns True.
Returns an empty list if `console` is not enabled
"""
if not console.is_enabled():
return []
paths = self.filesystem.files_in_directory(console.roms_directory())
valid_rom_paths = filter(console.is_valid_rom, paths)
return map(lambda path: ROM(path, console), valid_rom_paths)
def roms_for_consoles(self, consoles):
"""
@param consoles - An iterable list of consoles
@returns A list of all of the ROMs for all of the consoles in `consoles`
Equivalent to calling `roms_for_console` on every element of `consoles`
and combining the results
"""
+ return reduce(lambda roms, console: roms + self.roms_for_console(console), consoles, [])
- assert hasattr(
- consoles, '__iter__'), "Expecting an iterable list of consoles"
- def rom_collector(roms, console):
- roms.extend(self.roms_for_console(console))
- return roms
- return reduce(rom_collector, consoles, [])
-
|
Replace 'list.extend' call with '+' operator
|
## Code Before:
from console import Console
from rom import ROM
from functools import reduce
class ROMFinder(object):
def __init__(self, filesystem):
self.filesystem = filesystem
def roms_for_console(self, console):
"""
@param console - A console object
@returns A list of ROM objects representing all of the valid ROMs for a
given console.
Valid ROMs are defined as ROMs for which `console`'s `is_valid_rom` method
returns True.
Returns an empty list if `console` is not enabled
"""
if not console.is_enabled():
return []
paths = self.filesystem.files_in_directory(console.roms_directory())
valid_rom_paths = filter(console.is_valid_rom, paths)
return map(lambda path: ROM(path, console), valid_rom_paths)
def roms_for_consoles(self, consoles):
"""
@param consoles - An iterable list of consoles
@returns A list of all of the ROMs for all of the consoles in `consoles`
Equivalent to calling `roms_for_console` on every element of `consoles`
and combining the results
"""
assert hasattr(
consoles, '__iter__'), "Expecting an iterable list of consoles"
def rom_collector(roms, console):
roms.extend(self.roms_for_console(console))
return roms
return reduce(rom_collector, consoles, [])
## Instruction:
Replace 'list.extend' call with '+' operator
## Code After:
from console import Console
from rom import ROM
from functools import reduce
class ROMFinder(object):
def __init__(self, filesystem):
self.filesystem = filesystem
def roms_for_console(self, console):
"""
@param console - A console object
@returns A list of ROM objects representing all of the valid ROMs for a
given console.
Valid ROMs are defined as ROMs for which `console`'s `is_valid_rom` method
returns True.
Returns an empty list if `console` is not enabled
"""
if not console.is_enabled():
return []
paths = self.filesystem.files_in_directory(console.roms_directory())
valid_rom_paths = filter(console.is_valid_rom, paths)
return map(lambda path: ROM(path, console), valid_rom_paths)
def roms_for_consoles(self, consoles):
"""
@param consoles - An iterable list of consoles
@returns A list of all of the ROMs for all of the consoles in `consoles`
Equivalent to calling `roms_for_console` on every element of `consoles`
and combining the results
"""
return reduce(lambda roms, console: roms + self.roms_for_console(console), consoles, [])
|
from console import Console
from rom import ROM
from functools import reduce
class ROMFinder(object):
def __init__(self, filesystem):
self.filesystem = filesystem
def roms_for_console(self, console):
"""
@param console - A console object
@returns A list of ROM objects representing all of the valid ROMs for a
given console.
Valid ROMs are defined as ROMs for which `console`'s `is_valid_rom` method
returns True.
Returns an empty list if `console` is not enabled
"""
if not console.is_enabled():
return []
paths = self.filesystem.files_in_directory(console.roms_directory())
valid_rom_paths = filter(console.is_valid_rom, paths)
return map(lambda path: ROM(path, console), valid_rom_paths)
def roms_for_consoles(self, consoles):
"""
@param consoles - An iterable list of consoles
@returns A list of all of the ROMs for all of the consoles in `consoles`
Equivalent to calling `roms_for_console` on every element of `consoles`
and combining the results
"""
+ return reduce(lambda roms, console: roms + self.roms_for_console(console), consoles, [])
- assert hasattr(
- consoles, '__iter__'), "Expecting an iterable list of consoles"
-
- def rom_collector(roms, console):
- roms.extend(self.roms_for_console(console))
- return roms
- return reduce(rom_collector, consoles, [])
|
33448ac669e192143655560dd52299e7069585ac
|
django_token/middleware.py
|
django_token/middleware.py
|
from django import http
from django.contrib import auth
from django.core import exceptions
class TokenMiddleware(object):
"""
Middleware that authenticates against a token in the http authorization
header.
"""
get_response = None
def __init__(self, get_response=None):
self.get_response = get_response
def __call__(self, request):
if not self.get_response:
return exceptions.ImproperlyConfigured(
'Middleware called without proper initialization')
self.process_request(request)
return self.get_response(request)
def process_request(self, request):
auth_header = request.META.get('HTTP_AUTHORIZATION', b'')
auth_header = auth_header.partition(b' ')
if auth_header[0].lower() != b'token':
return None
# If they specified an invalid token, let them know.
if not auth_header[2]:
return http.HttpResponseBadRequest("Improperly formatted token")
user = auth.authenticate(token=auth_header[2])
if user:
request.user = user
|
from django import http
from django.contrib import auth
from django.core import exceptions
class TokenMiddleware(object):
"""
Middleware that authenticates against a token in the http authorization
header.
"""
get_response = None
def __init__(self, get_response=None):
self.get_response = get_response
def __call__(self, request):
if not self.get_response:
return exceptions.ImproperlyConfigured(
'Middleware called without proper initialization')
self.process_request(request)
return self.get_response(request)
def process_request(self, request):
auth_header = str(request.META.get('HTTP_AUTHORIZATION', '')).partition(' ')
if auth_header[0].lower() != 'token':
return None
# If they specified an invalid token, let them know.
if not auth_header[2]:
return http.HttpResponseBadRequest("Improperly formatted token")
user = auth.authenticate(token=auth_header[2])
if user:
request.user = user
|
Fix for byte literals compatibility.
|
Fix for byte literals compatibility.
|
Python
|
mit
|
jasonbeverage/django-token
|
from django import http
from django.contrib import auth
from django.core import exceptions
class TokenMiddleware(object):
"""
Middleware that authenticates against a token in the http authorization
header.
"""
get_response = None
def __init__(self, get_response=None):
self.get_response = get_response
def __call__(self, request):
if not self.get_response:
return exceptions.ImproperlyConfigured(
'Middleware called without proper initialization')
self.process_request(request)
return self.get_response(request)
def process_request(self, request):
- auth_header = request.META.get('HTTP_AUTHORIZATION', b'')
+ auth_header = str(request.META.get('HTTP_AUTHORIZATION', '')).partition(' ')
- auth_header = auth_header.partition(b' ')
- if auth_header[0].lower() != b'token':
+ if auth_header[0].lower() != 'token':
return None
# If they specified an invalid token, let them know.
if not auth_header[2]:
return http.HttpResponseBadRequest("Improperly formatted token")
user = auth.authenticate(token=auth_header[2])
if user:
request.user = user
|
Fix for byte literals compatibility.
|
## Code Before:
from django import http
from django.contrib import auth
from django.core import exceptions
class TokenMiddleware(object):
"""
Middleware that authenticates against a token in the http authorization
header.
"""
get_response = None
def __init__(self, get_response=None):
self.get_response = get_response
def __call__(self, request):
if not self.get_response:
return exceptions.ImproperlyConfigured(
'Middleware called without proper initialization')
self.process_request(request)
return self.get_response(request)
def process_request(self, request):
auth_header = request.META.get('HTTP_AUTHORIZATION', b'')
auth_header = auth_header.partition(b' ')
if auth_header[0].lower() != b'token':
return None
# If they specified an invalid token, let them know.
if not auth_header[2]:
return http.HttpResponseBadRequest("Improperly formatted token")
user = auth.authenticate(token=auth_header[2])
if user:
request.user = user
## Instruction:
Fix for byte literals compatibility.
## Code After:
from django import http
from django.contrib import auth
from django.core import exceptions
class TokenMiddleware(object):
"""
Middleware that authenticates against a token in the http authorization
header.
"""
get_response = None
def __init__(self, get_response=None):
self.get_response = get_response
def __call__(self, request):
if not self.get_response:
return exceptions.ImproperlyConfigured(
'Middleware called without proper initialization')
self.process_request(request)
return self.get_response(request)
def process_request(self, request):
auth_header = str(request.META.get('HTTP_AUTHORIZATION', '')).partition(' ')
if auth_header[0].lower() != 'token':
return None
# If they specified an invalid token, let them know.
if not auth_header[2]:
return http.HttpResponseBadRequest("Improperly formatted token")
user = auth.authenticate(token=auth_header[2])
if user:
request.user = user
|
from django import http
from django.contrib import auth
from django.core import exceptions
class TokenMiddleware(object):
"""
Middleware that authenticates against a token in the http authorization
header.
"""
get_response = None
def __init__(self, get_response=None):
self.get_response = get_response
def __call__(self, request):
if not self.get_response:
return exceptions.ImproperlyConfigured(
'Middleware called without proper initialization')
self.process_request(request)
return self.get_response(request)
def process_request(self, request):
- auth_header = request.META.get('HTTP_AUTHORIZATION', b'')
? -
+ auth_header = str(request.META.get('HTTP_AUTHORIZATION', '')).partition(' ')
? ++++ ++++++++++++++++
- auth_header = auth_header.partition(b' ')
- if auth_header[0].lower() != b'token':
? -
+ if auth_header[0].lower() != 'token':
return None
# If they specified an invalid token, let them know.
if not auth_header[2]:
return http.HttpResponseBadRequest("Improperly formatted token")
user = auth.authenticate(token=auth_header[2])
if user:
request.user = user
|
89454b1e83e01a4d523b776f74429a81467762da
|
redis/utils.py
|
redis/utils.py
|
try:
import hiredis
HIREDIS_AVAILABLE = True
except ImportError:
HIREDIS_AVAILABLE = False
def from_url(url, db=None, **kwargs):
"""
Returns an active Redis client generated from the given database URL.
Will attempt to extract the database id from the path url fragment, if
none is provided.
"""
from redis.client import Redis
return Redis.from_url(url, db, **kwargs)
from contextlib import contextmanager
@contextmanager
def pipeline(redis_obj):
p = redis_obj.pipeline()
yield p
p.execute()
|
from contextlib import contextmanager
try:
import hiredis
HIREDIS_AVAILABLE = True
except ImportError:
HIREDIS_AVAILABLE = False
def from_url(url, db=None, **kwargs):
"""
Returns an active Redis client generated from the given database URL.
Will attempt to extract the database id from the path url fragment, if
none is provided.
"""
from redis.client import Redis
return Redis.from_url(url, db, **kwargs)
@contextmanager
def pipeline(redis_obj):
p = redis_obj.pipeline()
yield p
p.execute()
|
Move import statement on top for PEP8 compliancy.
|
Move import statement on top for PEP8 compliancy.
|
Python
|
mit
|
MegaByte875/redis-py,fengshao0907/redis-py,sigma-random/redis-py,sunminghong/redis-py,garnertb/redis-py,softliumin/redis-py,sirk390/redis-py,barseghyanartur/redis-py,zhangyancoder/redis-py,LTD-Beget/redis-py,boyxuper/redis-py,barseghyanartur/redis-py,dmugtasimov/redis-py,LTD-Beget/redis-py,yuruidong/redis-py,sigma-random/redis-py,VishvajitP/redis-py,boyxuper/redis-py,barseghyanartur/redis-py,thedrow/redis-py,siryuan525614/python_operation,ContextLogic/redis-py,andymccurdy/redis-py,Kazanz/redis-py,mozillazg/redis-py-doc,joshowen/redis-py,joshowen/redis-py,MrKiven/redis-py,rcrdclub/redis-py,ze-phyr-us/redis-py,RedisLabs/redis-py,maxikov/redis-py,dylanjw/redis-py,wfxiang08/redis-py,dmoliveira/redis-py,fengshao0907/redis-py,kaushik94/redis-py,sirk390/redis-py,siryuan525614/python_operation,RedisLabs/redis-py,JamieCressey/redispy,piperck/redis-py,kaushik94/redis-py,Kazanz/redis-py,5977862/redis-py,kouhou/redis-py,dmugtasimov/redis-py,wfxiang08/redis-py,fengsp/redis-py,ffrree/redis-py,harlowja/redis-py,mozillazg/redis-py-doc,VishvajitP/redis-py,sunminghong/redis-py,yuruidong/redis-py,alisaifee/redis-py,ferrero-zhang/redis-py,thedrow/redis-py,JamieCressey/redispy,MrKiven/redis-py,harlowja/redis-py,cvrebert/redis-py,lamby/redis-py,jparise/redis-py,fengsp/redis-py,piperck/redis-py,andymccurdy/redis-py,kouhou/redis-py,boyxuper/redis-py,ycaihua/redis-py,forblackking/redis-py,MegaByte875/redis-py,alisaifee/redis-py,yihuang/redis-py,ContextLogic/redis-py,5977862/redis-py,LTD-Beget/redis-py,softliumin/redis-py,ffrree/redis-py,kouhou/redis-py,redis/redis-py,joshowen/redis-py,nfvs/redis-py,ycaihua/redis-py,softliumin/redis-py,5977862/redis-py,andymccurdy/redis-py,pombredanne/redis-py,cvrebert/redis-py,JamieCressey/redispy,dmugtasimov/redis-py,maxikov/redis-py,nfvs/redis-py,fengshao0907/redis-py,ycaihua/redis-py,yuruidong/redis-py,cvrebert/redis-py,MegaByte875/redis-py,jparise/redis-py,ze-phyr-us/redis-py,sigma-random/redis-py,harlowja/redis-py,ContextLogic/redis-py,VishvajitP/redis
-py,yihuang/redis-py,ze-phyr-us/redis-py,siryuan525614/python_operation,pombredanne/redis-py,nfvs/redis-py,fengsp/redis-py,Kazanz/redis-py,rcrdclub/redis-py,pombredanne/redis-py,MrKiven/redis-py,thedrow/redis-py,kaushik94/redis-py,ffrree/redis-py,sunminghong/redis-py,ferrero-zhang/redis-py,garnertb/redis-py,sirk390/redis-py,rcrdclub/redis-py,yihuang/redis-py,forblackking/redis-py,zhangyancoder/redis-py,dmoliveira/redis-py,redis/redis-py,garnertb/redis-py,zhangyancoder/redis-py,forblackking/redis-py,piperck/redis-py,dylanjw/redis-py,lamby/redis-py,ferrero-zhang/redis-py,jparise/redis-py,dylanjw/redis-py,dmoliveira/redis-py,wfxiang08/redis-py,maxikov/redis-py,lamby/redis-py
|
+ from contextlib import contextmanager
+
+
try:
import hiredis
HIREDIS_AVAILABLE = True
except ImportError:
HIREDIS_AVAILABLE = False
def from_url(url, db=None, **kwargs):
"""
Returns an active Redis client generated from the given database URL.
Will attempt to extract the database id from the path url fragment, if
none is provided.
"""
from redis.client import Redis
return Redis.from_url(url, db, **kwargs)
- from contextlib import contextmanager
@contextmanager
def pipeline(redis_obj):
p = redis_obj.pipeline()
yield p
p.execute()
|
Move import statement on top for PEP8 compliancy.
|
## Code Before:
try:
import hiredis
HIREDIS_AVAILABLE = True
except ImportError:
HIREDIS_AVAILABLE = False
def from_url(url, db=None, **kwargs):
"""
Returns an active Redis client generated from the given database URL.
Will attempt to extract the database id from the path url fragment, if
none is provided.
"""
from redis.client import Redis
return Redis.from_url(url, db, **kwargs)
from contextlib import contextmanager
@contextmanager
def pipeline(redis_obj):
p = redis_obj.pipeline()
yield p
p.execute()
## Instruction:
Move import statement on top for PEP8 compliancy.
## Code After:
from contextlib import contextmanager
try:
import hiredis
HIREDIS_AVAILABLE = True
except ImportError:
HIREDIS_AVAILABLE = False
def from_url(url, db=None, **kwargs):
"""
Returns an active Redis client generated from the given database URL.
Will attempt to extract the database id from the path url fragment, if
none is provided.
"""
from redis.client import Redis
return Redis.from_url(url, db, **kwargs)
@contextmanager
def pipeline(redis_obj):
p = redis_obj.pipeline()
yield p
p.execute()
|
+ from contextlib import contextmanager
+
+
try:
import hiredis
HIREDIS_AVAILABLE = True
except ImportError:
HIREDIS_AVAILABLE = False
def from_url(url, db=None, **kwargs):
"""
Returns an active Redis client generated from the given database URL.
Will attempt to extract the database id from the path url fragment, if
none is provided.
"""
from redis.client import Redis
return Redis.from_url(url, db, **kwargs)
- from contextlib import contextmanager
@contextmanager
def pipeline(redis_obj):
p = redis_obj.pipeline()
yield p
p.execute()
|
f96b4f3516905b13267d6c918f22e76556b4b56a
|
salt/modules/cmd.py
|
salt/modules/cmd.py
|
'''
Module for shelling out commands, inclusion of this module should be
configurable for security reasons
'''
|
'''
Module for shelling out commands, inclusion of this module should be
configurable for security reasons
'''
def echo(text):
'''
Return a string - used for testing the connection
'''
return text
|
Add a simple test function
|
Add a simple test function
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
'''
Module for shelling out commands, inclusion of this module should be
configurable for security reasons
'''
+ def echo(text):
+ '''
+ Return a string - used for testing the connection
+ '''
+ return text
+
|
Add a simple test function
|
## Code Before:
'''
Module for shelling out commands, inclusion of this module should be
configurable for security reasons
'''
## Instruction:
Add a simple test function
## Code After:
'''
Module for shelling out commands, inclusion of this module should be
configurable for security reasons
'''
def echo(text):
'''
Return a string - used for testing the connection
'''
return text
|
'''
Module for shelling out commands, inclusion of this module should be
configurable for security reasons
'''
+
+ def echo(text):
+ '''
+ Return a string - used for testing the connection
+ '''
+ return text
|
01e911926d37fa981fd7703f751ff91f052313e2
|
tkLibs/__init__.py
|
tkLibs/__init__.py
|
__all__ = ['autoScrollbar', 'button', 'combobox', 'listbox', 'window']
from .autoScrollbar import autoScrollbar
from .button import button
from .combobox import combobox
from .listbox import listbox
from .window import window
|
__all__ = ['autoScrollbar', 'button', 'combobox', 'entry', 'frame', 'label', 'listbox', 'toplevel', 'window']
from .autoScrollbar import autoScrollbar
from .button import button
from .combobox import combobox
from .entry import entry
from .frame import frame
from .label import label
from .listbox import listbox
from .toplevel import toplevel
from .window import window
|
Add import of new widgets.
|
Add import of new widgets.
|
Python
|
mit
|
Kyle-Fagan/tkLibs
|
- __all__ = ['autoScrollbar', 'button', 'combobox', 'listbox', 'window']
+ __all__ = ['autoScrollbar', 'button', 'combobox', 'entry', 'frame', 'label', 'listbox', 'toplevel', 'window']
from .autoScrollbar import autoScrollbar
from .button import button
from .combobox import combobox
+ from .entry import entry
+ from .frame import frame
+ from .label import label
from .listbox import listbox
+ from .toplevel import toplevel
from .window import window
|
Add import of new widgets.
|
## Code Before:
__all__ = ['autoScrollbar', 'button', 'combobox', 'listbox', 'window']
from .autoScrollbar import autoScrollbar
from .button import button
from .combobox import combobox
from .listbox import listbox
from .window import window
## Instruction:
Add import of new widgets.
## Code After:
__all__ = ['autoScrollbar', 'button', 'combobox', 'entry', 'frame', 'label', 'listbox', 'toplevel', 'window']
from .autoScrollbar import autoScrollbar
from .button import button
from .combobox import combobox
from .entry import entry
from .frame import frame
from .label import label
from .listbox import listbox
from .toplevel import toplevel
from .window import window
|
- __all__ = ['autoScrollbar', 'button', 'combobox', 'listbox', 'window']
+ __all__ = ['autoScrollbar', 'button', 'combobox', 'entry', 'frame', 'label', 'listbox', 'toplevel', 'window']
? +++++++++++++++++++++++++++ ++++++++++++
from .autoScrollbar import autoScrollbar
from .button import button
from .combobox import combobox
+ from .entry import entry
+ from .frame import frame
+ from .label import label
from .listbox import listbox
+ from .toplevel import toplevel
from .window import window
|
94be56593a43101abc21b30b187d340e7ef8c3f0
|
runtests.py
|
runtests.py
|
import sys
import django
from django.conf import settings
if not settings.configured:
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'stickyuploads',
),
SITE_ID=1,
SECRET_KEY='this-is-just-for-tests-so-not-that-secret',
ROOT_URLCONF='stickyuploads.tests.urls',
)
if hasattr(django, 'setup'):
django.setup()
from django.test.utils import get_runner
def runtests():
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True, failfast=False)
failures = test_runner.run_tests(['stickyuploads', ])
sys.exit(failures)
if __name__ == '__main__':
runtests()
|
import sys
import django
from django.conf import settings
if not settings.configured:
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
MIDDLEWARE_CLASSES=(
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
),
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'stickyuploads',
),
SITE_ID=1,
SECRET_KEY='this-is-just-for-tests-so-not-that-secret',
ROOT_URLCONF='stickyuploads.tests.urls',
)
if hasattr(django, 'setup'):
django.setup()
from django.test.utils import get_runner
def runtests():
if hasattr(django, 'setup'):
django.setup()
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True, failfast=False)
failures = test_runner.run_tests(['stickyuploads', ])
sys.exit(failures)
if __name__ == '__main__':
runtests()
|
Fix tests for Django 1.7 by calling the new setup and explicitly including MIDDLEWARE_CLASSES.
|
Fix tests for Django 1.7 by calling the new setup and explicitly including MIDDLEWARE_CLASSES.
|
Python
|
bsd-3-clause
|
vstoykov/django-sticky-uploads,caktus/django-sticky-uploads,caktus/django-sticky-uploads,vstoykov/django-sticky-uploads,caktus/django-sticky-uploads,vstoykov/django-sticky-uploads
|
import sys
import django
from django.conf import settings
if not settings.configured:
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
+ MIDDLEWARE_CLASSES=(
+ 'django.middleware.common.CommonMiddleware',
+ 'django.contrib.sessions.middleware.SessionMiddleware',
+ 'django.middleware.csrf.CsrfViewMiddleware',
+ 'django.contrib.auth.middleware.AuthenticationMiddleware',
+ ),
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'stickyuploads',
),
SITE_ID=1,
SECRET_KEY='this-is-just-for-tests-so-not-that-secret',
ROOT_URLCONF='stickyuploads.tests.urls',
)
if hasattr(django, 'setup'):
django.setup()
from django.test.utils import get_runner
def runtests():
+ if hasattr(django, 'setup'):
+ django.setup()
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True, failfast=False)
failures = test_runner.run_tests(['stickyuploads', ])
sys.exit(failures)
if __name__ == '__main__':
runtests()
|
Fix tests for Django 1.7 by calling the new setup and explicitly including MIDDLEWARE_CLASSES.
|
## Code Before:
import sys
import django
from django.conf import settings
if not settings.configured:
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'stickyuploads',
),
SITE_ID=1,
SECRET_KEY='this-is-just-for-tests-so-not-that-secret',
ROOT_URLCONF='stickyuploads.tests.urls',
)
if hasattr(django, 'setup'):
django.setup()
from django.test.utils import get_runner
def runtests():
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True, failfast=False)
failures = test_runner.run_tests(['stickyuploads', ])
sys.exit(failures)
if __name__ == '__main__':
runtests()
## Instruction:
Fix tests for Django 1.7 by calling the new setup and explicitly including MIDDLEWARE_CLASSES.
## Code After:
import sys
import django
from django.conf import settings
if not settings.configured:
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
MIDDLEWARE_CLASSES=(
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
),
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'stickyuploads',
),
SITE_ID=1,
SECRET_KEY='this-is-just-for-tests-so-not-that-secret',
ROOT_URLCONF='stickyuploads.tests.urls',
)
if hasattr(django, 'setup'):
django.setup()
from django.test.utils import get_runner
def runtests():
if hasattr(django, 'setup'):
django.setup()
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True, failfast=False)
failures = test_runner.run_tests(['stickyuploads', ])
sys.exit(failures)
if __name__ == '__main__':
runtests()
|
import sys
import django
from django.conf import settings
if not settings.configured:
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
+ MIDDLEWARE_CLASSES=(
+ 'django.middleware.common.CommonMiddleware',
+ 'django.contrib.sessions.middleware.SessionMiddleware',
+ 'django.middleware.csrf.CsrfViewMiddleware',
+ 'django.contrib.auth.middleware.AuthenticationMiddleware',
+ ),
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'stickyuploads',
),
SITE_ID=1,
SECRET_KEY='this-is-just-for-tests-so-not-that-secret',
ROOT_URLCONF='stickyuploads.tests.urls',
)
if hasattr(django, 'setup'):
django.setup()
from django.test.utils import get_runner
def runtests():
+ if hasattr(django, 'setup'):
+ django.setup()
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True, failfast=False)
failures = test_runner.run_tests(['stickyuploads', ])
sys.exit(failures)
if __name__ == '__main__':
runtests()
|
56a76b9dc0745d22d9b1eac54348d6109dc3c0e1
|
src/funding/urls.py
|
src/funding/urls.py
|
from django.urls import path, include
from . import views
urlpatterns = [
path('', views.CurrentView.as_view(), name='funding_current'),
path('teraz/', views.CurrentView.as_view()),
path('teraz/<slug:slug>/', views.CurrentView.as_view(), name='funding_current'),
path('lektura/', views.OfferListView.as_view(), name='funding'),
path('lektura/<slug:slug>/', views.OfferDetailView.as_view(), name='funding_offer'),
path('pozostale/', views.WLFundView.as_view(), name='funding_wlfund'),
path('dziekujemy/', views.ThanksView.as_view(), name='funding_thanks'),
path('niepowodzenie/', views.NoThanksView.as_view(), name='funding_nothanks'),
path('wylacz_email/', views.DisableNotifications.as_view(), name='funding_disable_notifications'),
path('getpaid/', include('getpaid.urls')),
]
|
from django.urls import path, include
from annoy.utils import banner_exempt
from . import views
urlpatterns = [
path('', banner_exempt(views.CurrentView.as_view()), name='funding_current'),
path('teraz/', banner_exempt(views.CurrentView.as_view())),
path('teraz/<slug:slug>/', banner_exempt(views.CurrentView.as_view()), name='funding_current'),
path('lektura/', banner_exempt(views.OfferListView.as_view()), name='funding'),
path('lektura/<slug:slug>/', banner_exempt(views.OfferDetailView.as_view()), name='funding_offer'),
path('pozostale/', banner_exempt(views.WLFundView.as_view()), name='funding_wlfund'),
path('dziekujemy/', banner_exempt(views.ThanksView.as_view()), name='funding_thanks'),
path('niepowodzenie/', banner_exempt(views.NoThanksView.as_view()), name='funding_nothanks'),
path('wylacz_email/', banner_exempt(views.DisableNotifications.as_view()), name='funding_disable_notifications'),
path('getpaid/', include('getpaid.urls')),
]
|
Disable banners on funding pages.
|
Disable banners on funding pages.
|
Python
|
agpl-3.0
|
fnp/wolnelektury,fnp/wolnelektury,fnp/wolnelektury,fnp/wolnelektury
|
from django.urls import path, include
+ from annoy.utils import banner_exempt
from . import views
urlpatterns = [
- path('', views.CurrentView.as_view(), name='funding_current'),
+ path('', banner_exempt(views.CurrentView.as_view()), name='funding_current'),
- path('teraz/', views.CurrentView.as_view()),
+ path('teraz/', banner_exempt(views.CurrentView.as_view())),
- path('teraz/<slug:slug>/', views.CurrentView.as_view(), name='funding_current'),
+ path('teraz/<slug:slug>/', banner_exempt(views.CurrentView.as_view()), name='funding_current'),
- path('lektura/', views.OfferListView.as_view(), name='funding'),
+ path('lektura/', banner_exempt(views.OfferListView.as_view()), name='funding'),
- path('lektura/<slug:slug>/', views.OfferDetailView.as_view(), name='funding_offer'),
+ path('lektura/<slug:slug>/', banner_exempt(views.OfferDetailView.as_view()), name='funding_offer'),
- path('pozostale/', views.WLFundView.as_view(), name='funding_wlfund'),
+ path('pozostale/', banner_exempt(views.WLFundView.as_view()), name='funding_wlfund'),
- path('dziekujemy/', views.ThanksView.as_view(), name='funding_thanks'),
+ path('dziekujemy/', banner_exempt(views.ThanksView.as_view()), name='funding_thanks'),
- path('niepowodzenie/', views.NoThanksView.as_view(), name='funding_nothanks'),
+ path('niepowodzenie/', banner_exempt(views.NoThanksView.as_view()), name='funding_nothanks'),
- path('wylacz_email/', views.DisableNotifications.as_view(), name='funding_disable_notifications'),
+ path('wylacz_email/', banner_exempt(views.DisableNotifications.as_view()), name='funding_disable_notifications'),
path('getpaid/', include('getpaid.urls')),
]
|
Disable banners on funding pages.
|
## Code Before:
from django.urls import path, include
from . import views
urlpatterns = [
path('', views.CurrentView.as_view(), name='funding_current'),
path('teraz/', views.CurrentView.as_view()),
path('teraz/<slug:slug>/', views.CurrentView.as_view(), name='funding_current'),
path('lektura/', views.OfferListView.as_view(), name='funding'),
path('lektura/<slug:slug>/', views.OfferDetailView.as_view(), name='funding_offer'),
path('pozostale/', views.WLFundView.as_view(), name='funding_wlfund'),
path('dziekujemy/', views.ThanksView.as_view(), name='funding_thanks'),
path('niepowodzenie/', views.NoThanksView.as_view(), name='funding_nothanks'),
path('wylacz_email/', views.DisableNotifications.as_view(), name='funding_disable_notifications'),
path('getpaid/', include('getpaid.urls')),
]
## Instruction:
Disable banners on funding pages.
## Code After:
from django.urls import path, include
from annoy.utils import banner_exempt
from . import views
urlpatterns = [
path('', banner_exempt(views.CurrentView.as_view()), name='funding_current'),
path('teraz/', banner_exempt(views.CurrentView.as_view())),
path('teraz/<slug:slug>/', banner_exempt(views.CurrentView.as_view()), name='funding_current'),
path('lektura/', banner_exempt(views.OfferListView.as_view()), name='funding'),
path('lektura/<slug:slug>/', banner_exempt(views.OfferDetailView.as_view()), name='funding_offer'),
path('pozostale/', banner_exempt(views.WLFundView.as_view()), name='funding_wlfund'),
path('dziekujemy/', banner_exempt(views.ThanksView.as_view()), name='funding_thanks'),
path('niepowodzenie/', banner_exempt(views.NoThanksView.as_view()), name='funding_nothanks'),
path('wylacz_email/', banner_exempt(views.DisableNotifications.as_view()), name='funding_disable_notifications'),
path('getpaid/', include('getpaid.urls')),
]
|
from django.urls import path, include
+ from annoy.utils import banner_exempt
from . import views
urlpatterns = [
- path('', views.CurrentView.as_view(), name='funding_current'),
+ path('', banner_exempt(views.CurrentView.as_view()), name='funding_current'),
? ++++++++++++++ +
- path('teraz/', views.CurrentView.as_view()),
+ path('teraz/', banner_exempt(views.CurrentView.as_view())),
? ++++++++++++++ +
- path('teraz/<slug:slug>/', views.CurrentView.as_view(), name='funding_current'),
+ path('teraz/<slug:slug>/', banner_exempt(views.CurrentView.as_view()), name='funding_current'),
? ++++++++++++++ +
- path('lektura/', views.OfferListView.as_view(), name='funding'),
+ path('lektura/', banner_exempt(views.OfferListView.as_view()), name='funding'),
? ++++++++++++++ +
- path('lektura/<slug:slug>/', views.OfferDetailView.as_view(), name='funding_offer'),
+ path('lektura/<slug:slug>/', banner_exempt(views.OfferDetailView.as_view()), name='funding_offer'),
? ++++++++++++++ +
- path('pozostale/', views.WLFundView.as_view(), name='funding_wlfund'),
+ path('pozostale/', banner_exempt(views.WLFundView.as_view()), name='funding_wlfund'),
? ++++++++++++++ +
- path('dziekujemy/', views.ThanksView.as_view(), name='funding_thanks'),
+ path('dziekujemy/', banner_exempt(views.ThanksView.as_view()), name='funding_thanks'),
? ++++++++++++++ +
- path('niepowodzenie/', views.NoThanksView.as_view(), name='funding_nothanks'),
+ path('niepowodzenie/', banner_exempt(views.NoThanksView.as_view()), name='funding_nothanks'),
? ++++++++++++++ +
- path('wylacz_email/', views.DisableNotifications.as_view(), name='funding_disable_notifications'),
+ path('wylacz_email/', banner_exempt(views.DisableNotifications.as_view()), name='funding_disable_notifications'),
? ++++++++++++++ +
path('getpaid/', include('getpaid.urls')),
]
|
e966ddd804eee2f1b053de6f0bbf943d80dccc59
|
django_elastipymemcache/client.py
|
django_elastipymemcache/client.py
|
from pymemcache.client.hash import HashClient
class Client(HashClient):
def get_many(self, keys, gets=False, *args, **kwargs):
# pymemcache's HashClient may returns {'key': False}
end = super(Client, self).get_many(keys, gets, args, kwargs)
return {key: end[key] for key in end if end[key]}
get_multi = get_many
|
from pymemcache.client.hash import HashClient
class Client(HashClient):
def get_many(self, keys, gets=False, *args, **kwargs):
# pymemcache's HashClient may returns {'key': False}
end = super(Client, self).get_many(keys, gets, args, kwargs)
return {key: end.get(key) for key in end if end.get(key)}
get_multi = get_many
|
Fix get value more safe
|
Fix get value more safe
|
Python
|
mit
|
uncovertruth/django-elastipymemcache
|
from pymemcache.client.hash import HashClient
class Client(HashClient):
def get_many(self, keys, gets=False, *args, **kwargs):
# pymemcache's HashClient may returns {'key': False}
end = super(Client, self).get_many(keys, gets, args, kwargs)
- return {key: end[key] for key in end if end[key]}
+ return {key: end.get(key) for key in end if end.get(key)}
get_multi = get_many
|
Fix get value more safe
|
## Code Before:
from pymemcache.client.hash import HashClient
class Client(HashClient):
def get_many(self, keys, gets=False, *args, **kwargs):
# pymemcache's HashClient may returns {'key': False}
end = super(Client, self).get_many(keys, gets, args, kwargs)
return {key: end[key] for key in end if end[key]}
get_multi = get_many
## Instruction:
Fix get value more safe
## Code After:
from pymemcache.client.hash import HashClient
class Client(HashClient):
def get_many(self, keys, gets=False, *args, **kwargs):
# pymemcache's HashClient may returns {'key': False}
end = super(Client, self).get_many(keys, gets, args, kwargs)
return {key: end.get(key) for key in end if end.get(key)}
get_multi = get_many
|
from pymemcache.client.hash import HashClient
class Client(HashClient):
def get_many(self, keys, gets=False, *args, **kwargs):
# pymemcache's HashClient may returns {'key': False}
end = super(Client, self).get_many(keys, gets, args, kwargs)
- return {key: end[key] for key in end if end[key]}
? ^ ^ ^ ^
+ return {key: end.get(key) for key in end if end.get(key)}
? ^^^^^ ^ ^^^^^ ^
get_multi = get_many
|
266105d371193ccf0f02a3975ebdca04980b675b
|
eche/special_forms.py
|
eche/special_forms.py
|
from funcy.seqs import partition
from eche.eche_types import Symbol, List
def def_exclamation_mark(ast):
from eche.eval import eval_ast
_, key, val = ast
l = List()
l.append(key)
l.append(val)
l.env = ast.env
_, val = eval_ast(l, ast.env)
ast.env[key] = val
# if not isinstance(ast, Node):
# ast = Node(data=ast)
return ast
def let_star(ast, env=None):
from eche.env import get_default_env
from eche.eval import eval_ast
inner_env = get_default_env()
inner_env.outer = env
_, new_bindings, commands_in_new_env = ast
new_bindings = partition(2, list(new_bindings.data))
for binding in new_bindings:
key, val = binding
inner_env[key] = val
commands_in_new_env = eval_ast(commands_in_new_env, inner_env)
new_ast = eval_ast(commands_in_new_env, inner_env)
return new_ast
special_forms = {
Symbol('def!'): def_exclamation_mark,
Symbol('let*'): let_star
}
|
from funcy.seqs import partition
from eche.eche_types import Symbol, List
def def_exclamation_mark(ast, env=None):
from eche.eval import eval_ast
_, key, val = ast
l = List()
l.append(key)
l.append(val)
l.env = ast.env
_, val = eval_ast(l, ast.env)
ast.env[key] = val
# if not isinstance(ast, Node):
# ast = Node(data=ast)
return ast
def let_star(ast, env=None):
from eche.env import get_default_env
from eche.eval import eval_ast
inner_env = get_default_env()
inner_env.outer = env
_, new_bindings, commands_in_new_env = ast
new_bindings = partition(2, list(new_bindings.data))
for binding in new_bindings:
key, val = binding
inner_env[key] = val
commands_in_new_env = eval_ast(commands_in_new_env, inner_env)
new_ast = eval_ast(commands_in_new_env, inner_env)
return new_ast
special_forms = {
Symbol('def!'): def_exclamation_mark,
Symbol('let*'): let_star
}
|
Add missing env keyword arg.
|
Add missing env keyword arg.
|
Python
|
mit
|
skk/eche
|
from funcy.seqs import partition
from eche.eche_types import Symbol, List
- def def_exclamation_mark(ast):
+ def def_exclamation_mark(ast, env=None):
from eche.eval import eval_ast
_, key, val = ast
l = List()
l.append(key)
l.append(val)
l.env = ast.env
_, val = eval_ast(l, ast.env)
ast.env[key] = val
# if not isinstance(ast, Node):
# ast = Node(data=ast)
return ast
def let_star(ast, env=None):
from eche.env import get_default_env
from eche.eval import eval_ast
inner_env = get_default_env()
inner_env.outer = env
_, new_bindings, commands_in_new_env = ast
new_bindings = partition(2, list(new_bindings.data))
for binding in new_bindings:
key, val = binding
inner_env[key] = val
commands_in_new_env = eval_ast(commands_in_new_env, inner_env)
new_ast = eval_ast(commands_in_new_env, inner_env)
return new_ast
special_forms = {
Symbol('def!'): def_exclamation_mark,
Symbol('let*'): let_star
}
|
Add missing env keyword arg.
|
## Code Before:
from funcy.seqs import partition
from eche.eche_types import Symbol, List
def def_exclamation_mark(ast):
from eche.eval import eval_ast
_, key, val = ast
l = List()
l.append(key)
l.append(val)
l.env = ast.env
_, val = eval_ast(l, ast.env)
ast.env[key] = val
# if not isinstance(ast, Node):
# ast = Node(data=ast)
return ast
def let_star(ast, env=None):
from eche.env import get_default_env
from eche.eval import eval_ast
inner_env = get_default_env()
inner_env.outer = env
_, new_bindings, commands_in_new_env = ast
new_bindings = partition(2, list(new_bindings.data))
for binding in new_bindings:
key, val = binding
inner_env[key] = val
commands_in_new_env = eval_ast(commands_in_new_env, inner_env)
new_ast = eval_ast(commands_in_new_env, inner_env)
return new_ast
special_forms = {
Symbol('def!'): def_exclamation_mark,
Symbol('let*'): let_star
}
## Instruction:
Add missing env keyword arg.
## Code After:
from funcy.seqs import partition
from eche.eche_types import Symbol, List
def def_exclamation_mark(ast, env=None):
from eche.eval import eval_ast
_, key, val = ast
l = List()
l.append(key)
l.append(val)
l.env = ast.env
_, val = eval_ast(l, ast.env)
ast.env[key] = val
# if not isinstance(ast, Node):
# ast = Node(data=ast)
return ast
def let_star(ast, env=None):
from eche.env import get_default_env
from eche.eval import eval_ast
inner_env = get_default_env()
inner_env.outer = env
_, new_bindings, commands_in_new_env = ast
new_bindings = partition(2, list(new_bindings.data))
for binding in new_bindings:
key, val = binding
inner_env[key] = val
commands_in_new_env = eval_ast(commands_in_new_env, inner_env)
new_ast = eval_ast(commands_in_new_env, inner_env)
return new_ast
special_forms = {
Symbol('def!'): def_exclamation_mark,
Symbol('let*'): let_star
}
|
from funcy.seqs import partition
from eche.eche_types import Symbol, List
- def def_exclamation_mark(ast):
+ def def_exclamation_mark(ast, env=None):
? ++++++++++
from eche.eval import eval_ast
_, key, val = ast
l = List()
l.append(key)
l.append(val)
l.env = ast.env
_, val = eval_ast(l, ast.env)
ast.env[key] = val
# if not isinstance(ast, Node):
# ast = Node(data=ast)
return ast
def let_star(ast, env=None):
from eche.env import get_default_env
from eche.eval import eval_ast
inner_env = get_default_env()
inner_env.outer = env
_, new_bindings, commands_in_new_env = ast
new_bindings = partition(2, list(new_bindings.data))
for binding in new_bindings:
key, val = binding
inner_env[key] = val
commands_in_new_env = eval_ast(commands_in_new_env, inner_env)
new_ast = eval_ast(commands_in_new_env, inner_env)
return new_ast
special_forms = {
Symbol('def!'): def_exclamation_mark,
Symbol('let*'): let_star
}
|
35529cfd3f93723e8d60b43f58419385137b9a01
|
saltapi/cli.py
|
saltapi/cli.py
|
'''
CLI entry-point for salt-api
'''
# Import salt libs
from salt.utils.parsers import (
ConfigDirMixIn,
DaemonMixIn,
LogLevelMixIn,
MergeConfigMixIn,
OptionParser,
OptionParserMeta,
PidfileMixin)
# Import salt-api libs
import saltapi.client
import saltapi.config
import saltapi.version
class SaltAPI(OptionParser, ConfigDirMixIn, LogLevelMixIn, PidfileMixin,
DaemonMixIn, MergeConfigMixIn):
'''
The cli parser object used to fire up the salt api system.
'''
__metaclass__ = OptionParserMeta
VERSION = saltapi.version.__version__
def setup_config(self):
return saltapi.config.api_config(self.get_config_file_path('master'))
def run(self):
'''
Run the api
'''
self.parse_args()
self.process_config_dir()
self.daemonize_if_required()
self.set_pidfile()
client = saltapi.client.SaltAPIClient(self.config)
client.run()
|
'''
CLI entry-point for salt-api
'''
# Import salt libs
from salt.utils.parsers import (
ConfigDirMixIn,
DaemonMixIn,
LogLevelMixIn,
MergeConfigMixIn,
OptionParser,
OptionParserMeta,
PidfileMixin)
# Import salt-api libs
import saltapi.client
import saltapi.config
import saltapi.version
class SaltAPI(OptionParser, ConfigDirMixIn, LogLevelMixIn, PidfileMixin,
DaemonMixIn, MergeConfigMixIn):
'''
The cli parser object used to fire up the salt api system.
'''
__metaclass__ = OptionParserMeta
VERSION = saltapi.version.__version__
def setup_config(self):
return saltapi.config.api_config(self.get_config_file_path('master'))
def run(self):
'''
Run the api
'''
self.parse_args()
self.daemonize_if_required()
self.set_pidfile()
client = saltapi.client.SaltAPIClient(self.config)
client.run()
|
Remove unnecessary call to `process_config_dir()`.
|
Remove unnecessary call to `process_config_dir()`.
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
'''
CLI entry-point for salt-api
'''
# Import salt libs
from salt.utils.parsers import (
ConfigDirMixIn,
DaemonMixIn,
LogLevelMixIn,
MergeConfigMixIn,
OptionParser,
OptionParserMeta,
PidfileMixin)
# Import salt-api libs
import saltapi.client
import saltapi.config
import saltapi.version
class SaltAPI(OptionParser, ConfigDirMixIn, LogLevelMixIn, PidfileMixin,
DaemonMixIn, MergeConfigMixIn):
'''
The cli parser object used to fire up the salt api system.
'''
__metaclass__ = OptionParserMeta
VERSION = saltapi.version.__version__
def setup_config(self):
return saltapi.config.api_config(self.get_config_file_path('master'))
def run(self):
'''
Run the api
'''
self.parse_args()
- self.process_config_dir()
self.daemonize_if_required()
self.set_pidfile()
client = saltapi.client.SaltAPIClient(self.config)
client.run()
|
Remove unnecessary call to `process_config_dir()`.
|
## Code Before:
'''
CLI entry-point for salt-api
'''
# Import salt libs
from salt.utils.parsers import (
ConfigDirMixIn,
DaemonMixIn,
LogLevelMixIn,
MergeConfigMixIn,
OptionParser,
OptionParserMeta,
PidfileMixin)
# Import salt-api libs
import saltapi.client
import saltapi.config
import saltapi.version
class SaltAPI(OptionParser, ConfigDirMixIn, LogLevelMixIn, PidfileMixin,
DaemonMixIn, MergeConfigMixIn):
'''
The cli parser object used to fire up the salt api system.
'''
__metaclass__ = OptionParserMeta
VERSION = saltapi.version.__version__
def setup_config(self):
return saltapi.config.api_config(self.get_config_file_path('master'))
def run(self):
'''
Run the api
'''
self.parse_args()
self.process_config_dir()
self.daemonize_if_required()
self.set_pidfile()
client = saltapi.client.SaltAPIClient(self.config)
client.run()
## Instruction:
Remove unnecessary call to `process_config_dir()`.
## Code After:
'''
CLI entry-point for salt-api
'''
# Import salt libs
from salt.utils.parsers import (
ConfigDirMixIn,
DaemonMixIn,
LogLevelMixIn,
MergeConfigMixIn,
OptionParser,
OptionParserMeta,
PidfileMixin)
# Import salt-api libs
import saltapi.client
import saltapi.config
import saltapi.version
class SaltAPI(OptionParser, ConfigDirMixIn, LogLevelMixIn, PidfileMixin,
DaemonMixIn, MergeConfigMixIn):
'''
The cli parser object used to fire up the salt api system.
'''
__metaclass__ = OptionParserMeta
VERSION = saltapi.version.__version__
def setup_config(self):
return saltapi.config.api_config(self.get_config_file_path('master'))
def run(self):
'''
Run the api
'''
self.parse_args()
self.daemonize_if_required()
self.set_pidfile()
client = saltapi.client.SaltAPIClient(self.config)
client.run()
|
'''
CLI entry-point for salt-api
'''
# Import salt libs
from salt.utils.parsers import (
ConfigDirMixIn,
DaemonMixIn,
LogLevelMixIn,
MergeConfigMixIn,
OptionParser,
OptionParserMeta,
PidfileMixin)
# Import salt-api libs
import saltapi.client
import saltapi.config
import saltapi.version
class SaltAPI(OptionParser, ConfigDirMixIn, LogLevelMixIn, PidfileMixin,
DaemonMixIn, MergeConfigMixIn):
'''
The cli parser object used to fire up the salt api system.
'''
__metaclass__ = OptionParserMeta
VERSION = saltapi.version.__version__
def setup_config(self):
return saltapi.config.api_config(self.get_config_file_path('master'))
def run(self):
'''
Run the api
'''
self.parse_args()
- self.process_config_dir()
self.daemonize_if_required()
self.set_pidfile()
client = saltapi.client.SaltAPIClient(self.config)
client.run()
|
737dca75d26a90d627be09144db7441156fee981
|
scraper/management/commands/run_scraper.py
|
scraper/management/commands/run_scraper.py
|
from django.core.management.base import NoArgsCommand
from scraper.models import Source
class Command(NoArgsCommand):
""" Crawl all active resources """
def handle_noargs(self, **options):
sources = Source.objects.filter(active=True)
for source in sources:
source.crawl()
|
from django.core.management.base import NoArgsCommand
from scraper.models import Spider
class Command(NoArgsCommand):
""" Crawl all active resources """
def handle_noargs(self, **options):
spiders = Spider.objects.all()
for spider in spiders:
spider.crawl_content()
|
Update management command to adapt new model
|
Update management command to adapt new model
|
Python
|
mit
|
zniper/django-scraper,zniper/django-scraper
|
from django.core.management.base import NoArgsCommand
- from scraper.models import Source
+ from scraper.models import Spider
class Command(NoArgsCommand):
""" Crawl all active resources """
def handle_noargs(self, **options):
- sources = Source.objects.filter(active=True)
- for source in sources:
- source.crawl()
+ spiders = Spider.objects.all()
+ for spider in spiders:
+ spider.crawl_content()
|
Update management command to adapt new model
|
## Code Before:
from django.core.management.base import NoArgsCommand
from scraper.models import Source
class Command(NoArgsCommand):
""" Crawl all active resources """
def handle_noargs(self, **options):
sources = Source.objects.filter(active=True)
for source in sources:
source.crawl()
## Instruction:
Update management command to adapt new model
## Code After:
from django.core.management.base import NoArgsCommand
from scraper.models import Spider
class Command(NoArgsCommand):
""" Crawl all active resources """
def handle_noargs(self, **options):
spiders = Spider.objects.all()
for spider in spiders:
spider.crawl_content()
|
from django.core.management.base import NoArgsCommand
- from scraper.models import Source
? ^^ --
+ from scraper.models import Spider
? ^^^^
class Command(NoArgsCommand):
""" Crawl all active resources """
def handle_noargs(self, **options):
- sources = Source.objects.filter(active=True)
- for source in sources:
- source.crawl()
+ spiders = Spider.objects.all()
+ for spider in spiders:
+ spider.crawl_content()
|
2c2deea36a7e040244152a345eb672e62c519c76
|
pulse_actions/publisher.py
|
pulse_actions/publisher.py
|
import os
import sys
from pulse_actions.authentication import (get_user_and_password,
AuthenticationError)
from mozillapulse.publishers import GenericPublisher
from mozillapulse.config import PulseConfiguration
from mozillapulse.messages.base import GenericMessage
class ExperimentalPublisher(GenericPublisher):
def __init__(self, **kwargs):
super(ExperimentalPublisher, self).__init__(
PulseConfiguration(**kwargs),
'exchange/adusca/experiment',
**kwargs)
class MessageHandler:
def __init__(self):
"""Create Publisher."""
try:
user, password = get_user_and_password()
except AuthenticationError as e:
print(e.message)
sys.exit(1)
self.publisher = ExperimentalPublisher(user=user, password=password)
def publish_message(self, data, routing_key):
"""Publish a message to exchange/adusca/experiment."""
msg = GenericMessage()
msg.routing_parts = routing_key.split('.')
for key, value in data.iteritems():
msg.set_data(key, value)
self.publisher.publish(msg)
|
import os
import sys
from pulse_actions.authentication import (
get_user_and_password,
AuthenticationError
)
from mozillapulse.publishers import GenericPublisher
from mozillapulse.config import PulseConfiguration
from mozillapulse.messages.base import GenericMessage
class ExperimentalPublisher(GenericPublisher):
def __init__(self, **kwargs):
super(ExperimentalPublisher, self).__init__(
PulseConfiguration(**kwargs),
'exchange/adusca/experiment',
**kwargs)
class MessageHandler:
def __init__(self):
"""Create Publisher."""
try:
user, password = get_user_and_password()
except AuthenticationError as e:
print(e.message)
sys.exit(1)
self.publisher = ExperimentalPublisher(user=user, password=password)
def publish_message(self, data, routing_key):
"""Publish a message to exchange/adusca/experiment."""
msg = GenericMessage()
msg.routing_parts = routing_key.split('.')
for key, value in data.iteritems():
msg.set_data(key, value)
try:
self.publisher.publish(msg)
except Exception as e:
print('ERROR: We failed to post a pulse message with what we did')
print(e.message)
|
Handle failing to publish to pulse
|
Handle failing to publish to pulse
|
Python
|
mpl-2.0
|
armenzg/pulse_actions,mozilla/pulse_actions,adusca/pulse_actions
|
import os
import sys
- from pulse_actions.authentication import (get_user_and_password,
+ from pulse_actions.authentication import (
+ get_user_and_password,
- AuthenticationError)
+ AuthenticationError
+ )
from mozillapulse.publishers import GenericPublisher
from mozillapulse.config import PulseConfiguration
from mozillapulse.messages.base import GenericMessage
class ExperimentalPublisher(GenericPublisher):
def __init__(self, **kwargs):
super(ExperimentalPublisher, self).__init__(
PulseConfiguration(**kwargs),
'exchange/adusca/experiment',
**kwargs)
class MessageHandler:
def __init__(self):
"""Create Publisher."""
try:
user, password = get_user_and_password()
except AuthenticationError as e:
print(e.message)
sys.exit(1)
+
self.publisher = ExperimentalPublisher(user=user, password=password)
def publish_message(self, data, routing_key):
"""Publish a message to exchange/adusca/experiment."""
msg = GenericMessage()
msg.routing_parts = routing_key.split('.')
for key, value in data.iteritems():
msg.set_data(key, value)
+ try:
- self.publisher.publish(msg)
+ self.publisher.publish(msg)
+ except Exception as e:
+ print('ERROR: We failed to post a pulse message with what we did')
+ print(e.message)
|
Handle failing to publish to pulse
|
## Code Before:
import os
import sys
from pulse_actions.authentication import (get_user_and_password,
AuthenticationError)
from mozillapulse.publishers import GenericPublisher
from mozillapulse.config import PulseConfiguration
from mozillapulse.messages.base import GenericMessage
class ExperimentalPublisher(GenericPublisher):
def __init__(self, **kwargs):
super(ExperimentalPublisher, self).__init__(
PulseConfiguration(**kwargs),
'exchange/adusca/experiment',
**kwargs)
class MessageHandler:
def __init__(self):
"""Create Publisher."""
try:
user, password = get_user_and_password()
except AuthenticationError as e:
print(e.message)
sys.exit(1)
self.publisher = ExperimentalPublisher(user=user, password=password)
def publish_message(self, data, routing_key):
"""Publish a message to exchange/adusca/experiment."""
msg = GenericMessage()
msg.routing_parts = routing_key.split('.')
for key, value in data.iteritems():
msg.set_data(key, value)
self.publisher.publish(msg)
## Instruction:
Handle failing to publish to pulse
## Code After:
import os
import sys
from pulse_actions.authentication import (
get_user_and_password,
AuthenticationError
)
from mozillapulse.publishers import GenericPublisher
from mozillapulse.config import PulseConfiguration
from mozillapulse.messages.base import GenericMessage
class ExperimentalPublisher(GenericPublisher):
def __init__(self, **kwargs):
super(ExperimentalPublisher, self).__init__(
PulseConfiguration(**kwargs),
'exchange/adusca/experiment',
**kwargs)
class MessageHandler:
def __init__(self):
"""Create Publisher."""
try:
user, password = get_user_and_password()
except AuthenticationError as e:
print(e.message)
sys.exit(1)
self.publisher = ExperimentalPublisher(user=user, password=password)
def publish_message(self, data, routing_key):
"""Publish a message to exchange/adusca/experiment."""
msg = GenericMessage()
msg.routing_parts = routing_key.split('.')
for key, value in data.iteritems():
msg.set_data(key, value)
try:
self.publisher.publish(msg)
except Exception as e:
print('ERROR: We failed to post a pulse message with what we did')
print(e.message)
|
import os
import sys
- from pulse_actions.authentication import (get_user_and_password,
? ----------------------
+ from pulse_actions.authentication import (
+ get_user_and_password,
- AuthenticationError)
? -
+ AuthenticationError
+ )
from mozillapulse.publishers import GenericPublisher
from mozillapulse.config import PulseConfiguration
from mozillapulse.messages.base import GenericMessage
class ExperimentalPublisher(GenericPublisher):
def __init__(self, **kwargs):
super(ExperimentalPublisher, self).__init__(
PulseConfiguration(**kwargs),
'exchange/adusca/experiment',
**kwargs)
class MessageHandler:
def __init__(self):
"""Create Publisher."""
try:
user, password = get_user_and_password()
except AuthenticationError as e:
print(e.message)
sys.exit(1)
+
self.publisher = ExperimentalPublisher(user=user, password=password)
def publish_message(self, data, routing_key):
"""Publish a message to exchange/adusca/experiment."""
msg = GenericMessage()
msg.routing_parts = routing_key.split('.')
for key, value in data.iteritems():
msg.set_data(key, value)
+ try:
- self.publisher.publish(msg)
+ self.publisher.publish(msg)
? ++++
+ except Exception as e:
+ print('ERROR: We failed to post a pulse message with what we did')
+ print(e.message)
|
bf08dfaa3384c67dbaf86f31006c1cea462ae7db
|
bot.py
|
bot.py
|
import discord
import commands
bot = discord.Client()
@bot.event
def on_ready():
print('Logged in as:')
print('Username: ' + bot.user.name)
print('ID: ' + bot.user.id)
print('------')
@bot.event
def on_message(message):
commands.dispatch_messages(bot, message)
if __name__ == '__main__':
commands.load_config()
bot.login(commands.config['username'], commands.config['password'])
bot.run()
|
import discord
import commands
bot = discord.Client()
@bot.event
def on_ready():
print('Logged in as:')
print('Username: ' + bot.user.name)
print('ID: ' + bot.user.id)
print('------')
@bot.event
def on_message(message):
commands.dispatch_messages(bot, message)
@bot.event
def on_member_join(member):
if member.server.id == '86177841854566400':
# check if this is /r/Splatoon
channel = discord.utils.find(lambda c: c.id == '86177841854566400', member.server.channels)
if channel is not None:
bot.send_message(channel, 'Welcome {}, to the /r/Splatoon Discord.'.format(member.name))
if __name__ == '__main__':
commands.load_config()
bot.login(commands.config['username'], commands.config['password'])
bot.run()
|
Add welcome message for /r/splatoon chat.
|
Add welcome message for /r/splatoon chat.
|
Python
|
mpl-2.0
|
Rapptz/RoboDanny,haitaka/DroiTaka
|
import discord
import commands
bot = discord.Client()
@bot.event
def on_ready():
print('Logged in as:')
print('Username: ' + bot.user.name)
print('ID: ' + bot.user.id)
print('------')
@bot.event
def on_message(message):
commands.dispatch_messages(bot, message)
+ @bot.event
+ def on_member_join(member):
+ if member.server.id == '86177841854566400':
+ # check if this is /r/Splatoon
+ channel = discord.utils.find(lambda c: c.id == '86177841854566400', member.server.channels)
+ if channel is not None:
+ bot.send_message(channel, 'Welcome {}, to the /r/Splatoon Discord.'.format(member.name))
+
if __name__ == '__main__':
commands.load_config()
bot.login(commands.config['username'], commands.config['password'])
bot.run()
|
Add welcome message for /r/splatoon chat.
|
## Code Before:
import discord
import commands
bot = discord.Client()
@bot.event
def on_ready():
print('Logged in as:')
print('Username: ' + bot.user.name)
print('ID: ' + bot.user.id)
print('------')
@bot.event
def on_message(message):
commands.dispatch_messages(bot, message)
if __name__ == '__main__':
commands.load_config()
bot.login(commands.config['username'], commands.config['password'])
bot.run()
## Instruction:
Add welcome message for /r/splatoon chat.
## Code After:
import discord
import commands
bot = discord.Client()
@bot.event
def on_ready():
print('Logged in as:')
print('Username: ' + bot.user.name)
print('ID: ' + bot.user.id)
print('------')
@bot.event
def on_message(message):
commands.dispatch_messages(bot, message)
@bot.event
def on_member_join(member):
if member.server.id == '86177841854566400':
# check if this is /r/Splatoon
channel = discord.utils.find(lambda c: c.id == '86177841854566400', member.server.channels)
if channel is not None:
bot.send_message(channel, 'Welcome {}, to the /r/Splatoon Discord.'.format(member.name))
if __name__ == '__main__':
commands.load_config()
bot.login(commands.config['username'], commands.config['password'])
bot.run()
|
import discord
import commands
bot = discord.Client()
@bot.event
def on_ready():
print('Logged in as:')
print('Username: ' + bot.user.name)
print('ID: ' + bot.user.id)
print('------')
@bot.event
def on_message(message):
commands.dispatch_messages(bot, message)
+ @bot.event
+ def on_member_join(member):
+ if member.server.id == '86177841854566400':
+ # check if this is /r/Splatoon
+ channel = discord.utils.find(lambda c: c.id == '86177841854566400', member.server.channels)
+ if channel is not None:
+ bot.send_message(channel, 'Welcome {}, to the /r/Splatoon Discord.'.format(member.name))
+
if __name__ == '__main__':
commands.load_config()
bot.login(commands.config['username'], commands.config['password'])
bot.run()
|
989abb47041e6a172765453c750c31144a92def5
|
doc/rst2html-manual.py
|
doc/rst2html-manual.py
|
import locale
from docutils.core import publish_cmdline, default_description
from docutils.parsers.rst import directives
from docutils.parsers.rst import roles
from rst2pdf.directives import code_block
from rst2pdf.directives import noop
from rst2pdf.roles import counter_off
locale.setlocale(locale.LC_ALL, '')
directives.register_directive('code-block', code_block.code_block_directive)
directives.register_directive('oddeven', noop.noop_directive)
roles.register_canonical_role('counter', counter_off.counter_fn)
description = (
'Generates (X)HTML documents from standalone reStructuredText '
'sources. ' + default_description
)
publish_cmdline(writer_name='html', description=description)
|
import locale
from docutils.core import default_description
from docutils.core import publish_cmdline
from docutils.parsers.rst import directives
from docutils.parsers.rst import roles
from rst2pdf.directives import code_block
from rst2pdf.directives import noop
from rst2pdf.roles import counter_off
locale.setlocale(locale.LC_ALL, '')
directives.register_directive('code-block', code_block.code_block_directive)
directives.register_directive('oddeven', noop.noop_directive)
roles.register_canonical_role('counter', counter_off.counter_fn)
description = (
'Generates HTML5 documents from standalone reStructuredText '
'sources.\n' + default_description
)
publish_cmdline(writer_name='html5', description=description)
|
Switch rst2html to HTML5 builder
|
Switch rst2html to HTML5 builder
This gives a prettier output and every browser in widespread usage has
supported it for years. The license, which was previously missing, is
added.
Signed-off-by: Stephen Finucane <[email protected]>
|
Python
|
mit
|
rst2pdf/rst2pdf,rst2pdf/rst2pdf
|
import locale
- from docutils.core import publish_cmdline, default_description
+ from docutils.core import default_description
+ from docutils.core import publish_cmdline
from docutils.parsers.rst import directives
from docutils.parsers.rst import roles
from rst2pdf.directives import code_block
from rst2pdf.directives import noop
from rst2pdf.roles import counter_off
locale.setlocale(locale.LC_ALL, '')
directives.register_directive('code-block', code_block.code_block_directive)
directives.register_directive('oddeven', noop.noop_directive)
roles.register_canonical_role('counter', counter_off.counter_fn)
description = (
- 'Generates (X)HTML documents from standalone reStructuredText '
+ 'Generates HTML5 documents from standalone reStructuredText '
- 'sources. ' + default_description
+ 'sources.\n' + default_description
)
- publish_cmdline(writer_name='html', description=description)
+ publish_cmdline(writer_name='html5', description=description)
|
Switch rst2html to HTML5 builder
|
## Code Before:
import locale
from docutils.core import publish_cmdline, default_description
from docutils.parsers.rst import directives
from docutils.parsers.rst import roles
from rst2pdf.directives import code_block
from rst2pdf.directives import noop
from rst2pdf.roles import counter_off
locale.setlocale(locale.LC_ALL, '')
directives.register_directive('code-block', code_block.code_block_directive)
directives.register_directive('oddeven', noop.noop_directive)
roles.register_canonical_role('counter', counter_off.counter_fn)
description = (
'Generates (X)HTML documents from standalone reStructuredText '
'sources. ' + default_description
)
publish_cmdline(writer_name='html', description=description)
## Instruction:
Switch rst2html to HTML5 builder
## Code After:
import locale
from docutils.core import default_description
from docutils.core import publish_cmdline
from docutils.parsers.rst import directives
from docutils.parsers.rst import roles
from rst2pdf.directives import code_block
from rst2pdf.directives import noop
from rst2pdf.roles import counter_off
locale.setlocale(locale.LC_ALL, '')
directives.register_directive('code-block', code_block.code_block_directive)
directives.register_directive('oddeven', noop.noop_directive)
roles.register_canonical_role('counter', counter_off.counter_fn)
description = (
'Generates HTML5 documents from standalone reStructuredText '
'sources.\n' + default_description
)
publish_cmdline(writer_name='html5', description=description)
|
import locale
- from docutils.core import publish_cmdline, default_description
? -----------------
+ from docutils.core import default_description
+ from docutils.core import publish_cmdline
from docutils.parsers.rst import directives
from docutils.parsers.rst import roles
from rst2pdf.directives import code_block
from rst2pdf.directives import noop
from rst2pdf.roles import counter_off
locale.setlocale(locale.LC_ALL, '')
directives.register_directive('code-block', code_block.code_block_directive)
directives.register_directive('oddeven', noop.noop_directive)
roles.register_canonical_role('counter', counter_off.counter_fn)
description = (
- 'Generates (X)HTML documents from standalone reStructuredText '
? ---
+ 'Generates HTML5 documents from standalone reStructuredText '
? +
- 'sources. ' + default_description
? ^^
+ 'sources.\n' + default_description
? ^^
)
- publish_cmdline(writer_name='html', description=description)
+ publish_cmdline(writer_name='html5', description=description)
? +
|
29aeca4df24c84cecd48f0893da94624dab0e1c7
|
manage.py
|
manage.py
|
import os
from app import create_app
from flask.ext.script import Manager
app = create_app(os.getenv('FLASK_CONFIG') or 'default')
manager = Manager(app)
if __name__ == '__main__':
manager.run()
|
import os
from app import create_app, db
from app.models import User
from flask.ext.script import Manager
app = create_app(os.getenv('FLASK_CONFIG') or 'default')
manager = Manager(app)
@manager.command
def adduser(email, username, admin=False):
""" Register a new user"""
from getpass import getpass
password = getpass()
password2 = getpass(prompt = 'Confirm: ')
if password != password2:
import sys
sys.exit("Error: Passwords do not match!")
db.create_all()
user = User(email=email, username=username, password=password, is_admin=admin)
db.session.add(user)
db.session.commit()
print('User {0} was registered successfully!'.format(username))
if __name__ == '__main__':
manager.run()
|
Add a custom script command to add a user to the database
|
Add a custom script command to add a user to the database
|
Python
|
mit
|
finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is
|
import os
- from app import create_app
+ from app import create_app, db
+ from app.models import User
from flask.ext.script import Manager
app = create_app(os.getenv('FLASK_CONFIG') or 'default')
manager = Manager(app)
+ @manager.command
+ def adduser(email, username, admin=False):
+ """ Register a new user"""
+ from getpass import getpass
+ password = getpass()
+ password2 = getpass(prompt = 'Confirm: ')
+
+ if password != password2:
+ import sys
+ sys.exit("Error: Passwords do not match!")
+
+ db.create_all()
+
+ user = User(email=email, username=username, password=password, is_admin=admin)
+ db.session.add(user)
+ db.session.commit()
+
+ print('User {0} was registered successfully!'.format(username))
+
if __name__ == '__main__':
manager.run()
|
Add a custom script command to add a user to the database
|
## Code Before:
import os
from app import create_app
from flask.ext.script import Manager
app = create_app(os.getenv('FLASK_CONFIG') or 'default')
manager = Manager(app)
if __name__ == '__main__':
manager.run()
## Instruction:
Add a custom script command to add a user to the database
## Code After:
import os
from app import create_app, db
from app.models import User
from flask.ext.script import Manager
app = create_app(os.getenv('FLASK_CONFIG') or 'default')
manager = Manager(app)
@manager.command
def adduser(email, username, admin=False):
""" Register a new user"""
from getpass import getpass
password = getpass()
password2 = getpass(prompt = 'Confirm: ')
if password != password2:
import sys
sys.exit("Error: Passwords do not match!")
db.create_all()
user = User(email=email, username=username, password=password, is_admin=admin)
db.session.add(user)
db.session.commit()
print('User {0} was registered successfully!'.format(username))
if __name__ == '__main__':
manager.run()
|
import os
- from app import create_app
+ from app import create_app, db
? ++++
+ from app.models import User
from flask.ext.script import Manager
app = create_app(os.getenv('FLASK_CONFIG') or 'default')
manager = Manager(app)
+ @manager.command
+ def adduser(email, username, admin=False):
+ """ Register a new user"""
+ from getpass import getpass
+ password = getpass()
+ password2 = getpass(prompt = 'Confirm: ')
+
+ if password != password2:
+ import sys
+ sys.exit("Error: Passwords do not match!")
+
+ db.create_all()
+
+ user = User(email=email, username=username, password=password, is_admin=admin)
+ db.session.add(user)
+ db.session.commit()
+
+ print('User {0} was registered successfully!'.format(username))
+
if __name__ == '__main__':
manager.run()
|
483ba69bca57899054270cb24c41b0d2c01e7ff0
|
opentreemap/stormwater/models.py
|
opentreemap/stormwater/models.py
|
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.contrib.gis.db import models
from treemap.models import MapFeature
class PolygonalMapFeature(MapFeature):
area_field_name = 'polygon'
skip_detail_form = True
polygon = models.MultiPolygonField(srid=3857)
class Bioswale(PolygonalMapFeature):
pass
|
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.contrib.gis.db import models
from treemap.models import MapFeature
class PolygonalMapFeature(MapFeature):
area_field_name = 'polygon'
skip_detail_form = True
polygon = models.MultiPolygonField(srid=3857)
class Bioswale(PolygonalMapFeature):
collection_udf_defaults = {
'Stewardship': [
{'name': 'Action',
'choices': ['Watered',
'Pruned',
'Mulched, Had Compost Added, or Soil Amended',
'Cleared of Trash or Debris'],
'type': 'choice'},
{'type': 'date',
'name': 'Date'}],
}
|
Add placeholder defaults for bioswale stewardship
|
Add placeholder defaults for bioswale stewardship
|
Python
|
agpl-3.0
|
clever-crow-consulting/otm-core,clever-crow-consulting/otm-core,recklessromeo/otm-core,maurizi/otm-core,recklessromeo/otm-core,RickMohr/otm-core,RickMohr/otm-core,RickMohr/otm-core,clever-crow-consulting/otm-core,clever-crow-consulting/otm-core,RickMohr/otm-core,recklessromeo/otm-core,maurizi/otm-core,maurizi/otm-core,maurizi/otm-core,recklessromeo/otm-core
|
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.contrib.gis.db import models
from treemap.models import MapFeature
class PolygonalMapFeature(MapFeature):
area_field_name = 'polygon'
skip_detail_form = True
polygon = models.MultiPolygonField(srid=3857)
class Bioswale(PolygonalMapFeature):
- pass
+ collection_udf_defaults = {
+ 'Stewardship': [
+ {'name': 'Action',
+ 'choices': ['Watered',
+ 'Pruned',
+ 'Mulched, Had Compost Added, or Soil Amended',
+ 'Cleared of Trash or Debris'],
+ 'type': 'choice'},
+ {'type': 'date',
+ 'name': 'Date'}],
+ }
|
Add placeholder defaults for bioswale stewardship
|
## Code Before:
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.contrib.gis.db import models
from treemap.models import MapFeature
class PolygonalMapFeature(MapFeature):
area_field_name = 'polygon'
skip_detail_form = True
polygon = models.MultiPolygonField(srid=3857)
class Bioswale(PolygonalMapFeature):
pass
## Instruction:
Add placeholder defaults for bioswale stewardship
## Code After:
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.contrib.gis.db import models
from treemap.models import MapFeature
class PolygonalMapFeature(MapFeature):
area_field_name = 'polygon'
skip_detail_form = True
polygon = models.MultiPolygonField(srid=3857)
class Bioswale(PolygonalMapFeature):
collection_udf_defaults = {
'Stewardship': [
{'name': 'Action',
'choices': ['Watered',
'Pruned',
'Mulched, Had Compost Added, or Soil Amended',
'Cleared of Trash or Debris'],
'type': 'choice'},
{'type': 'date',
'name': 'Date'}],
}
|
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.contrib.gis.db import models
from treemap.models import MapFeature
class PolygonalMapFeature(MapFeature):
area_field_name = 'polygon'
skip_detail_form = True
polygon = models.MultiPolygonField(srid=3857)
class Bioswale(PolygonalMapFeature):
- pass
+ collection_udf_defaults = {
+ 'Stewardship': [
+ {'name': 'Action',
+ 'choices': ['Watered',
+ 'Pruned',
+ 'Mulched, Had Compost Added, or Soil Amended',
+ 'Cleared of Trash or Debris'],
+ 'type': 'choice'},
+ {'type': 'date',
+ 'name': 'Date'}],
+ }
|
8998d0f617791f95b1ed6b4a1fffa0f71752b801
|
pybo/bayesopt/inits.py
|
pybo/bayesopt/inits.py
|
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# global imports
import numpy as np
# local imports
from ..utils import ldsample
# exported symbols
__all__ = ['init_middle', 'init_uniform', 'init_latin', 'init_sobol']
def init_middle(bounds):
return np.mean(bounds, axis=1)[None, :]
def init_uniform(bounds, rng=None):
n = 3*len(bounds)
X = ldsample.random(bounds, n, rng)
return X
def init_latin(bounds, rng=None):
n = 3*len(bounds)
X = ldsample.latin(bounds, n, rng)
return X
def init_sobol(bounds, rng=None):
n = 3*len(bounds)
X = ldsample.sobol(bounds, n, rng)
return X
|
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# global imports
import numpy as np
# local imports
from ..utils import ldsample
# exported symbols
__all__ = ['init_middle', 'init_uniform', 'init_latin', 'init_sobol']
def init_middle(bounds):
"""
Initialize using a single query in the middle of the space.
"""
return np.mean(bounds, axis=1)[None, :]
def init_uniform(bounds, n=None, rng=None):
"""
Initialize using `n` uniformly distributed query points. If `n` is `None`
then use 3D points where D is the dimensionality of the input space.
"""
n = 3*len(bounds) if (n is None) else n
X = ldsample.random(bounds, n, rng)
return X
def init_latin(bounds, n=None, rng=None):
"""
Initialize using a Latin hypercube design of size `n`. If `n` is `None`
then use 3D points where D is the dimensionality of the input space.
"""
n = 3*len(bounds) if (n is None) else n
X = ldsample.latin(bounds, n, rng)
return X
def init_sobol(bounds, n=None, rng=None):
"""
Initialize using a Sobol sequence of length `n`. If `n` is `None` then use
3D points where D is the dimensionality of the input space.
"""
n = 3*len(bounds) if (n is None) else n
X = ldsample.sobol(bounds, n, rng)
return X
|
Update docs/params for initialization methods.
|
Update docs/params for initialization methods.
|
Python
|
bsd-2-clause
|
mwhoffman/pybo,jhartford/pybo
|
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# global imports
import numpy as np
# local imports
from ..utils import ldsample
# exported symbols
__all__ = ['init_middle', 'init_uniform', 'init_latin', 'init_sobol']
def init_middle(bounds):
+ """
+ Initialize using a single query in the middle of the space.
+ """
return np.mean(bounds, axis=1)[None, :]
- def init_uniform(bounds, rng=None):
+ def init_uniform(bounds, n=None, rng=None):
- n = 3*len(bounds)
+ """
+ Initialize using `n` uniformly distributed query points. If `n` is `None`
+ then use 3D points where D is the dimensionality of the input space.
+ """
+ n = 3*len(bounds) if (n is None) else n
X = ldsample.random(bounds, n, rng)
return X
- def init_latin(bounds, rng=None):
+ def init_latin(bounds, n=None, rng=None):
- n = 3*len(bounds)
+ """
+ Initialize using a Latin hypercube design of size `n`. If `n` is `None`
+ then use 3D points where D is the dimensionality of the input space.
+ """
+ n = 3*len(bounds) if (n is None) else n
X = ldsample.latin(bounds, n, rng)
return X
- def init_sobol(bounds, rng=None):
+ def init_sobol(bounds, n=None, rng=None):
- n = 3*len(bounds)
+ """
+ Initialize using a Sobol sequence of length `n`. If `n` is `None` then use
+ 3D points where D is the dimensionality of the input space.
+ """
+ n = 3*len(bounds) if (n is None) else n
X = ldsample.sobol(bounds, n, rng)
return X
|
Update docs/params for initialization methods.
|
## Code Before:
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# global imports
import numpy as np
# local imports
from ..utils import ldsample
# exported symbols
__all__ = ['init_middle', 'init_uniform', 'init_latin', 'init_sobol']
def init_middle(bounds):
return np.mean(bounds, axis=1)[None, :]
def init_uniform(bounds, rng=None):
n = 3*len(bounds)
X = ldsample.random(bounds, n, rng)
return X
def init_latin(bounds, rng=None):
n = 3*len(bounds)
X = ldsample.latin(bounds, n, rng)
return X
def init_sobol(bounds, rng=None):
n = 3*len(bounds)
X = ldsample.sobol(bounds, n, rng)
return X
## Instruction:
Update docs/params for initialization methods.
## Code After:
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# global imports
import numpy as np
# local imports
from ..utils import ldsample
# exported symbols
__all__ = ['init_middle', 'init_uniform', 'init_latin', 'init_sobol']
def init_middle(bounds):
    """Return a single query point located at the center of the space.

    `bounds` is a sequence of (lo, hi) pairs; the result has shape (1, D).
    """
    bounds = np.asarray(bounds)
    midpoint = bounds.mean(axis=1)
    return midpoint[np.newaxis, :]
def init_uniform(bounds, n=None, rng=None):
    """
    Initialize with `n` points drawn uniformly at random from the search
    space; defaults to 3*D points, where D is the input dimensionality.
    """
    if n is None:
        n = 3 * len(bounds)
    return ldsample.random(bounds, n, rng)
def init_latin(bounds, n=None, rng=None):
    """
    Initialize with a Latin hypercube design of `n` points; defaults to
    3*D points, where D is the input dimensionality.
    """
    if n is None:
        n = 3 * len(bounds)
    return ldsample.latin(bounds, n, rng)
def init_sobol(bounds, n=None, rng=None):
    """
    Initialize with the first `n` points of a Sobol sequence; defaults to
    3*D points, where D is the input dimensionality.
    """
    if n is None:
        n = 3 * len(bounds)
    return ldsample.sobol(bounds, n, rng)
|
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# global imports
import numpy as np
# local imports
from ..utils import ldsample
# exported symbols
__all__ = ['init_middle', 'init_uniform', 'init_latin', 'init_sobol']
def init_middle(bounds):
+ """
+ Initialize using a single query in the middle of the space.
+ """
return np.mean(bounds, axis=1)[None, :]
- def init_uniform(bounds, rng=None):
+ def init_uniform(bounds, n=None, rng=None):
? ++++++++
- n = 3*len(bounds)
+ """
+ Initialize using `n` uniformly distributed query points. If `n` is `None`
+ then use 3D points where D is the dimensionality of the input space.
+ """
+ n = 3*len(bounds) if (n is None) else n
X = ldsample.random(bounds, n, rng)
return X
- def init_latin(bounds, rng=None):
+ def init_latin(bounds, n=None, rng=None):
? ++++++++
- n = 3*len(bounds)
+ """
+ Initialize using a Latin hypercube design of size `n`. If `n` is `None`
+ then use 3D points where D is the dimensionality of the input space.
+ """
+ n = 3*len(bounds) if (n is None) else n
X = ldsample.latin(bounds, n, rng)
return X
- def init_sobol(bounds, rng=None):
+ def init_sobol(bounds, n=None, rng=None):
? ++++++++
- n = 3*len(bounds)
+ """
+ Initialize using a Sobol sequence of length `n`. If `n` is `None` then use
+ 3D points where D is the dimensionality of the input space.
+ """
+ n = 3*len(bounds) if (n is None) else n
X = ldsample.sobol(bounds, n, rng)
return X
|
d436bcc20be8eb81960a53d442f699e42e2f9ea7
|
src/tkjoincsv.py
|
src/tkjoincsv.py
|
import tkFileDialog
import joincsv
import os.path
import sys
if __name__ == '__main__':
filetypes=[("Spreadsheets", "*.csv"),
("Spreadsheets", "*.xls"),
("Spreadsheets", "*.xlsx")]
if len(sys.argv) == 2:
input_filename = sys.argv[1]
else:
input_filename = tkFileDialog.askopenfilename(filetypes=filetypes)
if not os.path.isfile(input_filename):
exit(0)
output_filename = tkFileDialog.asksaveasfilename(filetypes=filetypes, defaultextension=".csv")
if not os.path.isfile(output_filename):
exit(0)
joiner = joincsv.RecordJoiner(input_filename)
joiner.save(output_filename)
|
import tkFileDialog
import joincsv
import os.path
import sys
if __name__ == '__main__':
filetypes=[("Spreadsheets", "*.csv"),
("Spreadsheets", "*.xls"),
("Spreadsheets", "*.xlsx")]
if len(sys.argv) == 2:
input_filename = sys.argv[1]
else:
input_filename = tkFileDialog.askopenfilename(filetypes=filetypes)
if not os.path.isfile(input_filename):
exit(0)
output_filename = tkFileDialog.asksaveasfilename(filetypes=filetypes, defaultextension=".csv")
if output_filename:
joiner = joincsv.RecordJoiner(input_filename)
joiner.save(output_filename)
|
Allow saving to a file that does not already exist again.
|
Allow saving to a file that does not already exist again.
|
Python
|
apache-2.0
|
peterSW/corow
|
import tkFileDialog
import joincsv
import os.path
import sys
if __name__ == '__main__':
filetypes=[("Spreadsheets", "*.csv"),
("Spreadsheets", "*.xls"),
("Spreadsheets", "*.xlsx")]
if len(sys.argv) == 2:
input_filename = sys.argv[1]
else:
input_filename = tkFileDialog.askopenfilename(filetypes=filetypes)
if not os.path.isfile(input_filename):
exit(0)
output_filename = tkFileDialog.asksaveasfilename(filetypes=filetypes, defaultextension=".csv")
- if not os.path.isfile(output_filename):
- exit(0)
-
- joiner = joincsv.RecordJoiner(input_filename)
- joiner.save(output_filename)
+ if output_filename:
+ joiner = joincsv.RecordJoiner(input_filename)
+ joiner.save(output_filename)
+
|
Allow saving to a file that does not already exist again.
|
## Code Before:
import tkFileDialog
import joincsv
import os.path
import sys
if __name__ == '__main__':
filetypes=[("Spreadsheets", "*.csv"),
("Spreadsheets", "*.xls"),
("Spreadsheets", "*.xlsx")]
if len(sys.argv) == 2:
input_filename = sys.argv[1]
else:
input_filename = tkFileDialog.askopenfilename(filetypes=filetypes)
if not os.path.isfile(input_filename):
exit(0)
output_filename = tkFileDialog.asksaveasfilename(filetypes=filetypes, defaultextension=".csv")
if not os.path.isfile(output_filename):
exit(0)
joiner = joincsv.RecordJoiner(input_filename)
joiner.save(output_filename)
## Instruction:
Allow saving to a file that does not already exist again.
## Code After:
import tkFileDialog
import joincsv
import os.path
import sys
if __name__ == '__main__':
    # File types offered by both the open and the save dialogs.
    filetypes=[("Spreadsheets", "*.csv"),
               ("Spreadsheets", "*.xls"),
               ("Spreadsheets", "*.xlsx")]
    # Input file: taken from the command line when given, otherwise
    # chosen interactively.
    if len(sys.argv) == 2:
        input_filename = sys.argv[1]
    else:
        input_filename = tkFileDialog.askopenfilename(filetypes=filetypes)
    # Exit quietly if the user cancelled or the path is not a real file.
    if not os.path.isfile(input_filename):
        exit(0)
    output_filename = tkFileDialog.asksaveasfilename(filetypes=filetypes, defaultextension=".csv")
    # An empty string means the save dialog was cancelled.  Unlike the
    # input check above, the output file need not already exist.
    if output_filename:
        joiner = joincsv.RecordJoiner(input_filename)
        joiner.save(output_filename)
|
import tkFileDialog
import joincsv
import os.path
import sys
if __name__ == '__main__':
filetypes=[("Spreadsheets", "*.csv"),
("Spreadsheets", "*.xls"),
("Spreadsheets", "*.xlsx")]
if len(sys.argv) == 2:
input_filename = sys.argv[1]
else:
input_filename = tkFileDialog.askopenfilename(filetypes=filetypes)
if not os.path.isfile(input_filename):
exit(0)
output_filename = tkFileDialog.asksaveasfilename(filetypes=filetypes, defaultextension=".csv")
- if not os.path.isfile(output_filename):
- exit(0)
-
- joiner = joincsv.RecordJoiner(input_filename)
- joiner.save(output_filename)
+ if output_filename:
+ joiner = joincsv.RecordJoiner(input_filename)
+ joiner.save(output_filename)
+
|
6ae9da5ddf987873abb6f54992907542eaf80237
|
movieman2/tests.py
|
movieman2/tests.py
|
import datetime
from django.contrib.auth.models import User
from django.test import TestCase
import tmdbsimple as tmdb
from .models import Movie
class MovieTestCase(TestCase):
def setUp(self):
User.objects.create(username="test_user", password="pass", email="[email protected]", first_name="Test",
last_name="User")
def test_can_create_movie(self):
"""Can movies be created in the database?"""
movie_data = tmdb.Movies(550).info()
m = Movie.objects.create(tmdb_id=movie_data["id"], score=10, submitter=User.objects.first(),
title=movie_data["title"], tagline=movie_data["tagline"],
overview=movie_data["overview"],
release_date=datetime.datetime.strptime(movie_data["release_date"], "%Y-%m-%d"),
budget=movie_data["budget"], vote_average=movie_data["vote_average"],
vote_count=movie_data["vote_count"], original_language=movie_data["original_language"])
m.save()
def test_can_create_movie_from_id(self):
"""Does Movie.add_from_id work?"""
Movie.add_from_id(550, User.objects.first())
|
import datetime
import tmdbsimple as tmdb
from django.contrib.auth.models import User
from django.test import TestCase
from .models import Movie
class MovieTestCase(TestCase):
def setUp(self):
User.objects.create(username="movie_test_case", password="pass", email="movie@test_case.tld",
first_name="Movie", last_name="TestCase")
def test_can_create_movie(self):
"""Can movies be created in the database?"""
movie_data = tmdb.Movies(550).info()
m = Movie.objects.create(tmdb_id=movie_data["id"], score=10, submitter=User.objects.first(),
title=movie_data["title"], tagline=movie_data["tagline"],
overview=movie_data["overview"],
release_date=datetime.datetime.strptime(movie_data["release_date"], "%Y-%m-%d"),
budget=movie_data["budget"], vote_average=movie_data["vote_average"],
vote_count=movie_data["vote_count"], original_language=movie_data["original_language"])
m.save()
def test_can_create_movie_from_id(self):
"""Does Movie.add_from_id work?"""
Movie.add_from_id(550, User.objects.first())
|
Change test case user info
|
Change test case user info
|
Python
|
mit
|
simon-andrews/movieman2,simon-andrews/movieman2
|
import datetime
+ import tmdbsimple as tmdb
from django.contrib.auth.models import User
from django.test import TestCase
- import tmdbsimple as tmdb
from .models import Movie
class MovieTestCase(TestCase):
def setUp(self):
- User.objects.create(username="test_user", password="pass", email="[email protected]", first_name="Test",
+ User.objects.create(username="movie_test_case", password="pass", email="movie@test_case.tld",
- last_name="User")
+ first_name="Movie", last_name="TestCase")
def test_can_create_movie(self):
"""Can movies be created in the database?"""
movie_data = tmdb.Movies(550).info()
m = Movie.objects.create(tmdb_id=movie_data["id"], score=10, submitter=User.objects.first(),
title=movie_data["title"], tagline=movie_data["tagline"],
overview=movie_data["overview"],
release_date=datetime.datetime.strptime(movie_data["release_date"], "%Y-%m-%d"),
budget=movie_data["budget"], vote_average=movie_data["vote_average"],
vote_count=movie_data["vote_count"], original_language=movie_data["original_language"])
m.save()
def test_can_create_movie_from_id(self):
"""Does Movie.add_from_id work?"""
Movie.add_from_id(550, User.objects.first())
|
Change test case user info
|
## Code Before:
import datetime
from django.contrib.auth.models import User
from django.test import TestCase
import tmdbsimple as tmdb
from .models import Movie
class MovieTestCase(TestCase):
def setUp(self):
User.objects.create(username="test_user", password="pass", email="[email protected]", first_name="Test",
last_name="User")
def test_can_create_movie(self):
"""Can movies be created in the database?"""
movie_data = tmdb.Movies(550).info()
m = Movie.objects.create(tmdb_id=movie_data["id"], score=10, submitter=User.objects.first(),
title=movie_data["title"], tagline=movie_data["tagline"],
overview=movie_data["overview"],
release_date=datetime.datetime.strptime(movie_data["release_date"], "%Y-%m-%d"),
budget=movie_data["budget"], vote_average=movie_data["vote_average"],
vote_count=movie_data["vote_count"], original_language=movie_data["original_language"])
m.save()
def test_can_create_movie_from_id(self):
"""Does Movie.add_from_id work?"""
Movie.add_from_id(550, User.objects.first())
## Instruction:
Change test case user info
## Code After:
import datetime
import tmdbsimple as tmdb
from django.contrib.auth.models import User
from django.test import TestCase
from .models import Movie
class MovieTestCase(TestCase):
    """Tests for the Movie model.

    NOTE(review): these tests hit the live TMDb API via ``tmdbsimple``
    (movie id 550), so they need network access -- confirm credentials
    are configured in the test environment.
    """

    def setUp(self):
        # A user to act as submitter for the movies created in the tests.
        User.objects.create(username="movie_test_case", password="pass", email="movie@test_case.tld",
                            first_name="Movie", last_name="TestCase")

    def test_can_create_movie(self):
        """Can movies be created in the database?"""
        # Fetch real movie data from TMDb and mirror it into a Movie row.
        movie_data = tmdb.Movies(550).info()
        m = Movie.objects.create(tmdb_id=movie_data["id"], score=10, submitter=User.objects.first(),
                                 title=movie_data["title"], tagline=movie_data["tagline"],
                                 overview=movie_data["overview"],
                                 release_date=datetime.datetime.strptime(movie_data["release_date"], "%Y-%m-%d"),
                                 budget=movie_data["budget"], vote_average=movie_data["vote_average"],
                                 vote_count=movie_data["vote_count"], original_language=movie_data["original_language"])
        m.save()

    def test_can_create_movie_from_id(self):
        """Does Movie.add_from_id work?"""
        Movie.add_from_id(550, User.objects.first())
|
import datetime
+ import tmdbsimple as tmdb
from django.contrib.auth.models import User
from django.test import TestCase
- import tmdbsimple as tmdb
from .models import Movie
class MovieTestCase(TestCase):
def setUp(self):
- User.objects.create(username="test_user", password="pass", email="[email protected]", first_name="Test",
? ^ - ^^ - -------------------
+ User.objects.create(username="movie_test_case", password="pass", email="movie@test_case.tld",
? ++++++ ^^ ++++++ ^^^
- last_name="User")
? ^ -
+ first_name="Movie", last_name="TestCase")
? ++++++++++++++++++++ ^^^^^^
def test_can_create_movie(self):
"""Can movies be created in the database?"""
movie_data = tmdb.Movies(550).info()
m = Movie.objects.create(tmdb_id=movie_data["id"], score=10, submitter=User.objects.first(),
title=movie_data["title"], tagline=movie_data["tagline"],
overview=movie_data["overview"],
release_date=datetime.datetime.strptime(movie_data["release_date"], "%Y-%m-%d"),
budget=movie_data["budget"], vote_average=movie_data["vote_average"],
vote_count=movie_data["vote_count"], original_language=movie_data["original_language"])
m.save()
def test_can_create_movie_from_id(self):
"""Does Movie.add_from_id work?"""
Movie.add_from_id(550, User.objects.first())
|
f74d57f4a05fa56b8668e371159affe37f4c38c3
|
opentreemap/otm_comments/models.py
|
opentreemap/otm_comments/models.py
|
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from threadedcomments.models import ThreadedComment
from django.contrib.gis.db import models
from treemap.instance import Instance
class EnhancedThreadedComment(ThreadedComment):
"""
This class wraps the ThreadedComment model with moderation specific fields
"""
# If the comment should be hidden in the default filter view for moderation
is_archived = models.BooleanField(default=False)
# We could retrieve this through the GenericForeignKey on ThreadedComment,
# but it makes things simpler to record instance here.
instance = models.ForeignKey(Instance)
def save(self, *args, **kwargs):
if hasattr(self.content_object, 'instance'):
self.instance = self.content_object.instance
super(EnhancedThreadedComment, self).save(*args, **kwargs)
|
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from threadedcomments.models import ThreadedComment
from django.contrib.gis.db import models
class EnhancedThreadedComment(ThreadedComment):
"""
This class wraps the ThreadedComment model with moderation specific fields
"""
# If the comment should be hidden in the default filter view for moderation
is_archived = models.BooleanField(default=False)
# We could retrieve this through the GenericForeignKey on ThreadedComment,
# but it makes things simpler to record instance here.
instance = models.ForeignKey('treemap.Instance')
def save(self, *args, **kwargs):
if hasattr(self.content_object, 'instance'):
self.instance = self.content_object.instance
super(EnhancedThreadedComment, self).save(*args, **kwargs)
|
Fix circular dependency problem with django apps
|
Fix circular dependency problem with django apps
It looks like translation is importing *all other* django apps in the project
when it is used from treemap. This means that it will load apps that
depend on treemap when it is not finished import treemap. So while it
appears that treemap/otm1_migrator/otm_comments have sane, non-circular
dependencies on each other, the translation app is causing the circle.
I'm pretty sure this is actually a django pattern. Django enjoys
including dynamic apps that walk through installed apps and do magic
stuff. To compensate, they provide this alternative, string-based import
strategy that dynamic apps adhere to.
|
Python
|
agpl-3.0
|
recklessromeo/otm-core,clever-crow-consulting/otm-core,maurizi/otm-core,recklessromeo/otm-core,maurizi/otm-core,RickMohr/otm-core,RickMohr/otm-core,recklessromeo/otm-core,RickMohr/otm-core,maurizi/otm-core,clever-crow-consulting/otm-core,recklessromeo/otm-core,RickMohr/otm-core,clever-crow-consulting/otm-core,maurizi/otm-core,clever-crow-consulting/otm-core
|
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from threadedcomments.models import ThreadedComment
from django.contrib.gis.db import models
-
- from treemap.instance import Instance
class EnhancedThreadedComment(ThreadedComment):
"""
This class wraps the ThreadedComment model with moderation specific fields
"""
# If the comment should be hidden in the default filter view for moderation
is_archived = models.BooleanField(default=False)
# We could retrieve this through the GenericForeignKey on ThreadedComment,
# but it makes things simpler to record instance here.
- instance = models.ForeignKey(Instance)
+ instance = models.ForeignKey('treemap.Instance')
def save(self, *args, **kwargs):
if hasattr(self.content_object, 'instance'):
self.instance = self.content_object.instance
super(EnhancedThreadedComment, self).save(*args, **kwargs)
|
Fix circular dependency problem with django apps
|
## Code Before:
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from threadedcomments.models import ThreadedComment
from django.contrib.gis.db import models
from treemap.instance import Instance
class EnhancedThreadedComment(ThreadedComment):
"""
This class wraps the ThreadedComment model with moderation specific fields
"""
# If the comment should be hidden in the default filter view for moderation
is_archived = models.BooleanField(default=False)
# We could retrieve this through the GenericForeignKey on ThreadedComment,
# but it makes things simpler to record instance here.
instance = models.ForeignKey(Instance)
def save(self, *args, **kwargs):
if hasattr(self.content_object, 'instance'):
self.instance = self.content_object.instance
super(EnhancedThreadedComment, self).save(*args, **kwargs)
## Instruction:
Fix circular dependency problem with django apps
## Code After:
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from threadedcomments.models import ThreadedComment
from django.contrib.gis.db import models
class EnhancedThreadedComment(ThreadedComment):
    """
    This class wraps the ThreadedComment model with moderation specific fields
    """
    # If the comment should be hidden in the default filter view for moderation
    is_archived = models.BooleanField(default=False)

    # We could retrieve this through the GenericForeignKey on ThreadedComment,
    # but it makes things simpler to record instance here.
    # NOTE: the lazy string reference ('treemap.Instance') avoids importing
    # the treemap app at module load time, breaking a circular dependency
    # between the django apps.
    instance = models.ForeignKey('treemap.Instance')

    def save(self, *args, **kwargs):
        # Denormalize the commented object's instance (when it has one)
        # before delegating to the normal model save.
        if hasattr(self.content_object, 'instance'):
            self.instance = self.content_object.instance
        super(EnhancedThreadedComment, self).save(*args, **kwargs)
|
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from threadedcomments.models import ThreadedComment
from django.contrib.gis.db import models
-
- from treemap.instance import Instance
class EnhancedThreadedComment(ThreadedComment):
"""
This class wraps the ThreadedComment model with moderation specific fields
"""
# If the comment should be hidden in the default filter view for moderation
is_archived = models.BooleanField(default=False)
# We could retrieve this through the GenericForeignKey on ThreadedComment,
# but it makes things simpler to record instance here.
- instance = models.ForeignKey(Instance)
+ instance = models.ForeignKey('treemap.Instance')
? +++++++++ +
def save(self, *args, **kwargs):
if hasattr(self.content_object, 'instance'):
self.instance = self.content_object.instance
super(EnhancedThreadedComment, self).save(*args, **kwargs)
|
461522c3b79202c915544466272d3bb2a3d0ecbe
|
api/radar_api/serializers/meta.py
|
api/radar_api/serializers/meta.py
|
from radar.models.users import User
from radar.serializers.fields import StringField, IntegerField
from radar.serializers.models import ModelSerializer
class TinyUserSerializer(ModelSerializer):
id = IntegerField()
username = StringField()
email = StringField()
first_name = StringField()
last_name = StringField()
class Meta:
model_class = User
fields = (
'id',
'username',
'email',
'first_name',
'last_name'
)
class CreatedUserMixin(object):
created_user = TinyUserSerializer(read_only=True)
def get_model_exclude(self):
model_exclude = super(CreatedUserMixin, self).get_model_exclude()
model_exclude.add('created_user_id')
return model_exclude
class ModifiedUserMixin(object):
modified_user = TinyUserSerializer(read_only=True)
def get_model_exclude(self):
model_exclude = super(ModifiedUserMixin, self).get_model_exclude()
model_exclude.add('modified_user_id')
return model_exclude
class MetaSerializerMixin(CreatedUserMixin, ModifiedUserMixin):
pass
|
from radar.models.users import User
from radar.serializers.fields import StringField, IntegerField, DateTimeField
from radar.serializers.models import ModelSerializer
class TinyUserSerializer(ModelSerializer):
id = IntegerField()
username = StringField()
email = StringField()
first_name = StringField()
last_name = StringField()
class Meta:
model_class = User
fields = (
'id',
'username',
'email',
'first_name',
'last_name'
)
class CreatedUserMixin(object):
created_user = TinyUserSerializer(read_only=True)
def get_model_exclude(self):
model_exclude = super(CreatedUserMixin, self).get_model_exclude()
model_exclude.add('created_user_id')
return model_exclude
class ModifiedUserMixin(object):
modified_user = TinyUserSerializer(read_only=True)
def get_model_exclude(self):
model_exclude = super(ModifiedUserMixin, self).get_model_exclude()
model_exclude.add('modified_user_id')
return model_exclude
class CreatedDateMixin(object):
created_date = DateTimeField(read_only=False)
class ModifiedDateMixin(object):
modified_date = DateTimeField(read_only=False)
class MetaSerializerMixin(CreatedUserMixin, ModifiedUserMixin, CreatedDateMixin, ModifiedDateMixin):
pass
|
Add created and modified date mixins
|
Add created and modified date mixins
|
Python
|
agpl-3.0
|
renalreg/radar,renalreg/radar,renalreg/radar,renalreg/radar
|
from radar.models.users import User
- from radar.serializers.fields import StringField, IntegerField
+ from radar.serializers.fields import StringField, IntegerField, DateTimeField
from radar.serializers.models import ModelSerializer
class TinyUserSerializer(ModelSerializer):
id = IntegerField()
username = StringField()
email = StringField()
first_name = StringField()
last_name = StringField()
class Meta:
model_class = User
fields = (
'id',
'username',
'email',
'first_name',
'last_name'
)
class CreatedUserMixin(object):
created_user = TinyUserSerializer(read_only=True)
def get_model_exclude(self):
model_exclude = super(CreatedUserMixin, self).get_model_exclude()
model_exclude.add('created_user_id')
return model_exclude
class ModifiedUserMixin(object):
modified_user = TinyUserSerializer(read_only=True)
def get_model_exclude(self):
model_exclude = super(ModifiedUserMixin, self).get_model_exclude()
model_exclude.add('modified_user_id')
return model_exclude
+ class CreatedDateMixin(object):
+ created_date = DateTimeField(read_only=False)
+
+
+ class ModifiedDateMixin(object):
+ modified_date = DateTimeField(read_only=False)
+
+
- class MetaSerializerMixin(CreatedUserMixin, ModifiedUserMixin):
+ class MetaSerializerMixin(CreatedUserMixin, ModifiedUserMixin, CreatedDateMixin, ModifiedDateMixin):
pass
|
Add created and modified date mixins
|
## Code Before:
from radar.models.users import User
from radar.serializers.fields import StringField, IntegerField
from radar.serializers.models import ModelSerializer
class TinyUserSerializer(ModelSerializer):
id = IntegerField()
username = StringField()
email = StringField()
first_name = StringField()
last_name = StringField()
class Meta:
model_class = User
fields = (
'id',
'username',
'email',
'first_name',
'last_name'
)
class CreatedUserMixin(object):
created_user = TinyUserSerializer(read_only=True)
def get_model_exclude(self):
model_exclude = super(CreatedUserMixin, self).get_model_exclude()
model_exclude.add('created_user_id')
return model_exclude
class ModifiedUserMixin(object):
modified_user = TinyUserSerializer(read_only=True)
def get_model_exclude(self):
model_exclude = super(ModifiedUserMixin, self).get_model_exclude()
model_exclude.add('modified_user_id')
return model_exclude
class MetaSerializerMixin(CreatedUserMixin, ModifiedUserMixin):
pass
## Instruction:
Add created and modified date mixins
## Code After:
from radar.models.users import User
from radar.serializers.fields import StringField, IntegerField, DateTimeField
from radar.serializers.models import ModelSerializer
class TinyUserSerializer(ModelSerializer):
    """Minimal representation of a User: id, username, email and name."""
    id = IntegerField()
    username = StringField()
    email = StringField()
    first_name = StringField()
    last_name = StringField()

    class Meta:
        # Restrict serialization to exactly the fields declared above.
        model_class = User
        fields = (
            'id',
            'username',
            'email',
            'first_name',
            'last_name'
        )
class CreatedUserMixin(object):
    """Serializer mixin exposing ``created_user`` instead of its raw id."""
    created_user = TinyUserSerializer(read_only=True)

    def get_model_exclude(self):
        # Hide the foreign-key id column; the nested serializer above
        # carries the user data instead.
        excluded = super(CreatedUserMixin, self).get_model_exclude()
        excluded.add('created_user_id')
        return excluded
class ModifiedUserMixin(object):
    """Serializer mixin exposing ``modified_user`` instead of its raw id."""
    modified_user = TinyUserSerializer(read_only=True)

    def get_model_exclude(self):
        # Hide the foreign-key id column; the nested serializer above
        # carries the user data instead.
        excluded = super(ModifiedUserMixin, self).get_model_exclude()
        excluded.add('modified_user_id')
        return excluded
class CreatedDateMixin(object):
    """Serializer mixin exposing a ``created_date`` field."""
    # read_only=False: clients are allowed to supply the timestamp.
    created_date = DateTimeField(read_only=False)
class ModifiedDateMixin(object):
    """Serializer mixin exposing a ``modified_date`` field."""
    # read_only=False: clients are allowed to supply the timestamp.
    modified_date = DateTimeField(read_only=False)
class MetaSerializerMixin(CreatedUserMixin, ModifiedUserMixin, CreatedDateMixin, ModifiedDateMixin):
    """Convenience bundle of the created/modified user and date mixins."""
    pass
|
from radar.models.users import User
- from radar.serializers.fields import StringField, IntegerField
+ from radar.serializers.fields import StringField, IntegerField, DateTimeField
? +++++++++++++++
from radar.serializers.models import ModelSerializer
class TinyUserSerializer(ModelSerializer):
id = IntegerField()
username = StringField()
email = StringField()
first_name = StringField()
last_name = StringField()
class Meta:
model_class = User
fields = (
'id',
'username',
'email',
'first_name',
'last_name'
)
class CreatedUserMixin(object):
created_user = TinyUserSerializer(read_only=True)
def get_model_exclude(self):
model_exclude = super(CreatedUserMixin, self).get_model_exclude()
model_exclude.add('created_user_id')
return model_exclude
class ModifiedUserMixin(object):
modified_user = TinyUserSerializer(read_only=True)
def get_model_exclude(self):
model_exclude = super(ModifiedUserMixin, self).get_model_exclude()
model_exclude.add('modified_user_id')
return model_exclude
+ class CreatedDateMixin(object):
+ created_date = DateTimeField(read_only=False)
+
+
+ class ModifiedDateMixin(object):
+ modified_date = DateTimeField(read_only=False)
+
+
- class MetaSerializerMixin(CreatedUserMixin, ModifiedUserMixin):
+ class MetaSerializerMixin(CreatedUserMixin, ModifiedUserMixin, CreatedDateMixin, ModifiedDateMixin):
? +++++++++++++++++++++++++++++++++++++
pass
|
0d0434744efef091fd8d26725f21c8015a06d8be
|
opentreemap/treemap/templatetags/instance_config.py
|
opentreemap/treemap/templatetags/instance_config.py
|
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django import template
from treemap.json_field import get_attr_from_json_field
register = template.Library()
def _get_color_from_config(config, name):
color = config.get(name)
if color:
return '#' + color
else:
return ''
@register.filter
def primary_color(config):
return _get_color_from_config(config,
"scss_variables.primary-color")
@register.filter
def secondary_color(config):
return _get_color_from_config(config,
"scss_variables.secondary-color")
@register.filter
def feature_enabled(instance, feature):
return instance.feature_enabled(feature)
@register.filter
def plot_field_is_writable(instanceuser, field):
return plot_is_writable(instanceuser, field)
@register.filter
def plot_is_writable(instanceuser, field=None):
    """Template filter: can this instance user write to the plot model?

    With *field* given, only permissions for that field are considered;
    otherwise any plot field permission counts.  Anonymous users (``None``
    or the empty string) can never write.
    """
    if instanceuser is None or instanceuser == '':
        return False
    perms = instanceuser.role.plot_permissions.all()
    if field:
        perms = perms.filter(field_name=field)
    # Writable when *any* matching permission allows writes.  The previous
    # code consulted only perms[0], which hid writable permissions that
    # appeared later in the queryset.
    return any(perm.allows_writes for perm in perms)
@register.filter
def instance_config(instance, field):
    """Template filter: read ``config.<field>`` from the instance's JSON config.

    Returns ``None`` when no instance is supplied.
    """
    if not instance:
        return None
    return get_attr_from_json_field(instance, "config." + field)
|
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django import template
from treemap.json_field import get_attr_from_json_field
register = template.Library()
def _get_color_from_config(config, name):
color = config.get(name)
if color:
return '#' + color
else:
return ''
@register.filter
def primary_color(config):
return _get_color_from_config(config,
"scss_variables.primary-color")
@register.filter
def secondary_color(config):
return _get_color_from_config(config,
"scss_variables.secondary-color")
@register.filter
def feature_enabled(instance, feature):
return instance.feature_enabled(feature)
@register.filter
def plot_field_is_writable(instanceuser, field):
return plot_is_writable(instanceuser, field)
@register.filter
def plot_is_writable(instanceuser, field=None):
if instanceuser is None or instanceuser == '':
return False
else:
perms = instanceuser.role.plot_permissions.all()
if field:
perms = perms.filter(field_name=field)
return any(perm.allows_writes for perm in perms)
@register.filter
def instance_config(instance, field):
if instance:
return get_attr_from_json_field(instance, "config." + field)
else:
return None
|
Allow "writable" if *any* field is writable
|
Allow "writable" if *any* field is writable
Fixes Internal Issue 598
|
Python
|
agpl-3.0
|
recklessromeo/otm-core,RickMohr/otm-core,clever-crow-consulting/otm-core,clever-crow-consulting/otm-core,recklessromeo/otm-core,clever-crow-consulting/otm-core,maurizi/otm-core,RickMohr/otm-core,maurizi/otm-core,maurizi/otm-core,recklessromeo/otm-core,RickMohr/otm-core,recklessromeo/otm-core,maurizi/otm-core,RickMohr/otm-core,clever-crow-consulting/otm-core
|
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django import template
from treemap.json_field import get_attr_from_json_field
register = template.Library()
def _get_color_from_config(config, name):
color = config.get(name)
if color:
return '#' + color
else:
return ''
@register.filter
def primary_color(config):
return _get_color_from_config(config,
"scss_variables.primary-color")
@register.filter
def secondary_color(config):
return _get_color_from_config(config,
"scss_variables.secondary-color")
@register.filter
def feature_enabled(instance, feature):
return instance.feature_enabled(feature)
@register.filter
def plot_field_is_writable(instanceuser, field):
return plot_is_writable(instanceuser, field)
@register.filter
def plot_is_writable(instanceuser, field=None):
if instanceuser is None or instanceuser == '':
return False
else:
perms = instanceuser.role.plot_permissions.all()
if field:
perms = perms.filter(field_name=field)
+ return any(perm.allows_writes for perm in perms)
- if len(perms) == 0:
- return False
- else:
- return perms[0].allows_writes
@register.filter
def instance_config(instance, field):
if instance:
return get_attr_from_json_field(instance, "config." + field)
else:
return None
|
Allow "writable" if *any* field is writable
|
## Code Before:
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django import template
from treemap.json_field import get_attr_from_json_field
register = template.Library()
def _get_color_from_config(config, name):
color = config.get(name)
if color:
return '#' + color
else:
return ''
@register.filter
def primary_color(config):
return _get_color_from_config(config,
"scss_variables.primary-color")
@register.filter
def secondary_color(config):
return _get_color_from_config(config,
"scss_variables.secondary-color")
@register.filter
def feature_enabled(instance, feature):
return instance.feature_enabled(feature)
@register.filter
def plot_field_is_writable(instanceuser, field):
return plot_is_writable(instanceuser, field)
@register.filter
def plot_is_writable(instanceuser, field=None):
if instanceuser is None or instanceuser == '':
return False
else:
perms = instanceuser.role.plot_permissions.all()
if field:
perms = perms.filter(field_name=field)
if len(perms) == 0:
return False
else:
return perms[0].allows_writes
@register.filter
def instance_config(instance, field):
if instance:
return get_attr_from_json_field(instance, "config." + field)
else:
return None
## Instruction:
Allow "writable" if *any* field is writable
## Code After:
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django import template
from treemap.json_field import get_attr_from_json_field
register = template.Library()
def _get_color_from_config(config, name):
color = config.get(name)
if color:
return '#' + color
else:
return ''
@register.filter
def primary_color(config):
return _get_color_from_config(config,
"scss_variables.primary-color")
@register.filter
def secondary_color(config):
return _get_color_from_config(config,
"scss_variables.secondary-color")
@register.filter
def feature_enabled(instance, feature):
return instance.feature_enabled(feature)
@register.filter
def plot_field_is_writable(instanceuser, field):
return plot_is_writable(instanceuser, field)
@register.filter
def plot_is_writable(instanceuser, field=None):
if instanceuser is None or instanceuser == '':
return False
else:
perms = instanceuser.role.plot_permissions.all()
if field:
perms = perms.filter(field_name=field)
return any(perm.allows_writes for perm in perms)
@register.filter
def instance_config(instance, field):
if instance:
return get_attr_from_json_field(instance, "config." + field)
else:
return None
|
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django import template
from treemap.json_field import get_attr_from_json_field
register = template.Library()
def _get_color_from_config(config, name):
color = config.get(name)
if color:
return '#' + color
else:
return ''
@register.filter
def primary_color(config):
return _get_color_from_config(config,
"scss_variables.primary-color")
@register.filter
def secondary_color(config):
return _get_color_from_config(config,
"scss_variables.secondary-color")
@register.filter
def feature_enabled(instance, feature):
return instance.feature_enabled(feature)
@register.filter
def plot_field_is_writable(instanceuser, field):
return plot_is_writable(instanceuser, field)
@register.filter
def plot_is_writable(instanceuser, field=None):
if instanceuser is None or instanceuser == '':
return False
else:
perms = instanceuser.role.plot_permissions.all()
if field:
perms = perms.filter(field_name=field)
+ return any(perm.allows_writes for perm in perms)
- if len(perms) == 0:
- return False
- else:
- return perms[0].allows_writes
@register.filter
def instance_config(instance, field):
if instance:
return get_attr_from_json_field(instance, "config." + field)
else:
return None
|
221bb27796036b348c5cf0fd06a0d57984b3591c
|
tests/integ/test_basic.py
|
tests/integ/test_basic.py
|
"""Basic scenarios, symmetric tests"""
import pytest
from bloop import (
BaseModel,
Column,
GlobalSecondaryIndex,
Integer,
MissingObjects,
)
from .models import User
def test_crud(engine):
engine.bind(User)
user = User(email="[email protected]", username="user", profile="first")
engine.save(user)
same_user = User(email=user.email, username=user.username)
engine.load(same_user)
assert user.profile == same_user.profile
same_user.profile = "second"
engine.save(same_user)
engine.load(user, consistent=True)
assert user.profile == same_user.profile
engine.delete(user)
with pytest.raises(MissingObjects) as excinfo:
engine.load(same_user, consistent=True)
assert [same_user] == excinfo.value.objects
def test_projection_overlap(engine):
class Model(BaseModel):
hash = Column(Integer, hash_key=True)
range = Column(Integer, range_key=True)
other = Column(Integer)
by_other = GlobalSecondaryIndex(projection=["other", "range"], hash_key="other")
# by_other's projected attributes overlap with the model and its own keys
engine.bind(Model)
def test_stream_creation(engine):
class Model(BaseModel):
class Meta:
stream = {
"include": ["keys"]
}
hash = Column(Integer, hash_key=True)
engine.bind(Model)
|
"""Basic scenarios, symmetric tests"""
import pytest
from bloop import (
BaseModel,
Column,
GlobalSecondaryIndex,
Integer,
MissingObjects,
)
from .models import User
def test_crud(engine):
    """Round-trip create/read/update/delete against the User model."""
    engine.bind(User)
    user = User(email="[email protected]", username="user", profile="first")
    engine.save(user)
    # Load a fresh instance by key; the saved attribute must come back.
    same_user = User(email=user.email, username=user.username)
    engine.load(same_user)
    assert user.profile == same_user.profile
    # Update through the second instance, then consistently re-load the first.
    same_user.profile = "second"
    engine.save(same_user)
    engine.load(user, consistent=True)
    assert user.profile == same_user.profile
    # After deletion, a consistent load must report the object as missing.
    engine.delete(user)
    with pytest.raises(MissingObjects) as excinfo:
        engine.load(same_user, consistent=True)
    assert [same_user] == excinfo.value.objects
def test_projection_overlap(engine):
    """A GSI whose projection overlaps the model keys binds without error."""
    class ProjectionOverlap(BaseModel):
        hash = Column(Integer, hash_key=True)
        range = Column(Integer, range_key=True)
        other = Column(Integer)
        by_other = GlobalSecondaryIndex(projection=["other", "range"], hash_key="other")
    # by_other's projected attributes overlap with the model and its own keys
    engine.bind(ProjectionOverlap)
def test_stream_creation(engine):
    """A model declaring a keys-only stream in Meta can be bound."""
    class StreamCreation(BaseModel):
        class Meta:
            stream = {
                "include": ["keys"]
            }
        hash = Column(Integer, hash_key=True)
    engine.bind(StreamCreation)
|
Rename integration test model names for debugging in console
|
Rename integration test model names for debugging in console
|
Python
|
mit
|
numberoverzero/bloop,numberoverzero/bloop
|
"""Basic scenarios, symmetric tests"""
import pytest
from bloop import (
BaseModel,
Column,
GlobalSecondaryIndex,
Integer,
MissingObjects,
)
from .models import User
def test_crud(engine):
engine.bind(User)
user = User(email="[email protected]", username="user", profile="first")
engine.save(user)
same_user = User(email=user.email, username=user.username)
engine.load(same_user)
assert user.profile == same_user.profile
same_user.profile = "second"
engine.save(same_user)
engine.load(user, consistent=True)
assert user.profile == same_user.profile
engine.delete(user)
with pytest.raises(MissingObjects) as excinfo:
engine.load(same_user, consistent=True)
assert [same_user] == excinfo.value.objects
def test_projection_overlap(engine):
- class Model(BaseModel):
+ class ProjectionOverlap(BaseModel):
hash = Column(Integer, hash_key=True)
range = Column(Integer, range_key=True)
other = Column(Integer)
by_other = GlobalSecondaryIndex(projection=["other", "range"], hash_key="other")
# by_other's projected attributes overlap with the model and its own keys
- engine.bind(Model)
+ engine.bind(ProjectionOverlap)
def test_stream_creation(engine):
- class Model(BaseModel):
+ class StreamCreation(BaseModel):
class Meta:
stream = {
"include": ["keys"]
}
hash = Column(Integer, hash_key=True)
- engine.bind(Model)
+ engine.bind(StreamCreation)
|
Rename integration test model names for debugging in console
|
## Code Before:
"""Basic scenarios, symmetric tests"""
import pytest
from bloop import (
BaseModel,
Column,
GlobalSecondaryIndex,
Integer,
MissingObjects,
)
from .models import User
def test_crud(engine):
engine.bind(User)
user = User(email="[email protected]", username="user", profile="first")
engine.save(user)
same_user = User(email=user.email, username=user.username)
engine.load(same_user)
assert user.profile == same_user.profile
same_user.profile = "second"
engine.save(same_user)
engine.load(user, consistent=True)
assert user.profile == same_user.profile
engine.delete(user)
with pytest.raises(MissingObjects) as excinfo:
engine.load(same_user, consistent=True)
assert [same_user] == excinfo.value.objects
def test_projection_overlap(engine):
class Model(BaseModel):
hash = Column(Integer, hash_key=True)
range = Column(Integer, range_key=True)
other = Column(Integer)
by_other = GlobalSecondaryIndex(projection=["other", "range"], hash_key="other")
# by_other's projected attributes overlap with the model and its own keys
engine.bind(Model)
def test_stream_creation(engine):
class Model(BaseModel):
class Meta:
stream = {
"include": ["keys"]
}
hash = Column(Integer, hash_key=True)
engine.bind(Model)
## Instruction:
Rename integration test model names for debugging in console
## Code After:
"""Basic scenarios, symmetric tests"""
import pytest
from bloop import (
BaseModel,
Column,
GlobalSecondaryIndex,
Integer,
MissingObjects,
)
from .models import User
def test_crud(engine):
engine.bind(User)
user = User(email="[email protected]", username="user", profile="first")
engine.save(user)
same_user = User(email=user.email, username=user.username)
engine.load(same_user)
assert user.profile == same_user.profile
same_user.profile = "second"
engine.save(same_user)
engine.load(user, consistent=True)
assert user.profile == same_user.profile
engine.delete(user)
with pytest.raises(MissingObjects) as excinfo:
engine.load(same_user, consistent=True)
assert [same_user] == excinfo.value.objects
def test_projection_overlap(engine):
class ProjectionOverlap(BaseModel):
hash = Column(Integer, hash_key=True)
range = Column(Integer, range_key=True)
other = Column(Integer)
by_other = GlobalSecondaryIndex(projection=["other", "range"], hash_key="other")
# by_other's projected attributes overlap with the model and its own keys
engine.bind(ProjectionOverlap)
def test_stream_creation(engine):
class StreamCreation(BaseModel):
class Meta:
stream = {
"include": ["keys"]
}
hash = Column(Integer, hash_key=True)
engine.bind(StreamCreation)
|
"""Basic scenarios, symmetric tests"""
import pytest
from bloop import (
BaseModel,
Column,
GlobalSecondaryIndex,
Integer,
MissingObjects,
)
from .models import User
def test_crud(engine):
engine.bind(User)
user = User(email="[email protected]", username="user", profile="first")
engine.save(user)
same_user = User(email=user.email, username=user.username)
engine.load(same_user)
assert user.profile == same_user.profile
same_user.profile = "second"
engine.save(same_user)
engine.load(user, consistent=True)
assert user.profile == same_user.profile
engine.delete(user)
with pytest.raises(MissingObjects) as excinfo:
engine.load(same_user, consistent=True)
assert [same_user] == excinfo.value.objects
def test_projection_overlap(engine):
- class Model(BaseModel):
? ^ ^
+ class ProjectionOverlap(BaseModel):
? ^^ ^ +++++++++ ++
hash = Column(Integer, hash_key=True)
range = Column(Integer, range_key=True)
other = Column(Integer)
by_other = GlobalSecondaryIndex(projection=["other", "range"], hash_key="other")
# by_other's projected attributes overlap with the model and its own keys
- engine.bind(Model)
+ engine.bind(ProjectionOverlap)
def test_stream_creation(engine):
- class Model(BaseModel):
+ class StreamCreation(BaseModel):
class Meta:
stream = {
"include": ["keys"]
}
hash = Column(Integer, hash_key=True)
- engine.bind(Model)
+ engine.bind(StreamCreation)
|
15fe43d0be3c665c09c898864bd2815b39fbc8a5
|
toolbox/config/common.py
|
toolbox/config/common.py
|
CURRENT_MIN_VERSION = 'v3.0'
CURRENT_MAX_VERSION = 'v3.1'
ACTIVE_REMOTE_BRANCHES = ['master', 'staging', 'demo']
DEFAULT_COMMAND_TIMEOUT = 60 * 60
CONTROLLER_PROTOCOL = 'controller'
PROTOCOLS = {'udp', 'tcp', 'ssh', 'http', 'ws', CONTROLLER_PROTOCOL}
CRP_TYPES = {'docker', 'gce', 'static'}
|
# Accepted version bounds, inclusive — presumably for the challenge config
# format; confirm against the version-checking caller.
CURRENT_MIN_VERSION = 'v3.0'
CURRENT_MAX_VERSION = 'v3.1'
# Once the Next platform supports challenge versions this can be extended.
ACTIVE_REMOTE_BRANCHES = ['master']
# One hour, in seconds.
DEFAULT_COMMAND_TIMEOUT = 60 * 60
CONTROLLER_PROTOCOL = 'controller'
# Protocols a challenge may expose; includes the special controller entry.
PROTOCOLS = {'udp', 'tcp', 'ssh', 'http', 'ws', CONTROLLER_PROTOCOL}
CRP_TYPES = {'docker', 'gce', 'static'}
|
Change v3 active branches to [master]
|
Change v3 active branches to [master]
Extend the list when it becomes relevant.
The old platform shall use the legacy branch.
|
Python
|
apache-2.0
|
avatao-content/challenge-toolbox,avatao-content/challenge-toolbox,avatao-content/challenge-toolbox,avatao-content/challenge-toolbox,avatao-content/challenge-toolbox,avatao-content/challenge-toolbox,avatao-content/challenge-toolbox,avatao-content/challenge-toolbox,avatao-content/challenge-toolbox
|
CURRENT_MIN_VERSION = 'v3.0'
CURRENT_MAX_VERSION = 'v3.1'
+ # Once the Next platform supports challenge versions this can be extended.
- ACTIVE_REMOTE_BRANCHES = ['master', 'staging', 'demo']
+ ACTIVE_REMOTE_BRANCHES = ['master']
DEFAULT_COMMAND_TIMEOUT = 60 * 60
CONTROLLER_PROTOCOL = 'controller'
PROTOCOLS = {'udp', 'tcp', 'ssh', 'http', 'ws', CONTROLLER_PROTOCOL}
CRP_TYPES = {'docker', 'gce', 'static'}
|
Change v3 active branches to
|
## Code Before:
CURRENT_MIN_VERSION = 'v3.0'
CURRENT_MAX_VERSION = 'v3.1'
ACTIVE_REMOTE_BRANCHES = ['master', 'staging', 'demo']
DEFAULT_COMMAND_TIMEOUT = 60 * 60
CONTROLLER_PROTOCOL = 'controller'
PROTOCOLS = {'udp', 'tcp', 'ssh', 'http', 'ws', CONTROLLER_PROTOCOL}
CRP_TYPES = {'docker', 'gce', 'static'}
## Instruction:
Change v3 active branches to [master]
## Code After:
CURRENT_MIN_VERSION = 'v3.0'
CURRENT_MAX_VERSION = 'v3.1'
# Once the Next platform supports challenge versions this can be extended.
ACTIVE_REMOTE_BRANCHES = ['master']
DEFAULT_COMMAND_TIMEOUT = 60 * 60
CONTROLLER_PROTOCOL = 'controller'
PROTOCOLS = {'udp', 'tcp', 'ssh', 'http', 'ws', CONTROLLER_PROTOCOL}
CRP_TYPES = {'docker', 'gce', 'static'}
|
CURRENT_MIN_VERSION = 'v3.0'
CURRENT_MAX_VERSION = 'v3.1'
+ # Once the Next platform supports challenge versions this can be extended.
- ACTIVE_REMOTE_BRANCHES = ['master', 'staging', 'demo']
? -------------------
+ ACTIVE_REMOTE_BRANCHES = ['master']
DEFAULT_COMMAND_TIMEOUT = 60 * 60
CONTROLLER_PROTOCOL = 'controller'
PROTOCOLS = {'udp', 'tcp', 'ssh', 'http', 'ws', CONTROLLER_PROTOCOL}
CRP_TYPES = {'docker', 'gce', 'static'}
|
20017da43fe1bf5287b33d9d2fc7f597850bb5b5
|
readthedocs/settings/proxito/base.py
|
readthedocs/settings/proxito/base.py
|
class CommunityProxitoSettingsMixin:
ROOT_URLCONF = 'readthedocs.proxito.urls'
USE_SUBDOMAIN = True
@property
def MIDDLEWARE(self): # noqa
# Use our new middleware instead of the old one
classes = super().MIDDLEWARE
classes = list(classes)
index = classes.index(
'readthedocs.core.middleware.SubdomainMiddleware'
)
classes[index] = 'readthedocs.proxito.middleware.ProxitoMiddleware'
middleware_to_remove = (
'readthedocs.core.middleware.SingleVersionMiddleware',
'csp.middleware.CSPMiddleware',
)
for mw in middleware_to_remove:
if mw in classes:
classes.remove(mw)
return classes
|
class CommunityProxitoSettingsMixin:
    """Settings overrides shared by proxito deployments."""

    ROOT_URLCONF = 'readthedocs.proxito.urls'
    USE_SUBDOMAIN = True

    @property
    def DATABASES(self):
        """Database settings with long-lived connections.

        Keeping connections alive reduces the latency of establishing a
        new postgres connection per request.
        """
        databases = getattr(super(), 'DATABASES', {})
        for connection_settings in databases.values():
            connection_settings['CONN_MAX_AGE'] = 86400  # 24 hours, in seconds
        return databases

    @property
    def MIDDLEWARE(self):  # noqa
        """Middleware stack with proxito swapped in for the subdomain code."""
        middleware = list(super().MIDDLEWARE)
        position = middleware.index(
            'readthedocs.core.middleware.SubdomainMiddleware'
        )
        middleware[position] = 'readthedocs.proxito.middleware.ProxitoMiddleware'
        for unwanted in (
                'readthedocs.core.middleware.SingleVersionMiddleware',
                'csp.middleware.CSPMiddleware',
        ):
            if unwanted in middleware:
                middleware.remove(unwanted)
        return middleware
|
Expand the logic in our proxito mixin.
|
Expand the logic in our proxito mixin.
This makes proxito mixin match production for .com/.org
in the areas where we are overriding the same things.
|
Python
|
mit
|
rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org
|
+
class CommunityProxitoSettingsMixin:
ROOT_URLCONF = 'readthedocs.proxito.urls'
USE_SUBDOMAIN = True
+
+ @property
+ def DATABASES(self):
+ # This keeps connections to the DB alive,
+ # which reduces latency with connecting to postgres
+ dbs = getattr(super(), 'DATABASES', {})
+ for db in dbs.keys():
+ dbs[db]['CONN_MAX_AGE'] = 86400
+ return dbs
@property
def MIDDLEWARE(self): # noqa
# Use our new middleware instead of the old one
classes = super().MIDDLEWARE
classes = list(classes)
index = classes.index(
'readthedocs.core.middleware.SubdomainMiddleware'
)
classes[index] = 'readthedocs.proxito.middleware.ProxitoMiddleware'
middleware_to_remove = (
'readthedocs.core.middleware.SingleVersionMiddleware',
'csp.middleware.CSPMiddleware',
)
for mw in middleware_to_remove:
if mw in classes:
classes.remove(mw)
return classes
|
Expand the logic in our proxito mixin.
|
## Code Before:
class CommunityProxitoSettingsMixin:
ROOT_URLCONF = 'readthedocs.proxito.urls'
USE_SUBDOMAIN = True
@property
def MIDDLEWARE(self): # noqa
# Use our new middleware instead of the old one
classes = super().MIDDLEWARE
classes = list(classes)
index = classes.index(
'readthedocs.core.middleware.SubdomainMiddleware'
)
classes[index] = 'readthedocs.proxito.middleware.ProxitoMiddleware'
middleware_to_remove = (
'readthedocs.core.middleware.SingleVersionMiddleware',
'csp.middleware.CSPMiddleware',
)
for mw in middleware_to_remove:
if mw in classes:
classes.remove(mw)
return classes
## Instruction:
Expand the logic in our proxito mixin.
## Code After:
class CommunityProxitoSettingsMixin:
ROOT_URLCONF = 'readthedocs.proxito.urls'
USE_SUBDOMAIN = True
@property
def DATABASES(self):
# This keeps connections to the DB alive,
# which reduces latency with connecting to postgres
dbs = getattr(super(), 'DATABASES', {})
for db in dbs.keys():
dbs[db]['CONN_MAX_AGE'] = 86400
return dbs
@property
def MIDDLEWARE(self): # noqa
# Use our new middleware instead of the old one
classes = super().MIDDLEWARE
classes = list(classes)
index = classes.index(
'readthedocs.core.middleware.SubdomainMiddleware'
)
classes[index] = 'readthedocs.proxito.middleware.ProxitoMiddleware'
middleware_to_remove = (
'readthedocs.core.middleware.SingleVersionMiddleware',
'csp.middleware.CSPMiddleware',
)
for mw in middleware_to_remove:
if mw in classes:
classes.remove(mw)
return classes
|
+
class CommunityProxitoSettingsMixin:
ROOT_URLCONF = 'readthedocs.proxito.urls'
USE_SUBDOMAIN = True
+
+ @property
+ def DATABASES(self):
+ # This keeps connections to the DB alive,
+ # which reduces latency with connecting to postgres
+ dbs = getattr(super(), 'DATABASES', {})
+ for db in dbs.keys():
+ dbs[db]['CONN_MAX_AGE'] = 86400
+ return dbs
@property
def MIDDLEWARE(self): # noqa
# Use our new middleware instead of the old one
classes = super().MIDDLEWARE
classes = list(classes)
index = classes.index(
'readthedocs.core.middleware.SubdomainMiddleware'
)
classes[index] = 'readthedocs.proxito.middleware.ProxitoMiddleware'
middleware_to_remove = (
'readthedocs.core.middleware.SingleVersionMiddleware',
'csp.middleware.CSPMiddleware',
)
for mw in middleware_to_remove:
if mw in classes:
classes.remove(mw)
return classes
|
f39f7d64ba8ca8051b24407811239f960cc6f561
|
lib/collect/backend.py
|
lib/collect/backend.py
|
import lib.collect.config as config
if config.BACKEND == 'dynamodb':
import lib.collect.backends.dymamodb as api
else:
import lib.collect.backends.localfs as api
|
import lib.collect.config as config
# Select the storage backend from configuration.  Falls back to the local
# filesystem backend both when config names any backend other than
# 'dynamodb' and when config defines no BACKEND attribute at all
# (the AttributeError raised by config.BACKEND).
try:
    if config.BACKEND == 'dynamodb':
        # NOTE(review): module name 'dymamodb' looks like a typo for
        # 'dynamodb' — confirm the actual package name before renaming.
        import lib.collect.backends.dymamodb as api
    else:
        import lib.collect.backends.localfs as api
except AttributeError:
    import lib.collect.backends.localfs as api
|
Fix bug in module selection.
|
Fix bug in module selection.
|
Python
|
mit
|
ic/mark0
|
import lib.collect.config as config
+ try:
- if config.BACKEND == 'dynamodb':
+ if config.BACKEND == 'dynamodb':
- import lib.collect.backends.dymamodb as api
+ import lib.collect.backends.dymamodb as api
- else:
+ else:
+ import lib.collect.backends.localfs as api
+ except AttributeError:
import lib.collect.backends.localfs as api
|
Fix bug in module selection.
|
## Code Before:
import lib.collect.config as config
if config.BACKEND == 'dynamodb':
import lib.collect.backends.dymamodb as api
else:
import lib.collect.backends.localfs as api
## Instruction:
Fix bug in module selection.
## Code After:
import lib.collect.config as config
try:
if config.BACKEND == 'dynamodb':
import lib.collect.backends.dymamodb as api
else:
import lib.collect.backends.localfs as api
except AttributeError:
import lib.collect.backends.localfs as api
|
import lib.collect.config as config
+ try:
- if config.BACKEND == 'dynamodb':
+ if config.BACKEND == 'dynamodb':
? ++++
- import lib.collect.backends.dymamodb as api
+ import lib.collect.backends.dymamodb as api
? ++++
- else:
+ else:
+ import lib.collect.backends.localfs as api
+ except AttributeError:
import lib.collect.backends.localfs as api
|
43d14f73055643a2e4921a58aa1bf5e14fdf8e74
|
linter.py
|
linter.py
|
import logging
import re
from SublimeLinter.lint import NodeLinter
logger = logging.getLogger('SublimeLinter.plugin.tslint')
class Tslint(NodeLinter):
cmd = 'tslint --format verbose ${file}'
regex = (
r'^(?:'
r'(ERROR:\s+\((?P<error>.*)\))|'
r'(WARNING:\s+\((?P<warning>.*)\))'
r')?'
r'.+?\[(?P<line>\d+), (?P<col>\d+)\]: '
r'(?P<message>.+)'
)
tempfile_suffix = '-'
defaults = {
'selector': 'source.ts, source.tsx'
}
def on_stderr(self, stderr):
# suppress warnings like "rule requires type information"
stderr = re.sub(
'Warning: .+\n', '', stderr)
if stderr:
self.notify_failure()
logger.error(stderr)
|
import logging
import re
from SublimeLinter.lint import NodeLinter
logger = logging.getLogger('SublimeLinter.plugin.tslint')
class Tslint(NodeLinter):
    """SublimeLinter plugin that runs ``tslint`` over TypeScript sources."""
    # Verbose formatter so rule names appear in the output we parse below.
    cmd = 'tslint --format verbose ${file}'
    # Matches lines shaped like:
    #   ERROR: (rule-name) path/to/file.ts[12, 3]: message
    # The severity prefix group is optional.
    regex = (
        r'^(?:'
        r'(ERROR:\s+\((?P<error>.*)\))|'
        r'(WARNING:\s+\((?P<warning>.*)\))'
        r')?'
        r'\s+(?P<filename>.+?)'
        r'\[(?P<line>\d+), (?P<col>\d+)\]: '
        r'(?P<message>.+)'
    )
    # '-' tells SublimeLinter to lint the file on disk — TODO confirm
    # against the NodeLinter tempfile_suffix documentation.
    tempfile_suffix = '-'
    defaults = {
        'selector': 'source.ts, source.tsx'
    }
    def on_stderr(self, stderr):
        # suppress warnings like "rule requires type information"
        stderr = re.sub(
            'Warning: .+\n', '', stderr)
        if stderr:
            # Anything left on stderr is treated as a real failure.
            self.notify_failure()
            logger.error(stderr)
|
Update regex to include filename capture group.
|
Update regex to include filename capture group.
|
Python
|
mit
|
lavrton/SublimeLinter-contrib-tslint
|
import logging
import re
from SublimeLinter.lint import NodeLinter
logger = logging.getLogger('SublimeLinter.plugin.tslint')
class Tslint(NodeLinter):
cmd = 'tslint --format verbose ${file}'
regex = (
r'^(?:'
r'(ERROR:\s+\((?P<error>.*)\))|'
r'(WARNING:\s+\((?P<warning>.*)\))'
r')?'
+ r'\s+(?P<filename>.+?)'
- r'.+?\[(?P<line>\d+), (?P<col>\d+)\]: '
+ r'\[(?P<line>\d+), (?P<col>\d+)\]: '
r'(?P<message>.+)'
)
tempfile_suffix = '-'
defaults = {
'selector': 'source.ts, source.tsx'
}
def on_stderr(self, stderr):
# suppress warnings like "rule requires type information"
stderr = re.sub(
'Warning: .+\n', '', stderr)
if stderr:
self.notify_failure()
logger.error(stderr)
|
Update regex to include filename capture group.
|
## Code Before:
import logging
import re
from SublimeLinter.lint import NodeLinter
logger = logging.getLogger('SublimeLinter.plugin.tslint')
class Tslint(NodeLinter):
cmd = 'tslint --format verbose ${file}'
regex = (
r'^(?:'
r'(ERROR:\s+\((?P<error>.*)\))|'
r'(WARNING:\s+\((?P<warning>.*)\))'
r')?'
r'.+?\[(?P<line>\d+), (?P<col>\d+)\]: '
r'(?P<message>.+)'
)
tempfile_suffix = '-'
defaults = {
'selector': 'source.ts, source.tsx'
}
def on_stderr(self, stderr):
# suppress warnings like "rule requires type information"
stderr = re.sub(
'Warning: .+\n', '', stderr)
if stderr:
self.notify_failure()
logger.error(stderr)
## Instruction:
Update regex to include filename capture group.
## Code After:
import logging
import re
from SublimeLinter.lint import NodeLinter
logger = logging.getLogger('SublimeLinter.plugin.tslint')
class Tslint(NodeLinter):
cmd = 'tslint --format verbose ${file}'
regex = (
r'^(?:'
r'(ERROR:\s+\((?P<error>.*)\))|'
r'(WARNING:\s+\((?P<warning>.*)\))'
r')?'
r'\s+(?P<filename>.+?)'
r'\[(?P<line>\d+), (?P<col>\d+)\]: '
r'(?P<message>.+)'
)
tempfile_suffix = '-'
defaults = {
'selector': 'source.ts, source.tsx'
}
def on_stderr(self, stderr):
# suppress warnings like "rule requires type information"
stderr = re.sub(
'Warning: .+\n', '', stderr)
if stderr:
self.notify_failure()
logger.error(stderr)
|
import logging
import re
from SublimeLinter.lint import NodeLinter
logger = logging.getLogger('SublimeLinter.plugin.tslint')
class Tslint(NodeLinter):
cmd = 'tslint --format verbose ${file}'
regex = (
r'^(?:'
r'(ERROR:\s+\((?P<error>.*)\))|'
r'(WARNING:\s+\((?P<warning>.*)\))'
r')?'
+ r'\s+(?P<filename>.+?)'
- r'.+?\[(?P<line>\d+), (?P<col>\d+)\]: '
? ---
+ r'\[(?P<line>\d+), (?P<col>\d+)\]: '
r'(?P<message>.+)'
)
tempfile_suffix = '-'
defaults = {
'selector': 'source.ts, source.tsx'
}
def on_stderr(self, stderr):
# suppress warnings like "rule requires type information"
stderr = re.sub(
'Warning: .+\n', '', stderr)
if stderr:
self.notify_failure()
logger.error(stderr)
|
c7ef5d2c049beba4bd1b12ec2e62a61446746a8a
|
unsubscribe/views.py
|
unsubscribe/views.py
|
from django import http
from mailgun import utils
import models as unsubscribe_model
def unsubscribe_webhook(request):
verified = utils.verify_webhook(
request.POST.get('token'),
request.POST.get('timestamp'),
request.POST.get('signature')
)
if not verified:
return http.HttpResponseForbidden()
address = request.POST.get('recipient')
if request.POST.get('mailing-list'):
unsubscribe_model.unsubscribe_from_sequence(address)
else:
unsubscribe_model.unsubscribe_completely(address)
|
from django import http
from django.views.decorators.csrf import csrf_exempt
from mailgun import utils
import models as unsubscribe_model
@csrf_exempt
def unsubscribe_webhook(request):
    """Mailgun unsubscribe webhook endpoint.

    Verifies the webhook signature, unsubscribes the recipient (from the
    sequence when 'mailing-list' is posted, otherwise via
    ``unsubscribe_user``), and returns an empty HTTP 200 response.
    """
    verified = utils.verify_webhook(
        request.POST.get('token'),
        request.POST.get('timestamp'),
        request.POST.get('signature')
    )
    if not verified:
        # Signature mismatch: reject the request outright.
        return http.HttpResponseForbidden()
    address = request.POST.get('recipient')
    try:
        if request.POST.get('mailing-list'):
            unsubscribe_model.unsubscribe_from_sequence(address)
        else:
            unsubscribe_model.unsubscribe_user(address)
    except:
        # NOTE(review): bare ``except: raise`` is a no-op wrapper — it
        # neither handles nor logs; consider removing the try block.
        raise
    return http.HttpResponse('')
|
Return http 200 for webhooks
|
Return http 200 for webhooks
|
Python
|
mit
|
p2pu/mechanical-mooc,p2pu/mechanical-mooc,p2pu/mechanical-mooc,p2pu/mechanical-mooc
|
from django import http
+ from django.views.decorators.csrf import csrf_exempt
+
from mailgun import utils
-
import models as unsubscribe_model
+ @csrf_exempt
def unsubscribe_webhook(request):
verified = utils.verify_webhook(
request.POST.get('token'),
request.POST.get('timestamp'),
request.POST.get('signature')
)
if not verified:
return http.HttpResponseForbidden()
address = request.POST.get('recipient')
+ try:
- if request.POST.get('mailing-list'):
+ if request.POST.get('mailing-list'):
- unsubscribe_model.unsubscribe_from_sequence(address)
+ unsubscribe_model.unsubscribe_from_sequence(address)
- else:
+ else:
- unsubscribe_model.unsubscribe_completely(address)
+ unsubscribe_model.unsubscribe_user(address)
+ except:
+ raise
+
+ return http.HttpResponse('')
|
Return http 200 for webhooks
|
## Code Before:
from django import http
from mailgun import utils
import models as unsubscribe_model
def unsubscribe_webhook(request):
verified = utils.verify_webhook(
request.POST.get('token'),
request.POST.get('timestamp'),
request.POST.get('signature')
)
if not verified:
return http.HttpResponseForbidden()
address = request.POST.get('recipient')
if request.POST.get('mailing-list'):
unsubscribe_model.unsubscribe_from_sequence(address)
else:
unsubscribe_model.unsubscribe_completely(address)
## Instruction:
Return http 200 for webhooks
## Code After:
from django import http
from django.views.decorators.csrf import csrf_exempt
from mailgun import utils
import models as unsubscribe_model
@csrf_exempt
def unsubscribe_webhook(request):
verified = utils.verify_webhook(
request.POST.get('token'),
request.POST.get('timestamp'),
request.POST.get('signature')
)
if not verified:
return http.HttpResponseForbidden()
address = request.POST.get('recipient')
try:
if request.POST.get('mailing-list'):
unsubscribe_model.unsubscribe_from_sequence(address)
else:
unsubscribe_model.unsubscribe_user(address)
except:
raise
return http.HttpResponse('')
|
from django import http
+ from django.views.decorators.csrf import csrf_exempt
+
from mailgun import utils
-
import models as unsubscribe_model
+ @csrf_exempt
def unsubscribe_webhook(request):
verified = utils.verify_webhook(
request.POST.get('token'),
request.POST.get('timestamp'),
request.POST.get('signature')
)
if not verified:
return http.HttpResponseForbidden()
address = request.POST.get('recipient')
+ try:
- if request.POST.get('mailing-list'):
+ if request.POST.get('mailing-list'):
? ++++
- unsubscribe_model.unsubscribe_from_sequence(address)
+ unsubscribe_model.unsubscribe_from_sequence(address)
? ++++
- else:
+ else:
? ++++
- unsubscribe_model.unsubscribe_completely(address)
? ^^^^^ ^^^^
+ unsubscribe_model.unsubscribe_user(address)
? ++++ ^^ ^
+ except:
+ raise
+ return http.HttpResponse('')
+
|
046fe99e4e2de0503c44555287eedaedc56ef280
|
skimage/filters/tests/test_filter_import.py
|
skimage/filters/tests/test_filter_import.py
|
from warnings import catch_warnings, simplefilter
def test_filter_import():
with catch_warnings():
simplefilter('ignore')
from skimage import filter as F
assert('sobel' in dir(F))
assert any(['has been renamed' in w
for (w, _, _) in F.__warningregistry__]), F.__warningregistry__
|
from warnings import catch_warnings, simplefilter
def test_filter_import():
with catch_warnings():
simplefilter('always')
from skimage import filter as F
assert('sobel' in dir(F))
assert any(['has been renamed' in w
for (w, _, _) in F.__warningregistry__])
|
Make sure warning is raised upon import
|
Make sure warning is raised upon import
|
Python
|
bsd-3-clause
|
bennlich/scikit-image,vighneshbirodkar/scikit-image,ofgulban/scikit-image,youprofit/scikit-image,pratapvardhan/scikit-image,robintw/scikit-image,ofgulban/scikit-image,GaZ3ll3/scikit-image,Midafi/scikit-image,Britefury/scikit-image,pratapvardhan/scikit-image,ofgulban/scikit-image,Hiyorimi/scikit-image,Britefury/scikit-image,emon10005/scikit-image,jwiggins/scikit-image,Hiyorimi/scikit-image,juliusbierk/scikit-image,ClinicalGraphics/scikit-image,rjeli/scikit-image,chriscrosscutler/scikit-image,michaelpacer/scikit-image,chriscrosscutler/scikit-image,bsipocz/scikit-image,keflavich/scikit-image,rjeli/scikit-image,oew1v07/scikit-image,youprofit/scikit-image,warmspringwinds/scikit-image,Midafi/scikit-image,paalge/scikit-image,ajaybhat/scikit-image,oew1v07/scikit-image,dpshelio/scikit-image,michaelaye/scikit-image,emon10005/scikit-image,vighneshbirodkar/scikit-image,newville/scikit-image,vighneshbirodkar/scikit-image,newville/scikit-image,paalge/scikit-image,warmspringwinds/scikit-image,michaelpacer/scikit-image,juliusbierk/scikit-image,rjeli/scikit-image,michaelaye/scikit-image,paalge/scikit-image,blink1073/scikit-image,robintw/scikit-image,blink1073/scikit-image,ClinicalGraphics/scikit-image,dpshelio/scikit-image,ajaybhat/scikit-image,WarrenWeckesser/scikits-image,keflavich/scikit-image,bennlich/scikit-image,bsipocz/scikit-image,jwiggins/scikit-image,GaZ3ll3/scikit-image,WarrenWeckesser/scikits-image
|
from warnings import catch_warnings, simplefilter
def test_filter_import():
with catch_warnings():
- simplefilter('ignore')
+ simplefilter('always')
from skimage import filter as F
assert('sobel' in dir(F))
assert any(['has been renamed' in w
- for (w, _, _) in F.__warningregistry__]), F.__warningregistry__
+ for (w, _, _) in F.__warningregistry__])
|
Make sure warning is raised upon import
|
## Code Before:
from warnings import catch_warnings, simplefilter
def test_filter_import():
with catch_warnings():
simplefilter('ignore')
from skimage import filter as F
assert('sobel' in dir(F))
assert any(['has been renamed' in w
for (w, _, _) in F.__warningregistry__]), F.__warningregistry__
## Instruction:
Make sure warning is raised upon import
## Code After:
from warnings import catch_warnings, simplefilter
def test_filter_import():
with catch_warnings():
simplefilter('always')
from skimage import filter as F
assert('sobel' in dir(F))
assert any(['has been renamed' in w
for (w, _, _) in F.__warningregistry__])
|
from warnings import catch_warnings, simplefilter
def test_filter_import():
with catch_warnings():
- simplefilter('ignore')
? ^^^^^^
+ simplefilter('always')
? ^^^^^^
from skimage import filter as F
assert('sobel' in dir(F))
assert any(['has been renamed' in w
- for (w, _, _) in F.__warningregistry__]), F.__warningregistry__
? -----------------------
+ for (w, _, _) in F.__warningregistry__])
|
e6a1e9670e857119c7e6c9250849ee4edd026bad
|
tests/settings_base.py
|
tests/settings_base.py
|
import os
ROOT_URLCONF = 'tests.urls'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'tests.app',
]
STATIC_URL = '/static/'
SECRET_KEY = 'foobar'
SITE_ID = 1234 # Needed for 1.3 compatibility
# Used to construct unique test database names to allow detox to run multiple
# versions at the same time
uid = os.getenv('UID', '')
if uid:
db_suffix = '_%s' % uid
else:
db_suffix = ''
|
import os
ROOT_URLCONF = 'tests.urls'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'tests.app',
]
STATIC_URL = '/static/'
SECRET_KEY = 'foobar'
SITE_ID = 1234 # Needed for 1.3 compatibility
# Used to construct unique test database names to allow detox to run multiple
# versions at the same time
uid = os.getenv('UID', '')
if uid:
db_suffix = '_%s' % uid
else:
db_suffix = ''
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
|
Make sure AuthenticationMiddleware is defined in settings during tests.
|
Make sure AuthenticationMiddleware is defined in settings
during tests.
|
Python
|
bsd-3-clause
|
ktosiek/pytest-django,RonnyPfannschmidt/pytest_django,pombredanne/pytest_django,hoh/pytest-django,davidszotten/pytest-django,ojake/pytest-django,bforchhammer/pytest-django,reincubate/pytest-django,pelme/pytest-django,thedrow/pytest-django,aptivate/pytest-django,tomviner/pytest-django,felixonmars/pytest-django
|
import os
ROOT_URLCONF = 'tests.urls'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'tests.app',
]
STATIC_URL = '/static/'
SECRET_KEY = 'foobar'
SITE_ID = 1234 # Needed for 1.3 compatibility
# Used to construct unique test database names to allow detox to run multiple
# versions at the same time
uid = os.getenv('UID', '')
if uid:
db_suffix = '_%s' % uid
else:
db_suffix = ''
+ MIDDLEWARE_CLASSES = (
+ 'django.contrib.sessions.middleware.SessionMiddleware',
+ 'django.middleware.common.CommonMiddleware',
+ 'django.middleware.csrf.CsrfViewMiddleware',
+ 'django.contrib.auth.middleware.AuthenticationMiddleware',
+ 'django.contrib.messages.middleware.MessageMiddleware',
+ 'django.middleware.clickjacking.XFrameOptionsMiddleware',
+ )
+
|
Make sure AuthenticationMiddleware is defined in settings during tests.
|
## Code Before:
import os
ROOT_URLCONF = 'tests.urls'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'tests.app',
]
STATIC_URL = '/static/'
SECRET_KEY = 'foobar'
SITE_ID = 1234 # Needed for 1.3 compatibility
# Used to construct unique test database names to allow detox to run multiple
# versions at the same time
uid = os.getenv('UID', '')
if uid:
db_suffix = '_%s' % uid
else:
db_suffix = ''
## Instruction:
Make sure AuthenticationMiddleware is defined in settings during tests.
## Code After:
import os
ROOT_URLCONF = 'tests.urls'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'tests.app',
]
STATIC_URL = '/static/'
SECRET_KEY = 'foobar'
SITE_ID = 1234 # Needed for 1.3 compatibility
# Used to construct unique test database names to allow detox to run multiple
# versions at the same time
uid = os.getenv('UID', '')
if uid:
db_suffix = '_%s' % uid
else:
db_suffix = ''
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
|
import os
ROOT_URLCONF = 'tests.urls'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'tests.app',
]
STATIC_URL = '/static/'
SECRET_KEY = 'foobar'
SITE_ID = 1234 # Needed for 1.3 compatibility
# Used to construct unique test database names to allow detox to run multiple
# versions at the same time
uid = os.getenv('UID', '')
if uid:
db_suffix = '_%s' % uid
else:
db_suffix = ''
+
+ MIDDLEWARE_CLASSES = (
+ 'django.contrib.sessions.middleware.SessionMiddleware',
+ 'django.middleware.common.CommonMiddleware',
+ 'django.middleware.csrf.CsrfViewMiddleware',
+ 'django.contrib.auth.middleware.AuthenticationMiddleware',
+ 'django.contrib.messages.middleware.MessageMiddleware',
+ 'django.middleware.clickjacking.XFrameOptionsMiddleware',
+ )
|
f3f210b523f1733e48bb6316ecbb15e198dd503c
|
examples/field_example.py
|
examples/field_example.py
|
import graphene
class Person(graphene.Interface):
name = graphene.String()
age = graphene.ID()
class Patron(Person):
id = graphene.ID()
class Query(graphene.ObjectType):
patron = graphene.Field(Patron)
def resolve_patron(self, args, info):
return Patron(id=1, name='Demo')
schema = graphene.Schema(query=Query)
query = '''
query something{
patron {
id
name
}
}
'''
result = schema.execute(query)
# Print the result
print(result.data['patron'])
|
import graphene
class Patron(graphene.ObjectType):
id = graphene.ID()
name = graphene.String()
age = graphene.ID()
class Query(graphene.ObjectType):
patron = graphene.Field(Patron)
def resolve_patron(self, args, info):
return Patron(id=1, name='Demo')
schema = graphene.Schema(query=Query)
query = '''
query something{
patron {
id
name
}
}
'''
result = schema.execute(query)
print(result.data['patron'])
|
Modify the field example to remove the use of interface
|
Modify the field example to remove the use of interface
|
Python
|
mit
|
sjhewitt/graphene,Globegitter/graphene,Globegitter/graphene,graphql-python/graphene,graphql-python/graphene,sjhewitt/graphene
|
import graphene
- class Person(graphene.Interface):
+ class Patron(graphene.ObjectType):
+ id = graphene.ID()
name = graphene.String()
age = graphene.ID()
-
-
- class Patron(Person):
- id = graphene.ID()
class Query(graphene.ObjectType):
patron = graphene.Field(Patron)
def resolve_patron(self, args, info):
return Patron(id=1, name='Demo')
schema = graphene.Schema(query=Query)
query = '''
query something{
patron {
id
name
}
}
'''
result = schema.execute(query)
- # Print the result
print(result.data['patron'])
|
Modify the field example to remove the use of interface
|
## Code Before:
import graphene
class Person(graphene.Interface):
name = graphene.String()
age = graphene.ID()
class Patron(Person):
id = graphene.ID()
class Query(graphene.ObjectType):
patron = graphene.Field(Patron)
def resolve_patron(self, args, info):
return Patron(id=1, name='Demo')
schema = graphene.Schema(query=Query)
query = '''
query something{
patron {
id
name
}
}
'''
result = schema.execute(query)
# Print the result
print(result.data['patron'])
## Instruction:
Modify the field example to remove the use of interface
## Code After:
import graphene
class Patron(graphene.ObjectType):
id = graphene.ID()
name = graphene.String()
age = graphene.ID()
class Query(graphene.ObjectType):
patron = graphene.Field(Patron)
def resolve_patron(self, args, info):
return Patron(id=1, name='Demo')
schema = graphene.Schema(query=Query)
query = '''
query something{
patron {
id
name
}
}
'''
result = schema.execute(query)
print(result.data['patron'])
|
import graphene
- class Person(graphene.Interface):
+ class Patron(graphene.ObjectType):
+ id = graphene.ID()
name = graphene.String()
age = graphene.ID()
-
-
- class Patron(Person):
- id = graphene.ID()
class Query(graphene.ObjectType):
patron = graphene.Field(Patron)
def resolve_patron(self, args, info):
return Patron(id=1, name='Demo')
schema = graphene.Schema(query=Query)
query = '''
query something{
patron {
id
name
}
}
'''
result = schema.execute(query)
- # Print the result
print(result.data['patron'])
|
ac664513eb1e99bc7aad9dda70a155e25fcff084
|
tests/services/shop/order/test_models_order_payment_state.py
|
tests/services/shop/order/test_models_order_payment_state.py
|
from byceps.services.shop.order.models.order import PaymentState
from testfixtures.shop_order import create_order
from testfixtures.user import create_user
def test_is_open():
payment_state = PaymentState.open
order = create_order_with_payment_state(payment_state)
assert order.payment_state == payment_state
assert order.is_open == True
assert order.is_canceled == False
assert order.is_paid == False
def test_is_open():
payment_state = PaymentState.canceled
order = create_order_with_payment_state(payment_state)
assert order.payment_state == payment_state
assert order.is_open == False
assert order.is_canceled == True
assert order.is_paid == False
def test_is_open():
payment_state = PaymentState.paid
order = create_order_with_payment_state(payment_state)
assert order.payment_state == payment_state
assert order.is_open == False
assert order.is_canceled == False
assert order.is_paid == True
# helpers
def create_order_with_payment_state(payment_state):
user = create_user(42)
party_id = 'acme-party-2016'
placed_by = user
order = create_order(party_id, placed_by)
order.payment_state = payment_state
return order
|
from byceps.services.shop.order.models.order import PaymentState
from testfixtures.shop_order import create_order
from testfixtures.user import create_user
def test_is_open():
payment_state = PaymentState.open
order = create_order_with_payment_state(payment_state)
assert order.payment_state == payment_state
assert order.is_open == True
assert order.is_canceled == False
assert order.is_paid == False
def test_is_canceled():
payment_state = PaymentState.canceled
order = create_order_with_payment_state(payment_state)
assert order.payment_state == payment_state
assert order.is_open == False
assert order.is_canceled == True
assert order.is_paid == False
def test_is_paid():
payment_state = PaymentState.paid
order = create_order_with_payment_state(payment_state)
assert order.payment_state == payment_state
assert order.is_open == False
assert order.is_canceled == False
assert order.is_paid == True
# helpers
def create_order_with_payment_state(payment_state):
user = create_user(42)
party_id = 'acme-party-2016'
placed_by = user
order = create_order(party_id, placed_by)
order.payment_state = payment_state
return order
|
Fix overshadowed tests by giving test functions unique names
|
Fix overshadowed tests by giving test functions unique names
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,m-ober/byceps,homeworkprod/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps
|
from byceps.services.shop.order.models.order import PaymentState
from testfixtures.shop_order import create_order
from testfixtures.user import create_user
def test_is_open():
payment_state = PaymentState.open
order = create_order_with_payment_state(payment_state)
assert order.payment_state == payment_state
assert order.is_open == True
assert order.is_canceled == False
assert order.is_paid == False
- def test_is_open():
+ def test_is_canceled():
payment_state = PaymentState.canceled
order = create_order_with_payment_state(payment_state)
assert order.payment_state == payment_state
assert order.is_open == False
assert order.is_canceled == True
assert order.is_paid == False
- def test_is_open():
+ def test_is_paid():
payment_state = PaymentState.paid
order = create_order_with_payment_state(payment_state)
assert order.payment_state == payment_state
assert order.is_open == False
assert order.is_canceled == False
assert order.is_paid == True
# helpers
def create_order_with_payment_state(payment_state):
user = create_user(42)
party_id = 'acme-party-2016'
placed_by = user
order = create_order(party_id, placed_by)
order.payment_state = payment_state
return order
|
Fix overshadowed tests by giving test functions unique names
|
## Code Before:
from byceps.services.shop.order.models.order import PaymentState
from testfixtures.shop_order import create_order
from testfixtures.user import create_user
def test_is_open():
payment_state = PaymentState.open
order = create_order_with_payment_state(payment_state)
assert order.payment_state == payment_state
assert order.is_open == True
assert order.is_canceled == False
assert order.is_paid == False
def test_is_open():
payment_state = PaymentState.canceled
order = create_order_with_payment_state(payment_state)
assert order.payment_state == payment_state
assert order.is_open == False
assert order.is_canceled == True
assert order.is_paid == False
def test_is_open():
payment_state = PaymentState.paid
order = create_order_with_payment_state(payment_state)
assert order.payment_state == payment_state
assert order.is_open == False
assert order.is_canceled == False
assert order.is_paid == True
# helpers
def create_order_with_payment_state(payment_state):
user = create_user(42)
party_id = 'acme-party-2016'
placed_by = user
order = create_order(party_id, placed_by)
order.payment_state = payment_state
return order
## Instruction:
Fix overshadowed tests by giving test functions unique names
## Code After:
from byceps.services.shop.order.models.order import PaymentState
from testfixtures.shop_order import create_order
from testfixtures.user import create_user
def test_is_open():
payment_state = PaymentState.open
order = create_order_with_payment_state(payment_state)
assert order.payment_state == payment_state
assert order.is_open == True
assert order.is_canceled == False
assert order.is_paid == False
def test_is_canceled():
payment_state = PaymentState.canceled
order = create_order_with_payment_state(payment_state)
assert order.payment_state == payment_state
assert order.is_open == False
assert order.is_canceled == True
assert order.is_paid == False
def test_is_paid():
payment_state = PaymentState.paid
order = create_order_with_payment_state(payment_state)
assert order.payment_state == payment_state
assert order.is_open == False
assert order.is_canceled == False
assert order.is_paid == True
# helpers
def create_order_with_payment_state(payment_state):
user = create_user(42)
party_id = 'acme-party-2016'
placed_by = user
order = create_order(party_id, placed_by)
order.payment_state = payment_state
return order
|
from byceps.services.shop.order.models.order import PaymentState
from testfixtures.shop_order import create_order
from testfixtures.user import create_user
def test_is_open():
payment_state = PaymentState.open
order = create_order_with_payment_state(payment_state)
assert order.payment_state == payment_state
assert order.is_open == True
assert order.is_canceled == False
assert order.is_paid == False
- def test_is_open():
? ^^ ^
+ def test_is_canceled():
? ^^^^ ^^^
payment_state = PaymentState.canceled
order = create_order_with_payment_state(payment_state)
assert order.payment_state == payment_state
assert order.is_open == False
assert order.is_canceled == True
assert order.is_paid == False
- def test_is_open():
? - ^^
+ def test_is_paid():
? ^^^
payment_state = PaymentState.paid
order = create_order_with_payment_state(payment_state)
assert order.payment_state == payment_state
assert order.is_open == False
assert order.is_canceled == False
assert order.is_paid == True
# helpers
def create_order_with_payment_state(payment_state):
user = create_user(42)
party_id = 'acme-party-2016'
placed_by = user
order = create_order(party_id, placed_by)
order.payment_state = payment_state
return order
|
d3992b1677a5186b8b4072c9fdf50e4cb44dc5ef
|
base_accounts/models.py
|
base_accounts/models.py
|
from django.db import models
from django.contrib.auth.models import AbstractUser
from django.utils.translation import ugettext_lazy as _
from django.template.defaultfilters import slugify
class BaseUser(AbstractUser):
slug = models.SlugField(_('slug'), max_length=255)
name = models.CharField(_('name'), max_length=255, blank=True)
first_login = models.BooleanField(_('first login'), default=True)
image = models.ImageField(_('image'), blank=True, null=True, upload_to="images/avatars/%Y/%m/%d", max_length=255)
class Meta:
abstract = True
def save(self, *args, **kwargs):
if not self.id:
self.slug = slugify(self.username)
if not self.name.strip():
self.name = "%s %s" % (self.first_name, self.last_name)
super(BaseUser, self).save(*args, **kwargs)
def get_display_name(self):
return self.name or self.username
|
from django.db import models
from django.contrib.auth.models import AbstractUser
from django.utils.translation import ugettext_lazy as _
from django.template.defaultfilters import slugify
class BaseUser(AbstractUser):
slug = models.SlugField(_('slug'), max_length=255)
name = models.CharField(_('name'), max_length=255, blank=True)
first_login = models.BooleanField(_('first login'), default=True)
image = models.ImageField(_('image'), blank=True, null=True, upload_to="images/avatars/%Y/%m/%d", max_length=255)
class Meta:
abstract = True
def save(self, *args, **kwargs):
# Create slug from username. Altough field is not unique at database
# level, it will be as long as username stays unique as well.
if not self.id:
self.slug = slugify(self.username)
# Assign username as name if empty
if not self.name.strip():
if not self.first_name:
self.first_name = self.username
name = "%s %s" % (self.first_name, self.last_name)
self.name = name.strip()
super(BaseUser, self).save(*args, **kwargs)
def get_display_name(self):
return self.name or self.username
|
Fix name field for empty values
|
Fix name field for empty values
|
Python
|
bsd-3-clause
|
Nomadblue/django-nomad-base-accounts,Nomadblue/django-nomad-base-accounts
|
from django.db import models
from django.contrib.auth.models import AbstractUser
from django.utils.translation import ugettext_lazy as _
from django.template.defaultfilters import slugify
class BaseUser(AbstractUser):
slug = models.SlugField(_('slug'), max_length=255)
name = models.CharField(_('name'), max_length=255, blank=True)
first_login = models.BooleanField(_('first login'), default=True)
image = models.ImageField(_('image'), blank=True, null=True, upload_to="images/avatars/%Y/%m/%d", max_length=255)
class Meta:
abstract = True
def save(self, *args, **kwargs):
+
+ # Create slug from username. Altough field is not unique at database
+ # level, it will be as long as username stays unique as well.
if not self.id:
self.slug = slugify(self.username)
+
+ # Assign username as name if empty
if not self.name.strip():
+ if not self.first_name:
+ self.first_name = self.username
- self.name = "%s %s" % (self.first_name, self.last_name)
+ name = "%s %s" % (self.first_name, self.last_name)
+ self.name = name.strip()
+
super(BaseUser, self).save(*args, **kwargs)
def get_display_name(self):
return self.name or self.username
|
Fix name field for empty values
|
## Code Before:
from django.db import models
from django.contrib.auth.models import AbstractUser
from django.utils.translation import ugettext_lazy as _
from django.template.defaultfilters import slugify
class BaseUser(AbstractUser):
slug = models.SlugField(_('slug'), max_length=255)
name = models.CharField(_('name'), max_length=255, blank=True)
first_login = models.BooleanField(_('first login'), default=True)
image = models.ImageField(_('image'), blank=True, null=True, upload_to="images/avatars/%Y/%m/%d", max_length=255)
class Meta:
abstract = True
def save(self, *args, **kwargs):
if not self.id:
self.slug = slugify(self.username)
if not self.name.strip():
self.name = "%s %s" % (self.first_name, self.last_name)
super(BaseUser, self).save(*args, **kwargs)
def get_display_name(self):
return self.name or self.username
## Instruction:
Fix name field for empty values
## Code After:
from django.db import models
from django.contrib.auth.models import AbstractUser
from django.utils.translation import ugettext_lazy as _
from django.template.defaultfilters import slugify
class BaseUser(AbstractUser):
slug = models.SlugField(_('slug'), max_length=255)
name = models.CharField(_('name'), max_length=255, blank=True)
first_login = models.BooleanField(_('first login'), default=True)
image = models.ImageField(_('image'), blank=True, null=True, upload_to="images/avatars/%Y/%m/%d", max_length=255)
class Meta:
abstract = True
def save(self, *args, **kwargs):
# Create slug from username. Altough field is not unique at database
# level, it will be as long as username stays unique as well.
if not self.id:
self.slug = slugify(self.username)
# Assign username as name if empty
if not self.name.strip():
if not self.first_name:
self.first_name = self.username
name = "%s %s" % (self.first_name, self.last_name)
self.name = name.strip()
super(BaseUser, self).save(*args, **kwargs)
def get_display_name(self):
return self.name or self.username
|
from django.db import models
from django.contrib.auth.models import AbstractUser
from django.utils.translation import ugettext_lazy as _
from django.template.defaultfilters import slugify
class BaseUser(AbstractUser):
slug = models.SlugField(_('slug'), max_length=255)
name = models.CharField(_('name'), max_length=255, blank=True)
first_login = models.BooleanField(_('first login'), default=True)
image = models.ImageField(_('image'), blank=True, null=True, upload_to="images/avatars/%Y/%m/%d", max_length=255)
class Meta:
abstract = True
def save(self, *args, **kwargs):
+
+ # Create slug from username. Altough field is not unique at database
+ # level, it will be as long as username stays unique as well.
if not self.id:
self.slug = slugify(self.username)
+
+ # Assign username as name if empty
if not self.name.strip():
+ if not self.first_name:
+ self.first_name = self.username
- self.name = "%s %s" % (self.first_name, self.last_name)
? -----
+ name = "%s %s" % (self.first_name, self.last_name)
+ self.name = name.strip()
+
super(BaseUser, self).save(*args, **kwargs)
def get_display_name(self):
return self.name or self.username
|
1d043a9fa2140992435bc5d6583601464d96f5b0
|
wafer/schedule/renderers.py
|
wafer/schedule/renderers.py
|
from django_medusa.renderers import StaticSiteRenderer
class ScheduleRenderer(StaticSiteRenderer):
def get_paths(self):
paths = ["/schedule/", ]
return paths
renderers = [ScheduleRenderer, ]
|
from django_medusa.renderers import StaticSiteRenderer
from wafer.schedule.models import Venue
class ScheduleRenderer(StaticSiteRenderer):
def get_paths(self):
paths = ["/schedule/", ]
# Add the venues
items = Venue.objects.all()
for item in items:
paths.append(item.get_absolute_url())
return paths
renderers = [ScheduleRenderer, ]
|
Add venues to site export
|
Add venues to site export
|
Python
|
isc
|
CarlFK/wafer,CarlFK/wafer,CTPUG/wafer,CTPUG/wafer,CTPUG/wafer,CTPUG/wafer,CarlFK/wafer,CarlFK/wafer
|
from django_medusa.renderers import StaticSiteRenderer
+ from wafer.schedule.models import Venue
class ScheduleRenderer(StaticSiteRenderer):
def get_paths(self):
paths = ["/schedule/", ]
+
+ # Add the venues
+ items = Venue.objects.all()
+ for item in items:
+ paths.append(item.get_absolute_url())
return paths
renderers = [ScheduleRenderer, ]
|
Add venues to site export
|
## Code Before:
from django_medusa.renderers import StaticSiteRenderer
class ScheduleRenderer(StaticSiteRenderer):
def get_paths(self):
paths = ["/schedule/", ]
return paths
renderers = [ScheduleRenderer, ]
## Instruction:
Add venues to site export
## Code After:
from django_medusa.renderers import StaticSiteRenderer
from wafer.schedule.models import Venue
class ScheduleRenderer(StaticSiteRenderer):
def get_paths(self):
paths = ["/schedule/", ]
# Add the venues
items = Venue.objects.all()
for item in items:
paths.append(item.get_absolute_url())
return paths
renderers = [ScheduleRenderer, ]
|
from django_medusa.renderers import StaticSiteRenderer
+ from wafer.schedule.models import Venue
class ScheduleRenderer(StaticSiteRenderer):
def get_paths(self):
paths = ["/schedule/", ]
+
+ # Add the venues
+ items = Venue.objects.all()
+ for item in items:
+ paths.append(item.get_absolute_url())
return paths
renderers = [ScheduleRenderer, ]
|
d20347f4a57bb195291ebc79fc1ca0858b3f1d65
|
PyLunch/pylunch/specials/models.py
|
PyLunch/pylunch/specials/models.py
|
from django.db import models
MAX_PRICE_FORMAT = {
'max_digits': 5,
'decimal_places': 2
}
SPECIAL_TYPES = (
('LU', 'Lunch'),
('BR', 'Breakfast'),
('DI', 'Dinner'),
)
MAX_RESTAURANT_NAME_LENGTH = 50
MAX_DESCRIPTION_LENGTH = 500
class Restaurant(models.Model):
name = models.CharField(max_length=MAX_RESTAURANT_NAME_LENGTH)
description = models.CharField(max_length=MAX_DESCRIPTION_LENGTH)
class Special(models.Model):
restaurant = models.ForeignKey(Restaurant)
description = models.CharField(max_length=MAX_DESCRIPTION_LENGTH)
special_type = models.CharField(max_length=2, choices=SPECIAL_TYPES)
special_price = models.DecimalField(**MAX_PRICE_FORMAT)
normal_price = models.DecimalField(**MAX_PRICE_FORMAT)
|
from django.db import models
MAX_PRICE_FORMAT = {
'max_digits': 5,
'decimal_places': 2
}
SPECIAL_TYPES = (
('LU', 'Lunch'),
('BR', 'Breakfast'),
('DI', 'Dinner'),
)
MAX_RESTAURANT_NAME_LENGTH = 50
MAX_DESCRIPTION_LENGTH = 500
class Restaurant(models.Model):
name = models.CharField(max_length=MAX_RESTAURANT_NAME_LENGTH)
description = models.CharField(max_length=MAX_DESCRIPTION_LENGTH)
def __unicode__(self):
return self.name
class Special(models.Model):
restaurant = models.ForeignKey(Restaurant)
description = models.CharField(max_length=MAX_DESCRIPTION_LENGTH)
special_type = models.CharField(max_length=2, choices=SPECIAL_TYPES)
special_price = models.DecimalField(**MAX_PRICE_FORMAT)
normal_price = models.DecimalField(**MAX_PRICE_FORMAT)
valid_from = models.DateField()
valid_until = models.DateField()
def __unicode__(self):
return "%s: %s" % (self.restaurant.name, self.description)
|
Add fields to Special model
|
Add fields to Special model
|
Python
|
unlicense
|
wiehan-a/pylunch
|
from django.db import models
MAX_PRICE_FORMAT = {
'max_digits': 5,
'decimal_places': 2
}
SPECIAL_TYPES = (
('LU', 'Lunch'),
('BR', 'Breakfast'),
('DI', 'Dinner'),
)
MAX_RESTAURANT_NAME_LENGTH = 50
MAX_DESCRIPTION_LENGTH = 500
class Restaurant(models.Model):
name = models.CharField(max_length=MAX_RESTAURANT_NAME_LENGTH)
description = models.CharField(max_length=MAX_DESCRIPTION_LENGTH)
+ def __unicode__(self):
+ return self.name
+
class Special(models.Model):
restaurant = models.ForeignKey(Restaurant)
description = models.CharField(max_length=MAX_DESCRIPTION_LENGTH)
special_type = models.CharField(max_length=2, choices=SPECIAL_TYPES)
special_price = models.DecimalField(**MAX_PRICE_FORMAT)
normal_price = models.DecimalField(**MAX_PRICE_FORMAT)
+
+ valid_from = models.DateField()
+ valid_until = models.DateField()
+
+ def __unicode__(self):
+ return "%s: %s" % (self.restaurant.name, self.description)
|
Add fields to Special model
|
## Code Before:
from django.db import models
MAX_PRICE_FORMAT = {
'max_digits': 5,
'decimal_places': 2
}
SPECIAL_TYPES = (
('LU', 'Lunch'),
('BR', 'Breakfast'),
('DI', 'Dinner'),
)
MAX_RESTAURANT_NAME_LENGTH = 50
MAX_DESCRIPTION_LENGTH = 500
class Restaurant(models.Model):
name = models.CharField(max_length=MAX_RESTAURANT_NAME_LENGTH)
description = models.CharField(max_length=MAX_DESCRIPTION_LENGTH)
class Special(models.Model):
restaurant = models.ForeignKey(Restaurant)
description = models.CharField(max_length=MAX_DESCRIPTION_LENGTH)
special_type = models.CharField(max_length=2, choices=SPECIAL_TYPES)
special_price = models.DecimalField(**MAX_PRICE_FORMAT)
normal_price = models.DecimalField(**MAX_PRICE_FORMAT)
## Instruction:
Add fields to Special model
## Code After:
from django.db import models
MAX_PRICE_FORMAT = {
'max_digits': 5,
'decimal_places': 2
}
SPECIAL_TYPES = (
('LU', 'Lunch'),
('BR', 'Breakfast'),
('DI', 'Dinner'),
)
MAX_RESTAURANT_NAME_LENGTH = 50
MAX_DESCRIPTION_LENGTH = 500
class Restaurant(models.Model):
name = models.CharField(max_length=MAX_RESTAURANT_NAME_LENGTH)
description = models.CharField(max_length=MAX_DESCRIPTION_LENGTH)
def __unicode__(self):
return self.name
class Special(models.Model):
restaurant = models.ForeignKey(Restaurant)
description = models.CharField(max_length=MAX_DESCRIPTION_LENGTH)
special_type = models.CharField(max_length=2, choices=SPECIAL_TYPES)
special_price = models.DecimalField(**MAX_PRICE_FORMAT)
normal_price = models.DecimalField(**MAX_PRICE_FORMAT)
valid_from = models.DateField()
valid_until = models.DateField()
def __unicode__(self):
return "%s: %s" % (self.restaurant.name, self.description)
|
from django.db import models
MAX_PRICE_FORMAT = {
'max_digits': 5,
'decimal_places': 2
}
SPECIAL_TYPES = (
('LU', 'Lunch'),
('BR', 'Breakfast'),
('DI', 'Dinner'),
)
MAX_RESTAURANT_NAME_LENGTH = 50
MAX_DESCRIPTION_LENGTH = 500
class Restaurant(models.Model):
name = models.CharField(max_length=MAX_RESTAURANT_NAME_LENGTH)
description = models.CharField(max_length=MAX_DESCRIPTION_LENGTH)
+ def __unicode__(self):
+ return self.name
+
class Special(models.Model):
restaurant = models.ForeignKey(Restaurant)
description = models.CharField(max_length=MAX_DESCRIPTION_LENGTH)
special_type = models.CharField(max_length=2, choices=SPECIAL_TYPES)
special_price = models.DecimalField(**MAX_PRICE_FORMAT)
normal_price = models.DecimalField(**MAX_PRICE_FORMAT)
+
+ valid_from = models.DateField()
+ valid_until = models.DateField()
+
+ def __unicode__(self):
+ return "%s: %s" % (self.restaurant.name, self.description)
|
b674f76c93b5208ad302fcba2d43b8c30bbaf14c
|
main.py
|
main.py
|
from altitude import run
run.run("27279", "C://Program Files (x86)/Altitude/servers/command.txt", "C://Program Files (x86)/Altitude/servers/log.txt",
"C://Program Files (x86)/Altitude/servers/log_old.txt", "C://Program Files (x86)/Altitude/servers/logs_archive.txt")
|
from altitude import run
run.run("27279", "/home/user/altitude-files/servers/command.txt", "/home/user/altitude-files/servers/log.txt",
"/home/user/altitude-files/servers/log_old.txt", "/home/user/altitude-files/servers/logs_archive.txt")
|
Put the directories back as they were for the server
|
Put the directories back as they were for the server
|
Python
|
mit
|
StamKaly/altitude-mod,StamKaly/altitude-mod
|
from altitude import run
- run.run("27279", "C://Program Files (x86)/Altitude/servers/command.txt", "C://Program Files (x86)/Altitude/servers/log.txt",
- "C://Program Files (x86)/Altitude/servers/log_old.txt", "C://Program Files (x86)/Altitude/servers/logs_archive.txt")
+ run.run("27279", "/home/user/altitude-files/servers/command.txt", "/home/user/altitude-files/servers/log.txt",
+ "/home/user/altitude-files/servers/log_old.txt", "/home/user/altitude-files/servers/logs_archive.txt")
|
Put the directories back as they were for the server
|
## Code Before:
from altitude import run
run.run("27279", "C://Program Files (x86)/Altitude/servers/command.txt", "C://Program Files (x86)/Altitude/servers/log.txt",
"C://Program Files (x86)/Altitude/servers/log_old.txt", "C://Program Files (x86)/Altitude/servers/logs_archive.txt")
## Instruction:
Put the directories back as they were for the server
## Code After:
from altitude import run
run.run("27279", "/home/user/altitude-files/servers/command.txt", "/home/user/altitude-files/servers/log.txt",
"/home/user/altitude-files/servers/log_old.txt", "/home/user/altitude-files/servers/logs_archive.txt")
|
from altitude import run
- run.run("27279", "C://Program Files (x86)/Altitude/servers/command.txt", "C://Program Files (x86)/Altitude/servers/log.txt",
- "C://Program Files (x86)/Altitude/servers/log_old.txt", "C://Program Files (x86)/Altitude/servers/logs_archive.txt")
+ run.run("27279", "/home/user/altitude-files/servers/command.txt", "/home/user/altitude-files/servers/log.txt",
+ "/home/user/altitude-files/servers/log_old.txt", "/home/user/altitude-files/servers/logs_archive.txt")
|
c767ee0b4392c519335a6055f64bbbb5a500e997
|
api_tests/base/test_pagination.py
|
api_tests/base/test_pagination.py
|
from nose.tools import * # flake8: noqa
from tests.base import ApiTestCase
from api.base.pagination import MaxSizePagination
class TestMaxPagination(ApiTestCase):
def test_no_query_param_alters_page_size(self):
assert_is_none(MaxSizePagination.page_size_query_param)
|
from nose.tools import * # flake8: noqa
from tests.base import ApiTestCase
from api.base.pagination import MaxSizePagination
class TestMaxPagination(ApiTestCase):
def test_no_query_param_alters_page_size(self):
assert MaxSizePagination.page_size_query_param is None, 'Adding variable page sizes to the paginator ' +\
'requires tests to ensure that you can\'t request more than the class\'s maximum number of values.'
|
Add error message for future breakers-of-tests
|
Add error message for future breakers-of-tests
|
Python
|
apache-2.0
|
HalcyonChimera/osf.io,DanielSBrown/osf.io,HalcyonChimera/osf.io,TomBaxter/osf.io,crcresearch/osf.io,crcresearch/osf.io,alexschiller/osf.io,hmoco/osf.io,rdhyee/osf.io,caneruguz/osf.io,SSJohns/osf.io,laurenrevere/osf.io,icereval/osf.io,sloria/osf.io,emetsger/osf.io,felliott/osf.io,HalcyonChimera/osf.io,mattclark/osf.io,leb2dg/osf.io,cwisecarver/osf.io,chrisseto/osf.io,icereval/osf.io,SSJohns/osf.io,binoculars/osf.io,wearpants/osf.io,emetsger/osf.io,monikagrabowska/osf.io,amyshi188/osf.io,sloria/osf.io,caseyrollins/osf.io,monikagrabowska/osf.io,pattisdr/osf.io,crcresearch/osf.io,chrisseto/osf.io,hmoco/osf.io,saradbowman/osf.io,mfraezz/osf.io,alexschiller/osf.io,mluo613/osf.io,wearpants/osf.io,baylee-d/osf.io,cslzchen/osf.io,mfraezz/osf.io,mfraezz/osf.io,Nesiehr/osf.io,felliott/osf.io,acshi/osf.io,samchrisinger/osf.io,SSJohns/osf.io,pattisdr/osf.io,laurenrevere/osf.io,aaxelb/osf.io,caneruguz/osf.io,baylee-d/osf.io,monikagrabowska/osf.io,CenterForOpenScience/osf.io,adlius/osf.io,hmoco/osf.io,Johnetordoff/osf.io,acshi/osf.io,baylee-d/osf.io,CenterForOpenScience/osf.io,caseyrollins/osf.io,samchrisinger/osf.io,acshi/osf.io,laurenrevere/osf.io,chennan47/osf.io,amyshi188/osf.io,Nesiehr/osf.io,DanielSBrown/osf.io,adlius/osf.io,rdhyee/osf.io,aaxelb/osf.io,pattisdr/osf.io,samchrisinger/osf.io,mluo613/osf.io,HalcyonChimera/osf.io,alexschiller/osf.io,rdhyee/osf.io,chrisseto/osf.io,monikagrabowska/osf.io,leb2dg/osf.io,alexschiller/osf.io,erinspace/osf.io,felliott/osf.io,mluo613/osf.io,brianjgeiger/osf.io,cwisecarver/osf.io,monikagrabowska/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,amyshi188/osf.io,CenterForOpenScience/osf.io,emetsger/osf.io,sloria/osf.io,DanielSBrown/osf.io,leb2dg/osf.io,acshi/osf.io,chrisseto/osf.io,TomBaxter/osf.io,cwisecarver/osf.io,binoculars/osf.io,mattclark/osf.io,chennan47/osf.io,rdhyee/osf.io,caneruguz/osf.io,chennan47/osf.io,adlius/osf.io,aaxelb/osf.io,saradbowman/osf.io,Johnetordoff/osf.io,mluo613/osf.io,cslzchen/osf.io,Nesiehr/osf.io,erinspace/osf.io,Johne
tordoff/osf.io,samchrisinger/osf.io,binoculars/osf.io,icereval/osf.io,adlius/osf.io,TomBaxter/osf.io,brianjgeiger/osf.io,hmoco/osf.io,amyshi188/osf.io,erinspace/osf.io,caseyrollins/osf.io,acshi/osf.io,emetsger/osf.io,felliott/osf.io,cslzchen/osf.io,mluo613/osf.io,brianjgeiger/osf.io,leb2dg/osf.io,brianjgeiger/osf.io,cwisecarver/osf.io,mfraezz/osf.io,Nesiehr/osf.io,alexschiller/osf.io,DanielSBrown/osf.io,CenterForOpenScience/osf.io,cslzchen/osf.io,mattclark/osf.io,caneruguz/osf.io,wearpants/osf.io,wearpants/osf.io,SSJohns/osf.io
|
from nose.tools import * # flake8: noqa
from tests.base import ApiTestCase
from api.base.pagination import MaxSizePagination
class TestMaxPagination(ApiTestCase):
def test_no_query_param_alters_page_size(self):
- assert_is_none(MaxSizePagination.page_size_query_param)
+ assert MaxSizePagination.page_size_query_param is None, 'Adding variable page sizes to the paginator ' +\
+ 'requires tests to ensure that you can\'t request more than the class\'s maximum number of values.'
|
Add error message for future breakers-of-tests
|
## Code Before:
from nose.tools import * # flake8: noqa
from tests.base import ApiTestCase
from api.base.pagination import MaxSizePagination
class TestMaxPagination(ApiTestCase):
def test_no_query_param_alters_page_size(self):
assert_is_none(MaxSizePagination.page_size_query_param)
## Instruction:
Add error message for future breakers-of-tests
## Code After:
from nose.tools import * # flake8: noqa
from tests.base import ApiTestCase
from api.base.pagination import MaxSizePagination
class TestMaxPagination(ApiTestCase):
def test_no_query_param_alters_page_size(self):
assert MaxSizePagination.page_size_query_param is None, 'Adding variable page sizes to the paginator ' +\
'requires tests to ensure that you can\'t request more than the class\'s maximum number of values.'
|
from nose.tools import * # flake8: noqa
from tests.base import ApiTestCase
from api.base.pagination import MaxSizePagination
class TestMaxPagination(ApiTestCase):
def test_no_query_param_alters_page_size(self):
- assert_is_none(MaxSizePagination.page_size_query_param)
+ assert MaxSizePagination.page_size_query_param is None, 'Adding variable page sizes to the paginator ' +\
+ 'requires tests to ensure that you can\'t request more than the class\'s maximum number of values.'
|
47bf5160010d0975297d39b200492270a5279e81
|
common/lib/xmodule/xmodule/discussion_module.py
|
common/lib/xmodule/xmodule/discussion_module.py
|
from lxml import etree
from xmodule.x_module import XModule
from xmodule.raw_module import RawDescriptor
import comment_client
import json
class DiscussionModule(XModule):
def get_html(self):
context = {
'discussion_id': self.discussion_id,
}
return self.system.render_template('discussion/_discussion_module.html', context)
def __init__(self, system, location, definition, descriptor, instance_state=None, shared_state=None, **kwargs):
XModule.__init__(self, system, location, definition, descriptor, instance_state, shared_state, **kwargs)
if isinstance(instance_state, str):
instance_state = json.loads(instance_state)
xml_data = etree.fromstring(definition['data'])
self.discussion_id = xml_data.attrib['id']
self.title = xml_data.attrib['for']
self.discussion_category = xml_data.attrib['discussion_category']
class DiscussionDescriptor(RawDescriptor):
module_class = DiscussionModule
|
from lxml import etree
from xmodule.x_module import XModule
from xmodule.raw_module import RawDescriptor
import json
class DiscussionModule(XModule):
def get_html(self):
context = {
'discussion_id': self.discussion_id,
}
return self.system.render_template('discussion/_discussion_module.html', context)
def __init__(self, system, location, definition, descriptor, instance_state=None, shared_state=None, **kwargs):
XModule.__init__(self, system, location, definition, descriptor, instance_state, shared_state, **kwargs)
if isinstance(instance_state, str):
instance_state = json.loads(instance_state)
xml_data = etree.fromstring(definition['data'])
self.discussion_id = xml_data.attrib['id']
self.title = xml_data.attrib['for']
self.discussion_category = xml_data.attrib['discussion_category']
class DiscussionDescriptor(RawDescriptor):
module_class = DiscussionModule
|
Remove unnecessary import that was failing a test
|
Remove unnecessary import that was failing a test
|
Python
|
agpl-3.0
|
franosincic/edx-platform,shabab12/edx-platform,motion2015/edx-platform,rue89-tech/edx-platform,nanolearning/edx-platform,J861449197/edx-platform,mcgachey/edx-platform,halvertoluke/edx-platform,cyanna/edx-platform,jruiperezv/ANALYSE,jbassen/edx-platform,abdoosh00/edraak,LearnEra/LearnEraPlaftform,doganov/edx-platform,alexthered/kienhoc-platform,teltek/edx-platform,motion2015/a3,Lektorium-LLC/edx-platform,iivic/BoiseStateX,peterm-itr/edx-platform,martynovp/edx-platform,MSOpenTech/edx-platform,auferack08/edx-platform,mjirayu/sit_academy,EduPepperPDTesting/pepper2013-testing,bitifirefly/edx-platform,pepeportela/edx-platform,philanthropy-u/edx-platform,hkawasaki/kawasaki-aio8-0,adoosii/edx-platform,tiagochiavericosta/edx-platform,LearnEra/LearnEraPlaftform,TsinghuaX/edx-platform,chrisndodge/edx-platform,pepeportela/edx-platform,kursitet/edx-platform,halvertoluke/edx-platform,shashank971/edx-platform,dcosentino/edx-platform,DefyVentures/edx-platform,dkarakats/edx-platform,kmoocdev2/edx-platform,inares/edx-platform,jswope00/GAI,nanolearning/edx-platform,jolyonb/edx-platform,mcgachey/edx-platform,shubhdev/openedx,ubc/edx-platform,4eek/edx-platform,sameetb-cuelogic/edx-platform-test,RPI-OPENEDX/edx-platform,Unow/edx-platform,appliedx/edx-platform,cecep-edu/edx-platform,edx/edx-platform,openfun/edx-platform,ESOedX/edx-platform,ferabra/edx-platform,jjmiranda/edx-platform,CourseTalk/edx-platform,itsjeyd/edx-platform,y12uc231/edx-platform,JioEducation/edx-platform,EduPepperPD/pepper2013,chrisndodge/edx-platform,ampax/edx-platform,motion2015/a3,alexthered/kienhoc-platform,motion2015/edx-platform,jazztpt/edx-platform,Unow/edx-platform,beacloudgenius/edx-platform,EduPepperPD/pepper2013,stvstnfrd/edx-platform,edx-solutions/edx-platform,10clouds/edx-platform,rue89-tech/edx-platform,dsajkl/reqiop,vismartltd/edx-platform,openfun/edx-platform,beni55/edx-platform,playm2mboy/edx-platform,abdoosh00/edx-rtl-final,cognitiveclass/edx-platform,utecuy/edx-platform,mtlchun/edx,pelikanchik/edx-pl
atform,nagyistoce/edx-platform,philanthropy-u/edx-platform,a-parhom/edx-platform,kalebhartje/schoolboost,praveen-pal/edx-platform,PepperPD/edx-pepper-platform,polimediaupv/edx-platform,jolyonb/edx-platform,mcgachey/edx-platform,miptliot/edx-platform,kamalx/edx-platform,motion2015/edx-platform,teltek/edx-platform,dcosentino/edx-platform,shubhdev/edx-platform,etzhou/edx-platform,Edraak/edx-platform,BehavioralInsightsTeam/edx-platform,raccoongang/edx-platform,pomegranited/edx-platform,jamiefolsom/edx-platform,shubhdev/edx-platform,Semi-global/edx-platform,praveen-pal/edx-platform,AkA84/edx-platform,eestay/edx-platform,utecuy/edx-platform,nikolas/edx-platform,ak2703/edx-platform,waheedahmed/edx-platform,shubhdev/openedx,wwj718/ANALYSE,LICEF/edx-platform,jswope00/griffinx,analyseuc3m/ANALYSE-v1,PepperPD/edx-pepper-platform,ak2703/edx-platform,angelapper/edx-platform,alexthered/kienhoc-platform,yokose-ks/edx-platform,chauhanhardik/populo_2,B-MOOC/edx-platform,gymnasium/edx-platform,hkawasaki/kawasaki-aio8-1,gsehub/edx-platform,SravanthiSinha/edx-platform,antoviaque/edx-platform,morenopc/edx-platform,Edraak/edraak-platform,rismalrv/edx-platform,inares/edx-platform,dsajkl/123,Unow/edx-platform,chand3040/cloud_that,chauhanhardik/populo,pdehaye/theming-edx-platform,jonathan-beard/edx-platform,gymnasium/edx-platform,playm2mboy/edx-platform,jelugbo/tundex,nanolearningllc/edx-platform-cypress-2,procangroup/edx-platform,motion2015/edx-platform,carsongee/edx-platform,arifsetiawan/edx-platform,appliedx/edx-platform,morenopc/edx-platform,bitifirefly/edx-platform,halvertoluke/edx-platform,deepsrijit1105/edx-platform,vikas1885/test1,waheedahmed/edx-platform,zerobatu/edx-platform,vismartltd/edx-platform,vasyarv/edx-platform,fintech-circle/edx-platform,EDUlib/edx-platform,wwj718/edx-platform,kalebhartje/schoolboost,PepperPD/edx-pepper-platform,shurihell/testasia,inares/edx-platform,devs1991/test_edx_docmode,jbassen/edx-platform,SivilTaram/edx-platform,deepsrijit1105/edx-platform,leansof
t/edx-platform,LICEF/edx-platform,gsehub/edx-platform,dsajkl/123,cyanna/edx-platform,carsongee/edx-platform,lduarte1991/edx-platform,solashirai/edx-platform,amir-qayyum-khan/edx-platform,TsinghuaX/edx-platform,jbzdak/edx-platform,mahendra-r/edx-platform,pomegranited/edx-platform,hamzehd/edx-platform,motion2015/edx-platform,synergeticsedx/deployment-wipro,UOMx/edx-platform,shubhdev/edxOnBaadal,EduPepperPDTesting/pepper2013-testing,EduPepperPDTesting/pepper2013-testing,rue89-tech/edx-platform,DefyVentures/edx-platform,CourseTalk/edx-platform,DNFcode/edx-platform,procangroup/edx-platform,romain-li/edx-platform,dsajkl/123,eduNEXT/edx-platform,bdero/edx-platform,doganov/edx-platform,cognitiveclass/edx-platform,jazkarta/edx-platform-for-isc,romain-li/edx-platform,sameetb-cuelogic/edx-platform-test,eemirtekin/edx-platform,playm2mboy/edx-platform,morenopc/edx-platform,ampax/edx-platform-backup,zhenzhai/edx-platform,cyanna/edx-platform,ferabra/edx-platform,shubhdev/edxOnBaadal,openfun/edx-platform,dsajkl/reqiop,leansoft/edx-platform,cselis86/edx-platform,iivic/BoiseStateX,antonve/s4-project-mooc,Edraak/edx-platform,BehavioralInsightsTeam/edx-platform,rismalrv/edx-platform,mitocw/edx-platform,zerobatu/edx-platform,kalebhartje/schoolboost,kursitet/edx-platform,edx-solutions/edx-platform,hamzehd/edx-platform,morenopc/edx-platform,chand3040/cloud_that,Shrhawk/edx-platform,Lektorium-LLC/edx-platform,prarthitm/edxplatform,Shrhawk/edx-platform,bigdatauniversity/edx-platform,angelapper/edx-platform,JioEducation/edx-platform,gymnasium/edx-platform,Stanford-Online/edx-platform,ZLLab-Mooc/edx-platform,caesar2164/edx-platform,rationalAgent/edx-platform-custom,fly19890211/edx-platform,MSOpenTech/edx-platform,rue89-tech/edx-platform,SravanthiSinha/edx-platform,dkarakats/edx-platform,ampax/edx-platform,edry/edx-platform,shabab12/edx-platform,abdoosh00/edx-rtl-final,ahmadio/edx-platform,zadgroup/edx-platform,marcore/edx-platform,DNFcode/edx-platform,sudheerchintala/LearnEraPlatForm,pomegran
ited/edx-platform,mcgachey/edx-platform,sameetb-cuelogic/edx-platform-test,zofuthan/edx-platform,arifsetiawan/edx-platform,simbs/edx-platform,franosincic/edx-platform,Edraak/edx-platform,cselis86/edx-platform,B-MOOC/edx-platform,zubair-arbi/edx-platform,alu042/edx-platform,Edraak/circleci-edx-platform,pepeportela/edx-platform,kxliugang/edx-platform,peterm-itr/edx-platform,itsjeyd/edx-platform,solashirai/edx-platform,chauhanhardik/populo,kxliugang/edx-platform,torchingloom/edx-platform,jamiefolsom/edx-platform,chand3040/cloud_that,Livit/Livit.Learn.EdX,pku9104038/edx-platform,vikas1885/test1,doganov/edx-platform,andyzsf/edx,Softmotions/edx-platform,jelugbo/tundex,eduNEXT/edx-platform,TeachAtTUM/edx-platform,Livit/Livit.Learn.EdX,simbs/edx-platform,jruiperezv/ANALYSE,fintech-circle/edx-platform,chrisndodge/edx-platform,doismellburning/edx-platform,zofuthan/edx-platform,pabloborrego93/edx-platform,yokose-ks/edx-platform,olexiim/edx-platform,Ayub-Khan/edx-platform,sameetb-cuelogic/edx-platform-test,knehez/edx-platform,eemirtekin/edx-platform,Semi-global/edx-platform,franosincic/edx-platform,arbrandes/edx-platform,doismellburning/edx-platform,shashank971/edx-platform,shubhdev/edxOnBaadal,jonathan-beard/edx-platform,SravanthiSinha/edx-platform,IONISx/edx-platform,jelugbo/tundex,rismalrv/edx-platform,mbareta/edx-platform-ft,msegado/edx-platform,chauhanhardik/populo_2,chudaol/edx-platform,TeachAtTUM/edx-platform,mbareta/edx-platform-ft,SivilTaram/edx-platform,Shrhawk/edx-platform,dkarakats/edx-platform,eduNEXT/edunext-platform,wwj718/edx-platform,chudaol/edx-platform,Semi-global/edx-platform,zhenzhai/edx-platform,zhenzhai/edx-platform,carsongee/edx-platform,benpatterson/edx-platform,cognitiveclass/edx-platform,ahmadio/edx-platform,edry/edx-platform,valtech-mooc/edx-platform,nagyistoce/edx-platform,nttks/jenkins-test,EduPepperPDTesting/pepper2013-testing,synergeticsedx/deployment-wipro,cpennington/edx-platform,leansoft/edx-platform,kmoocdev2/edx-platform,ampax/edx-platform-b
ackup,ampax/edx-platform,ampax/edx-platform-backup,atsolakid/edx-platform,sudheerchintala/LearnEraPlatForm,PepperPD/edx-pepper-platform,Edraak/edraak-platform,SivilTaram/edx-platform,shubhdev/edx-platform,dsajkl/reqiop,TeachAtTUM/edx-platform,tiagochiavericosta/edx-platform,xingyepei/edx-platform,jazztpt/edx-platform,simbs/edx-platform,xuxiao19910803/edx,jzoldak/edx-platform,cognitiveclass/edx-platform,peterm-itr/edx-platform,CourseTalk/edx-platform,louyihua/edx-platform,ahmadiga/min_edx,a-parhom/edx-platform,WatanabeYasumasa/edx-platform,hastexo/edx-platform,pabloborrego93/edx-platform,tiagochiavericosta/edx-platform,naresh21/synergetics-edx-platform,nanolearningllc/edx-platform-cypress,mushtaqak/edx-platform,rhndg/openedx,hkawasaki/kawasaki-aio8-0,hkawasaki/kawasaki-aio8-2,IONISx/edx-platform,appsembler/edx-platform,devs1991/test_edx_docmode,nttks/jenkins-test,UOMx/edx-platform,WatanabeYasumasa/edx-platform,leansoft/edx-platform,naresh21/synergetics-edx-platform,proversity-org/edx-platform,marcore/edx-platform,IITBinterns13/edx-platform-dev,Semi-global/edx-platform,eestay/edx-platform,Shrhawk/edx-platform,jjmiranda/edx-platform,polimediaupv/edx-platform,valtech-mooc/edx-platform,edry/edx-platform,nanolearningllc/edx-platform-cypress,4eek/edx-platform,alexthered/kienhoc-platform,eestay/edx-platform,don-github/edx-platform,shubhdev/openedx,nikolas/edx-platform,TsinghuaX/edx-platform,appliedx/edx-platform,pabloborrego93/edx-platform,Lektorium-LLC/edx-platform,dsajkl/123,shubhdev/edxOnBaadal,openfun/edx-platform,vikas1885/test1,xinjiguaike/edx-platform,Ayub-Khan/edx-platform,sudheerchintala/LearnEraPlatForm,zerobatu/edx-platform,UOMx/edx-platform,zubair-arbi/edx-platform,jamesblunt/edx-platform,Unow/edx-platform,ahmadiga/min_edx,jswope00/GAI,chauhanhardik/populo_2,pepeportela/edx-platform,atsolakid/edx-platform,andyzsf/edx,mitocw/edx-platform,jzoldak/edx-platform,eduNEXT/edx-platform,abdoosh00/edx-rtl-final,ak2703/edx-platform,10clouds/edx-platform,atsolakid/edx-platf
orm,nanolearning/edx-platform,shabab12/edx-platform,angelapper/edx-platform,mjirayu/sit_academy,jazkarta/edx-platform,fly19890211/edx-platform,ampax/edx-platform-backup,cecep-edu/edx-platform,playm2mboy/edx-platform,jazkarta/edx-platform,chauhanhardik/populo,don-github/edx-platform,devs1991/test_edx_docmode,atsolakid/edx-platform,polimediaupv/edx-platform,don-github/edx-platform,syjeon/new_edx,jelugbo/tundex,chauhanhardik/populo_2,MSOpenTech/edx-platform,cselis86/edx-platform,utecuy/edx-platform,mitocw/edx-platform,naresh21/synergetics-edx-platform,vasyarv/edx-platform,ubc/edx-platform,marcore/edx-platform,jazztpt/edx-platform,shurihell/testasia,unicri/edx-platform,devs1991/test_edx_docmode,Softmotions/edx-platform,franosincic/edx-platform,xuxiao19910803/edx-platform,iivic/BoiseStateX,y12uc231/edx-platform,abdoosh00/edx-rtl-final,don-github/edx-platform,ubc/edx-platform,nanolearningllc/edx-platform-cypress-2,stvstnfrd/edx-platform,bdero/edx-platform,LearnEra/LearnEraPlaftform,IndonesiaX/edx-platform,sameetb-cuelogic/edx-platform-test,Livit/Livit.Learn.EdX,wwj718/edx-platform,arifsetiawan/edx-platform,ahmadiga/min_edx,arifsetiawan/edx-platform,J861449197/edx-platform,antoviaque/edx-platform,jswope00/griffinx,apigee/edx-platform,chrisndodge/edx-platform,UXE/local-edx,procangroup/edx-platform,arbrandes/edx-platform,vasyarv/edx-platform,polimediaupv/edx-platform,Edraak/edraak-platform,halvertoluke/edx-platform,jbzdak/edx-platform,rationalAgent/edx-platform-custom,shubhdev/edx-platform,cselis86/edx-platform,xingyepei/edx-platform,DNFcode/edx-platform,appliedx/edx-platform,chauhanhardik/populo,antonve/s4-project-mooc,IndonesiaX/edx-platform,kmoocdev/edx-platform,ferabra/edx-platform,wwj718/ANALYSE,adoosii/edx-platform,tiagochiavericosta/edx-platform,J861449197/edx-platform,hkawasaki/kawasaki-aio8-2,appsembler/edx-platform,Shrhawk/edx-platform,pelikanchik/edx-platform,longmen21/edx-platform,bigdatauniversity/edx-platform,nttks/edx-platform,louyihua/edx-platform,ESOedX/edx-
platform,morpheby/levelup-by,EduPepperPD/pepper2013,ferabra/edx-platform,IITBinterns13/edx-platform-dev,amir-qayyum-khan/edx-platform,xuxiao19910803/edx,raccoongang/edx-platform,xingyepei/edx-platform,AkA84/edx-platform,DefyVentures/edx-platform,motion2015/a3,halvertoluke/edx-platform,kxliugang/edx-platform,stvstnfrd/edx-platform,ahmadiga/min_edx,ESOedX/edx-platform,adoosii/edx-platform,lduarte1991/edx-platform,Edraak/circleci-edx-platform,xuxiao19910803/edx-platform,mjirayu/sit_academy,deepsrijit1105/edx-platform,Kalyzee/edx-platform,auferack08/edx-platform,polimediaupv/edx-platform,nttks/jenkins-test,xuxiao19910803/edx-platform,eduNEXT/edunext-platform,UOMx/edx-platform,msegado/edx-platform,zerobatu/edx-platform,benpatterson/edx-platform,xinjiguaike/edx-platform,bigdatauniversity/edx-platform,IONISx/edx-platform,zerobatu/edx-platform,SivilTaram/edx-platform,nikolas/edx-platform,Stanford-Online/edx-platform,4eek/edx-platform,rationalAgent/edx-platform-custom,etzhou/edx-platform,devs1991/test_edx_docmode,DNFcode/edx-platform,rationalAgent/edx-platform-custom,xuxiao19910803/edx,Kalyzee/edx-platform,RPI-OPENEDX/edx-platform,hastexo/edx-platform,miptliot/edx-platform,Livit/Livit.Learn.EdX,4eek/edx-platform,hamzehd/edx-platform,B-MOOC/edx-platform,CourseTalk/edx-platform,shubhdev/openedx,WatanabeYasumasa/edx-platform,OmarIthawi/edx-platform,jamesblunt/edx-platform,yokose-ks/edx-platform,zadgroup/edx-platform,beacloudgenius/edx-platform,DefyVentures/edx-platform,JCBarahona/edX,JCBarahona/edX,dkarakats/edx-platform,Edraak/circleci-edx-platform,alexthered/kienhoc-platform,mahendra-r/edx-platform,ovnicraft/edx-platform,kamalx/edx-platform,unicri/edx-platform,UXE/local-edx,waheedahmed/edx-platform,nttks/edx-platform,jonathan-beard/edx-platform,cselis86/edx-platform,abdoosh00/edraak,jbzdak/edx-platform,analyseuc3m/ANALYSE-v1,ahmedaljazzar/edx-platform,pelikanchik/edx-platform,ovnicraft/edx-platform,proversity-org/edx-platform,ampax/edx-platform,MakeHer/edx-platform,yokose-ks/
edx-platform,bitifirefly/edx-platform,kmoocdev/edx-platform,Endika/edx-platform,RPI-OPENEDX/edx-platform,y12uc231/edx-platform,xinjiguaike/edx-platform,cecep-edu/edx-platform,philanthropy-u/edx-platform,pdehaye/theming-edx-platform,olexiim/edx-platform,longmen21/edx-platform,etzhou/edx-platform,nagyistoce/edx-platform,Edraak/edx-platform,analyseuc3m/ANALYSE-v1,defance/edx-platform,MakeHer/edx-platform,Ayub-Khan/edx-platform,ovnicraft/edx-platform,hmcmooc/muddx-platform,jamesblunt/edx-platform,bdero/edx-platform,OmarIthawi/edx-platform,torchingloom/edx-platform,Endika/edx-platform,syjeon/new_edx,olexiim/edx-platform,nttks/edx-platform,defance/edx-platform,etzhou/edx-platform,cpennington/edx-platform,kmoocdev/edx-platform,TeachAtTUM/edx-platform,J861449197/edx-platform,syjeon/new_edx,defance/edx-platform,kursitet/edx-platform,nanolearningllc/edx-platform-cypress,kamalx/edx-platform,knehez/edx-platform,utecuy/edx-platform,jelugbo/tundex,eemirtekin/edx-platform,jswope00/GAI,OmarIthawi/edx-platform,longmen21/edx-platform,beni55/edx-platform,solashirai/edx-platform,UXE/local-edx,marcore/edx-platform,vismartltd/edx-platform,praveen-pal/edx-platform,atsolakid/edx-platform,AkA84/edx-platform,pabloborrego93/edx-platform,fintech-circle/edx-platform,eemirtekin/edx-platform,y12uc231/edx-platform,hkawasaki/kawasaki-aio8-2,mtlchun/edx,zubair-arbi/edx-platform,zhenzhai/edx-platform,UXE/local-edx,msegado/edx-platform,MakeHer/edx-platform,xinjiguaike/edx-platform,edx/edx-platform,raccoongang/edx-platform,unicri/edx-platform,gymnasium/edx-platform,don-github/edx-platform,tiagochiavericosta/edx-platform,shashank971/edx-platform,proversity-org/edx-platform,valtech-mooc/edx-platform,hastexo/edx-platform,Softmotions/edx-platform,Softmotions/edx-platform,kalebhartje/schoolboost,Ayub-Khan/edx-platform,rhndg/openedx,IITBinterns13/edx-platform-dev,dsajkl/123,RPI-OPENEDX/edx-platform,jazkarta/edx-platform-for-isc,shashank971/edx-platform,caesar2164/edx-platform,martynovp/edx-platform,cpenningt
on/edx-platform,pdehaye/theming-edx-platform,nanolearningllc/edx-platform-cypress,jazkarta/edx-platform-for-isc,ubc/edx-platform,shabab12/edx-platform,tanmaykm/edx-platform,EduPepperPDTesting/pepper2013-testing,chand3040/cloud_that,benpatterson/edx-platform,SravanthiSinha/edx-platform,caesar2164/edx-platform,romain-li/edx-platform,WatanabeYasumasa/edx-platform,dcosentino/edx-platform,vasyarv/edx-platform,prarthitm/edxplatform,alu042/edx-platform,teltek/edx-platform,kxliugang/edx-platform,kalebhartje/schoolboost,cyanna/edx-platform,nanolearningllc/edx-platform-cypress-2,waheedahmed/edx-platform,nanolearning/edx-platform,auferack08/edx-platform,EduPepperPD/pepper2013,mahendra-r/edx-platform,benpatterson/edx-platform,iivic/BoiseStateX,unicri/edx-platform,amir-qayyum-khan/edx-platform,miptliot/edx-platform,itsjeyd/edx-platform,mahendra-r/edx-platform,mbareta/edx-platform-ft,eduNEXT/edunext-platform,synergeticsedx/deployment-wipro,eduNEXT/edx-platform,ubc/edx-platform,jamiefolsom/edx-platform,longmen21/edx-platform,jamiefolsom/edx-platform,cognitiveclass/edx-platform,msegado/edx-platform,Kalyzee/edx-platform,defance/edx-platform,shashank971/edx-platform,OmarIthawi/edx-platform,openfun/edx-platform,zofuthan/edx-platform,zubair-arbi/edx-platform,hkawasaki/kawasaki-aio8-0,nanolearningllc/edx-platform-cypress,mjg2203/edx-platform-seas,ahmadio/edx-platform,chudaol/edx-platform,antoviaque/edx-platform,playm2mboy/edx-platform,jonathan-beard/edx-platform,stvstnfrd/edx-platform,hmcmooc/muddx-platform,abdoosh00/edraak,Lektorium-LLC/edx-platform,EDUlib/edx-platform,MakeHer/edx-platform,lduarte1991/edx-platform,morenopc/edx-platform,devs1991/test_edx_docmode,hkawasaki/kawasaki-aio8-1,ZLLab-Mooc/edx-platform,jswope00/griffinx,hkawasaki/kawasaki-aio8-2,xingyepei/edx-platform,simbs/edx-platform,10clouds/edx-platform,doganov/edx-platform,nikolas/edx-platform,Kalyzee/edx-platform,msegado/edx-platform,xuxiao19910803/edx,jazztpt/edx-platform,hkawasaki/kawasaki-aio8-0,romain-li/edx-platform
,beacloudgenius/edx-platform,dcosentino/edx-platform,wwj718/edx-platform,edx/edx-platform,BehavioralInsightsTeam/edx-platform,tanmaykm/edx-platform,chauhanhardik/populo,naresh21/synergetics-edx-platform,beni55/edx-platform,martynovp/edx-platform,itsjeyd/edx-platform,nagyistoce/edx-platform,utecuy/edx-platform,simbs/edx-platform,pku9104038/edx-platform,amir-qayyum-khan/edx-platform,vismartltd/edx-platform,EduPepperPD/pepper2013,valtech-mooc/edx-platform,Semi-global/edx-platform,jolyonb/edx-platform,fly19890211/edx-platform,nttks/edx-platform,louyihua/edx-platform,ak2703/edx-platform,syjeon/new_edx,devs1991/test_edx_docmode,cecep-edu/edx-platform,hkawasaki/kawasaki-aio8-1,ferabra/edx-platform,ahmedaljazzar/edx-platform,IndonesiaX/edx-platform,IndonesiaX/edx-platform,bdero/edx-platform,proversity-org/edx-platform,mbareta/edx-platform-ft,IndonesiaX/edx-platform,IITBinterns13/edx-platform-dev,romain-li/edx-platform,jolyonb/edx-platform,abdoosh00/edraak,pelikanchik/edx-platform,chudaol/edx-platform,arifsetiawan/edx-platform,jruiperezv/ANALYSE,carsongee/edx-platform,jbassen/edx-platform,kursitet/edx-platform,ovnicraft/edx-platform,shurihell/testasia,bigdatauniversity/edx-platform,etzhou/edx-platform,JCBarahona/edX,adoosii/edx-platform,synergeticsedx/deployment-wipro,beacloudgenius/edx-platform,xuxiao19910803/edx-platform,jzoldak/edx-platform,ahmedaljazzar/edx-platform,wwj718/ANALYSE,louyihua/edx-platform,kmoocdev2/edx-platform,mushtaqak/edx-platform,eduNEXT/edunext-platform,jbzdak/edx-platform,mushtaqak/edx-platform,adoosii/edx-platform,SravanthiSinha/edx-platform,prarthitm/edxplatform,ZLLab-Mooc/edx-platform,caesar2164/edx-platform,jamesblunt/edx-platform,Endika/edx-platform,leansoft/edx-platform,BehavioralInsightsTeam/edx-platform,cyanna/edx-platform,dkarakats/edx-platform,ahmadio/edx-platform,B-MOOC/edx-platform,jazkarta/edx-platform-for-isc,MSOpenTech/edx-platform,gsehub/edx-platform,pdehaye/theming-edx-platform,alu042/edx-platform,y12uc231/edx-platform,olexiim/edx-pla
tform,rismalrv/edx-platform,pomegranited/edx-platform,miptliot/edx-platform,eestay/edx-platform,shubhdev/edx-platform,IONISx/edx-platform,LICEF/edx-platform,edx-solutions/edx-platform,yokose-ks/edx-platform,beni55/edx-platform,mjirayu/sit_academy,ZLLab-Mooc/edx-platform,ESOedX/edx-platform,chand3040/cloud_that,mtlchun/edx,edx-solutions/edx-platform,torchingloom/edx-platform,jjmiranda/edx-platform,MSOpenTech/edx-platform,zofuthan/edx-platform,JCBarahona/edX,jazkarta/edx-platform,CredoReference/edx-platform,antonve/s4-project-mooc,edry/edx-platform,doismellburning/edx-platform,Stanford-Online/edx-platform,gsehub/edx-platform,CredoReference/edx-platform,mtlchun/edx,motion2015/a3,MakeHer/edx-platform,fly19890211/edx-platform,LICEF/edx-platform,zadgroup/edx-platform,chauhanhardik/populo_2,jbassen/edx-platform,hmcmooc/muddx-platform,LearnEra/LearnEraPlaftform,hmcmooc/muddx-platform,analyseuc3m/ANALYSE-v1,jazkarta/edx-platform,zubair-arbi/edx-platform,tanmaykm/edx-platform,Edraak/edx-platform,ak2703/edx-platform,mjirayu/sit_academy,JCBarahona/edX,lduarte1991/edx-platform,10clouds/edx-platform,waheedahmed/edx-platform,apigee/edx-platform,DefyVentures/edx-platform,Softmotions/edx-platform,a-parhom/edx-platform,teltek/edx-platform,J861449197/edx-platform,fintech-circle/edx-platform,EduPepperPDTesting/pepper2013-testing,prarthitm/edxplatform,mushtaqak/edx-platform,jswope00/GAI,edx/edx-platform,xinjiguaike/edx-platform,motion2015/a3,iivic/BoiseStateX,procangroup/edx-platform,solashirai/edx-platform,pku9104038/edx-platform,jazztpt/edx-platform,appsembler/edx-platform,hastexo/edx-platform,RPI-OPENEDX/edx-platform,olexiim/edx-platform,angelapper/edx-platform,apigee/edx-platform,vikas1885/test1,EDUlib/edx-platform,nanolearningllc/edx-platform-cypress-2,zadgroup/edx-platform,Edraak/circleci-edx-platform,mjg2203/edx-platform-seas,arbrandes/edx-platform,LICEF/edx-platform,nttks/edx-platform,morpheby/levelup-by,raccoongang/edx-platform,DNFcode/edx-platform,xuxiao19910803/edx,torchinglo
om/edx-platform,philanthropy-u/edx-platform,hamzehd/edx-platform,arbrandes/edx-platform,hamzehd/edx-platform,zofuthan/edx-platform,mjg2203/edx-platform-seas,peterm-itr/edx-platform,jswope00/griffinx,xingyepei/edx-platform,rismalrv/edx-platform,rue89-tech/edx-platform,shubhdev/edxOnBaadal,doismellburning/edx-platform,JioEducation/edx-platform,mcgachey/edx-platform,bitifirefly/edx-platform,doganov/edx-platform,bitifirefly/edx-platform,ZLLab-Mooc/edx-platform,morpheby/levelup-by,vismartltd/edx-platform,ahmadio/edx-platform,Kalyzee/edx-platform,PepperPD/edx-pepper-platform,inares/edx-platform,fly19890211/edx-platform,andyzsf/edx,deepsrijit1105/edx-platform,martynovp/edx-platform,mitocw/edx-platform,edry/edx-platform,mahendra-r/edx-platform,inares/edx-platform,nikolas/edx-platform,shurihell/testasia,shurihell/testasia,hkawasaki/kawasaki-aio8-1,rhndg/openedx,bigdatauniversity/edx-platform,benpatterson/edx-platform,kxliugang/edx-platform,knehez/edx-platform,jswope00/griffinx,4eek/edx-platform,Stanford-Online/edx-platform,vikas1885/test1,B-MOOC/edx-platform,ahmadiga/min_edx,torchingloom/edx-platform,nttks/jenkins-test,a-parhom/edx-platform,jzoldak/edx-platform,shubhdev/openedx,sudheerchintala/LearnEraPlatForm,morpheby/levelup-by,appliedx/edx-platform,jamiefolsom/edx-platform,tanmaykm/edx-platform,devs1991/test_edx_docmode,franosincic/edx-platform,IONISx/edx-platform,wwj718/edx-platform,nagyistoce/edx-platform,dcosentino/edx-platform,longmen21/edx-platform,valtech-mooc/edx-platform,Ayub-Khan/edx-platform,SivilTaram/edx-platform,xuxiao19910803/edx-platform,apigee/edx-platform,solashirai/edx-platform,Endika/edx-platform,kmoocdev/edx-platform,appsembler/edx-platform,jonathan-beard/edx-platform,wwj718/ANALYSE,jruiperezv/ANALYSE,pomegranited/edx-platform,unicri/edx-platform,Edraak/circleci-edx-platform,kamalx/edx-platform,ampax/edx-platform-backup,rhndg/openedx,chudaol/edx-platform,pku9104038/edx-platform,TsinghuaX/edx-platform,kmoocdev/edx-platform,mushtaqak/edx-platform,kmoocde
v2/edx-platform,auferack08/edx-platform,eemirtekin/edx-platform,beni55/edx-platform,andyzsf/edx,CredoReference/edx-platform,nttks/jenkins-test,alu042/edx-platform,nanolearningllc/edx-platform-cypress-2,rationalAgent/edx-platform-custom,jazkarta/edx-platform,jbassen/edx-platform,zhenzhai/edx-platform,cpennington/edx-platform,jjmiranda/edx-platform,ahmedaljazzar/edx-platform,vasyarv/edx-platform,zadgroup/edx-platform,antonve/s4-project-mooc,mjg2203/edx-platform-seas,Edraak/edraak-platform,martynovp/edx-platform,rhndg/openedx,doismellburning/edx-platform,mtlchun/edx,JioEducation/edx-platform,beacloudgenius/edx-platform,nanolearning/edx-platform,kursitet/edx-platform,EDUlib/edx-platform,kamalx/edx-platform,jruiperezv/ANALYSE,eestay/edx-platform,jamesblunt/edx-platform,cecep-edu/edx-platform,knehez/edx-platform,CredoReference/edx-platform,AkA84/edx-platform,antonve/s4-project-mooc,AkA84/edx-platform,jazkarta/edx-platform-for-isc,jbzdak/edx-platform,ovnicraft/edx-platform,knehez/edx-platform,kmoocdev2/edx-platform,antoviaque/edx-platform,praveen-pal/edx-platform,dsajkl/reqiop,wwj718/ANALYSE
|
from lxml import etree
from xmodule.x_module import XModule
from xmodule.raw_module import RawDescriptor
-
- import comment_client
import json
class DiscussionModule(XModule):
def get_html(self):
context = {
'discussion_id': self.discussion_id,
}
return self.system.render_template('discussion/_discussion_module.html', context)
def __init__(self, system, location, definition, descriptor, instance_state=None, shared_state=None, **kwargs):
XModule.__init__(self, system, location, definition, descriptor, instance_state, shared_state, **kwargs)
if isinstance(instance_state, str):
instance_state = json.loads(instance_state)
xml_data = etree.fromstring(definition['data'])
self.discussion_id = xml_data.attrib['id']
self.title = xml_data.attrib['for']
self.discussion_category = xml_data.attrib['discussion_category']
class DiscussionDescriptor(RawDescriptor):
module_class = DiscussionModule
|
Remove unnecessary import that was failing a test
|
## Code Before:
from lxml import etree
from xmodule.x_module import XModule
from xmodule.raw_module import RawDescriptor
import comment_client
import json
class DiscussionModule(XModule):
def get_html(self):
context = {
'discussion_id': self.discussion_id,
}
return self.system.render_template('discussion/_discussion_module.html', context)
def __init__(self, system, location, definition, descriptor, instance_state=None, shared_state=None, **kwargs):
XModule.__init__(self, system, location, definition, descriptor, instance_state, shared_state, **kwargs)
if isinstance(instance_state, str):
instance_state = json.loads(instance_state)
xml_data = etree.fromstring(definition['data'])
self.discussion_id = xml_data.attrib['id']
self.title = xml_data.attrib['for']
self.discussion_category = xml_data.attrib['discussion_category']
class DiscussionDescriptor(RawDescriptor):
module_class = DiscussionModule
## Instruction:
Remove unnecessary import that was failing a test
## Code After:
from lxml import etree
from xmodule.x_module import XModule
from xmodule.raw_module import RawDescriptor
import json
class DiscussionModule(XModule):
def get_html(self):
context = {
'discussion_id': self.discussion_id,
}
return self.system.render_template('discussion/_discussion_module.html', context)
def __init__(self, system, location, definition, descriptor, instance_state=None, shared_state=None, **kwargs):
XModule.__init__(self, system, location, definition, descriptor, instance_state, shared_state, **kwargs)
if isinstance(instance_state, str):
instance_state = json.loads(instance_state)
xml_data = etree.fromstring(definition['data'])
self.discussion_id = xml_data.attrib['id']
self.title = xml_data.attrib['for']
self.discussion_category = xml_data.attrib['discussion_category']
class DiscussionDescriptor(RawDescriptor):
module_class = DiscussionModule
|
from lxml import etree
from xmodule.x_module import XModule
from xmodule.raw_module import RawDescriptor
-
- import comment_client
import json
class DiscussionModule(XModule):
def get_html(self):
context = {
'discussion_id': self.discussion_id,
}
return self.system.render_template('discussion/_discussion_module.html', context)
def __init__(self, system, location, definition, descriptor, instance_state=None, shared_state=None, **kwargs):
XModule.__init__(self, system, location, definition, descriptor, instance_state, shared_state, **kwargs)
if isinstance(instance_state, str):
instance_state = json.loads(instance_state)
xml_data = etree.fromstring(definition['data'])
self.discussion_id = xml_data.attrib['id']
self.title = xml_data.attrib['for']
self.discussion_category = xml_data.attrib['discussion_category']
class DiscussionDescriptor(RawDescriptor):
module_class = DiscussionModule
|
c4d4ba61d1948bebecfadd540a77603fc9dda204
|
benchfunk/core/plotters.py
|
benchfunk/core/plotters.py
|
import numpy as np
from jug import TaskGenerator
import ezplot
__all__ = ['plot_stack']
@TaskGenerator
def plot_stack(stack_results, problems=None, policies=None, name=''):
problems = problems if problems is not None else stack_results.key()
nfigs = len(problems)
fig = ezplot.figure(figsize=(5*nfigs, 4))
for i, expt in enumerate(problems):
results = stack_results[expt]
policies = policies if policies is not None else results.keys()
ax = fig.add_subplot(1, nfigs, i+1)
for policy in policies:
xbest, ybest = zip(*results[policy])
iters = np.arange(np.shape(ybest)[1])
mu = np.mean(ybest, axis=0)
std = np.std(ybest, axis=0) / np.sqrt(len(ybest))
ax.plot_banded(iters, mu, std, label=policy)
ax.set_title(expt, fontsize=16)
ax.legend(loc=0, fontsize=16)
ezplot.plt.savefig(name)
return fig
|
import matplotlib
matplotlib.use('Agg')
import numpy as np
from jug import TaskGenerator
import ezplot
__all__ = ['plot_stack']
@TaskGenerator
def plot_stack(stack_results, problems=None, policies=None, name=''):
problems = problems if problems is not None else stack_results.key()
nfigs = len(problems)
fig = ezplot.figure(figsize=(5*nfigs, 4))
for i, expt in enumerate(problems):
results = stack_results[expt]
policies = policies if policies is not None else results.keys()
ax = fig.add_subplot(1, nfigs, i+1)
for policy in policies:
xbest, ybest = zip(*results[policy])
iters = np.arange(np.shape(ybest)[1])
mu = np.mean(ybest, axis=0)
std = np.std(ybest, axis=0) / np.sqrt(len(ybest))
ax.plot_banded(iters, mu, std, label=policy)
ax.set_title(expt, fontsize=16)
ax.legend(loc=0, fontsize=16)
ezplot.plt.savefig(name)
return fig
|
Fix plotter to use 'Agg'.
|
Fix plotter to use 'Agg'.
|
Python
|
bsd-2-clause
|
mwhoffman/benchfunk
|
+ import matplotlib
+ matplotlib.use('Agg')
import numpy as np
from jug import TaskGenerator
import ezplot
__all__ = ['plot_stack']
@TaskGenerator
def plot_stack(stack_results, problems=None, policies=None, name=''):
problems = problems if problems is not None else stack_results.key()
nfigs = len(problems)
fig = ezplot.figure(figsize=(5*nfigs, 4))
for i, expt in enumerate(problems):
results = stack_results[expt]
policies = policies if policies is not None else results.keys()
ax = fig.add_subplot(1, nfigs, i+1)
for policy in policies:
xbest, ybest = zip(*results[policy])
iters = np.arange(np.shape(ybest)[1])
mu = np.mean(ybest, axis=0)
std = np.std(ybest, axis=0) / np.sqrt(len(ybest))
ax.plot_banded(iters, mu, std, label=policy)
ax.set_title(expt, fontsize=16)
ax.legend(loc=0, fontsize=16)
ezplot.plt.savefig(name)
return fig
|
Fix plotter to use 'Agg'.
|
## Code Before:
import numpy as np
from jug import TaskGenerator
import ezplot
__all__ = ['plot_stack']
@TaskGenerator
def plot_stack(stack_results, problems=None, policies=None, name=''):
problems = problems if problems is not None else stack_results.key()
nfigs = len(problems)
fig = ezplot.figure(figsize=(5*nfigs, 4))
for i, expt in enumerate(problems):
results = stack_results[expt]
policies = policies if policies is not None else results.keys()
ax = fig.add_subplot(1, nfigs, i+1)
for policy in policies:
xbest, ybest = zip(*results[policy])
iters = np.arange(np.shape(ybest)[1])
mu = np.mean(ybest, axis=0)
std = np.std(ybest, axis=0) / np.sqrt(len(ybest))
ax.plot_banded(iters, mu, std, label=policy)
ax.set_title(expt, fontsize=16)
ax.legend(loc=0, fontsize=16)
ezplot.plt.savefig(name)
return fig
## Instruction:
Fix plotter to use 'Agg'.
## Code After:
import matplotlib
matplotlib.use('Agg')
import numpy as np
from jug import TaskGenerator
import ezplot
__all__ = ['plot_stack']
@TaskGenerator
def plot_stack(stack_results, problems=None, policies=None, name=''):
problems = problems if problems is not None else stack_results.key()
nfigs = len(problems)
fig = ezplot.figure(figsize=(5*nfigs, 4))
for i, expt in enumerate(problems):
results = stack_results[expt]
policies = policies if policies is not None else results.keys()
ax = fig.add_subplot(1, nfigs, i+1)
for policy in policies:
xbest, ybest = zip(*results[policy])
iters = np.arange(np.shape(ybest)[1])
mu = np.mean(ybest, axis=0)
std = np.std(ybest, axis=0) / np.sqrt(len(ybest))
ax.plot_banded(iters, mu, std, label=policy)
ax.set_title(expt, fontsize=16)
ax.legend(loc=0, fontsize=16)
ezplot.plt.savefig(name)
return fig
|
+ import matplotlib
+ matplotlib.use('Agg')
import numpy as np
from jug import TaskGenerator
import ezplot
__all__ = ['plot_stack']
@TaskGenerator
def plot_stack(stack_results, problems=None, policies=None, name=''):
problems = problems if problems is not None else stack_results.key()
nfigs = len(problems)
fig = ezplot.figure(figsize=(5*nfigs, 4))
for i, expt in enumerate(problems):
results = stack_results[expt]
policies = policies if policies is not None else results.keys()
ax = fig.add_subplot(1, nfigs, i+1)
for policy in policies:
xbest, ybest = zip(*results[policy])
iters = np.arange(np.shape(ybest)[1])
mu = np.mean(ybest, axis=0)
std = np.std(ybest, axis=0) / np.sqrt(len(ybest))
ax.plot_banded(iters, mu, std, label=policy)
ax.set_title(expt, fontsize=16)
ax.legend(loc=0, fontsize=16)
ezplot.plt.savefig(name)
return fig
|
4af8f60598e216adb04e4ed04394e8835beedcac
|
src/som/vmobjects/object_without_fields.py
|
src/som/vmobjects/object_without_fields.py
|
from rlib.jit import promote
from som.vmobjects.abstract_object import AbstractObject
class ObjectWithoutFields(AbstractObject):
_immutable_fields_ = ["_object_layout?"]
def __init__(self, layout): # pylint: disable=W
self._object_layout = layout
def get_class(self, universe):
assert self._object_layout is not None
return self._object_layout.for_class
def get_object_layout(self, _universe):
return promote(self._object_layout)
def set_class(self, clazz):
layout = clazz.get_layout_for_instances()
assert layout is not None
self._object_layout = layout
def get_number_of_fields(self): # pylint: disable=no-self-use
return 0
|
from rlib.jit import promote
from som.vmobjects.abstract_object import AbstractObject
class ObjectWithoutFields(AbstractObject):
_immutable_fields_ = ["_object_layout?"]
def __init__(self, layout): # pylint: disable=W
self._object_layout = layout
def get_class(self, universe):
assert self._object_layout is not None
return self._object_layout.for_class
def get_object_layout(self, _universe):
return promote(self._object_layout)
def set_class(self, clazz):
layout = clazz.get_layout_for_instances()
assert layout is not None
self._object_layout = layout
def get_number_of_fields(self): # pylint: disable=no-self-use
return 0
def __str__(self):
from som.vm.globals import nilObject, trueObject, falseObject
if self is nilObject:
return "nil"
if self is trueObject:
return "true"
if self is falseObject:
return "false"
return AbstractObject.__str__(self)
|
Improve ObjectWithoutField.__str__ to idenfify singleton values
|
Improve ObjectWithoutField.__str__ to idenfify singleton values
Signed-off-by: Stefan Marr <[email protected]>
|
Python
|
mit
|
smarr/PySOM,smarr/PySOM,SOM-st/PySOM,SOM-st/PySOM
|
from rlib.jit import promote
from som.vmobjects.abstract_object import AbstractObject
class ObjectWithoutFields(AbstractObject):
_immutable_fields_ = ["_object_layout?"]
def __init__(self, layout): # pylint: disable=W
self._object_layout = layout
def get_class(self, universe):
assert self._object_layout is not None
return self._object_layout.for_class
def get_object_layout(self, _universe):
return promote(self._object_layout)
def set_class(self, clazz):
layout = clazz.get_layout_for_instances()
assert layout is not None
self._object_layout = layout
def get_number_of_fields(self): # pylint: disable=no-self-use
return 0
+ def __str__(self):
+ from som.vm.globals import nilObject, trueObject, falseObject
+
+ if self is nilObject:
+ return "nil"
+ if self is trueObject:
+ return "true"
+ if self is falseObject:
+ return "false"
+ return AbstractObject.__str__(self)
+
|
Improve ObjectWithoutField.__str__ to idenfify singleton values
|
## Code Before:
from rlib.jit import promote
from som.vmobjects.abstract_object import AbstractObject
class ObjectWithoutFields(AbstractObject):
_immutable_fields_ = ["_object_layout?"]
def __init__(self, layout): # pylint: disable=W
self._object_layout = layout
def get_class(self, universe):
assert self._object_layout is not None
return self._object_layout.for_class
def get_object_layout(self, _universe):
return promote(self._object_layout)
def set_class(self, clazz):
layout = clazz.get_layout_for_instances()
assert layout is not None
self._object_layout = layout
def get_number_of_fields(self): # pylint: disable=no-self-use
return 0
## Instruction:
Improve ObjectWithoutField.__str__ to idenfify singleton values
## Code After:
from rlib.jit import promote
from som.vmobjects.abstract_object import AbstractObject
class ObjectWithoutFields(AbstractObject):
_immutable_fields_ = ["_object_layout?"]
def __init__(self, layout): # pylint: disable=W
self._object_layout = layout
def get_class(self, universe):
assert self._object_layout is not None
return self._object_layout.for_class
def get_object_layout(self, _universe):
return promote(self._object_layout)
def set_class(self, clazz):
layout = clazz.get_layout_for_instances()
assert layout is not None
self._object_layout = layout
def get_number_of_fields(self): # pylint: disable=no-self-use
return 0
def __str__(self):
from som.vm.globals import nilObject, trueObject, falseObject
if self is nilObject:
return "nil"
if self is trueObject:
return "true"
if self is falseObject:
return "false"
return AbstractObject.__str__(self)
|
from rlib.jit import promote
from som.vmobjects.abstract_object import AbstractObject
class ObjectWithoutFields(AbstractObject):
_immutable_fields_ = ["_object_layout?"]
def __init__(self, layout): # pylint: disable=W
self._object_layout = layout
def get_class(self, universe):
assert self._object_layout is not None
return self._object_layout.for_class
def get_object_layout(self, _universe):
return promote(self._object_layout)
def set_class(self, clazz):
layout = clazz.get_layout_for_instances()
assert layout is not None
self._object_layout = layout
def get_number_of_fields(self): # pylint: disable=no-self-use
return 0
+
+ def __str__(self):
+ from som.vm.globals import nilObject, trueObject, falseObject
+
+ if self is nilObject:
+ return "nil"
+ if self is trueObject:
+ return "true"
+ if self is falseObject:
+ return "false"
+ return AbstractObject.__str__(self)
|
19dc8b7e1535c4cc431b765f95db117175fc7d24
|
server/admin.py
|
server/admin.py
|
from django.contrib import admin
from server.models import *
class MachineGroupAdmin(admin.ModelAdmin):
readonly_fields = ('key',)
class MachineAdmin(admin.ModelAdmin):
list_display = ('hostname', 'serial')
admin.site.register(UserProfile)
admin.site.register(BusinessUnit)
admin.site.register(MachineGroup, MachineGroupAdmin)
admin.site.register(Machine, MachineAdmin)
admin.site.register(Fact)
admin.site.register(PluginScriptSubmission)
admin.site.register(PluginScriptRow)
admin.site.register(HistoricalFact)
admin.site.register(Condition)
admin.site.register(PendingUpdate)
admin.site.register(InstalledUpdate)
admin.site.register(PendingAppleUpdate)
admin.site.register(ApiKey)
admin.site.register(Plugin)
admin.site.register(Report)
# admin.site.register(OSQueryResult)
# admin.site.register(OSQueryColumn)
admin.site.register(SalSetting)
admin.site.register(UpdateHistory)
admin.site.register(UpdateHistoryItem)
admin.site.register(MachineDetailPlugin)
|
from django.contrib import admin
from server.models import *
class ApiKeyAdmin(admin.ModelAdmin):
list_display = ('name', 'public_key', 'private_key')
class MachineAdmin(admin.ModelAdmin):
list_display = ('hostname', 'serial')
class MachineGroupAdmin(admin.ModelAdmin):
readonly_fields = ('key',)
admin.site.register(ApiKey, ApiKeyAdmin)
admin.site.register(BusinessUnit)
admin.site.register(Condition)
admin.site.register(Fact)
admin.site.register(HistoricalFact)
admin.site.register(InstalledUpdate)
admin.site.register(Machine, MachineAdmin)
admin.site.register(MachineDetailPlugin)
admin.site.register(MachineGroup, MachineGroupAdmin)
# admin.site.register(OSQueryColumn)
# admin.site.register(OSQueryResult)
admin.site.register(PendingAppleUpdate)
admin.site.register(PendingUpdate)
admin.site.register(Plugin)
admin.site.register(PluginScriptRow)
admin.site.register(PluginScriptSubmission)
admin.site.register(Report)
admin.site.register(SalSetting)
admin.site.register(UpdateHistory)
admin.site.register(UpdateHistoryItem)
admin.site.register(UserProfile)
|
Sort registrations. Separate classes of imports. Add API key display.
|
Sort registrations. Separate classes of imports. Add API key display.
|
Python
|
apache-2.0
|
salopensource/sal,sheagcraig/sal,salopensource/sal,salopensource/sal,sheagcraig/sal,sheagcraig/sal,sheagcraig/sal,salopensource/sal
|
from django.contrib import admin
+
from server.models import *
+
+
+ class ApiKeyAdmin(admin.ModelAdmin):
+ list_display = ('name', 'public_key', 'private_key')
+
+
+ class MachineAdmin(admin.ModelAdmin):
+ list_display = ('hostname', 'serial')
class MachineGroupAdmin(admin.ModelAdmin):
readonly_fields = ('key',)
+ admin.site.register(ApiKey, ApiKeyAdmin)
- class MachineAdmin(admin.ModelAdmin):
- list_display = ('hostname', 'serial')
-
-
- admin.site.register(UserProfile)
admin.site.register(BusinessUnit)
+ admin.site.register(Condition)
+ admin.site.register(Fact)
+ admin.site.register(HistoricalFact)
+ admin.site.register(InstalledUpdate)
+ admin.site.register(Machine, MachineAdmin)
+ admin.site.register(MachineDetailPlugin)
admin.site.register(MachineGroup, MachineGroupAdmin)
- admin.site.register(Machine, MachineAdmin)
+ # admin.site.register(OSQueryColumn)
+ # admin.site.register(OSQueryResult)
+ admin.site.register(PendingAppleUpdate)
+ admin.site.register(PendingUpdate)
- admin.site.register(Fact)
+ admin.site.register(Plugin)
+ admin.site.register(PluginScriptRow)
admin.site.register(PluginScriptSubmission)
- admin.site.register(PluginScriptRow)
- admin.site.register(HistoricalFact)
- admin.site.register(Condition)
- admin.site.register(PendingUpdate)
- admin.site.register(InstalledUpdate)
- admin.site.register(PendingAppleUpdate)
- admin.site.register(ApiKey)
- admin.site.register(Plugin)
admin.site.register(Report)
- # admin.site.register(OSQueryResult)
- # admin.site.register(OSQueryColumn)
admin.site.register(SalSetting)
admin.site.register(UpdateHistory)
admin.site.register(UpdateHistoryItem)
- admin.site.register(MachineDetailPlugin)
+ admin.site.register(UserProfile)
|
Sort registrations. Separate classes of imports. Add API key display.
|
## Code Before:
from django.contrib import admin
from server.models import *
class MachineGroupAdmin(admin.ModelAdmin):
readonly_fields = ('key',)
class MachineAdmin(admin.ModelAdmin):
list_display = ('hostname', 'serial')
admin.site.register(UserProfile)
admin.site.register(BusinessUnit)
admin.site.register(MachineGroup, MachineGroupAdmin)
admin.site.register(Machine, MachineAdmin)
admin.site.register(Fact)
admin.site.register(PluginScriptSubmission)
admin.site.register(PluginScriptRow)
admin.site.register(HistoricalFact)
admin.site.register(Condition)
admin.site.register(PendingUpdate)
admin.site.register(InstalledUpdate)
admin.site.register(PendingAppleUpdate)
admin.site.register(ApiKey)
admin.site.register(Plugin)
admin.site.register(Report)
# admin.site.register(OSQueryResult)
# admin.site.register(OSQueryColumn)
admin.site.register(SalSetting)
admin.site.register(UpdateHistory)
admin.site.register(UpdateHistoryItem)
admin.site.register(MachineDetailPlugin)
## Instruction:
Sort registrations. Separate classes of imports. Add API key display.
## Code After:
from django.contrib import admin
from server.models import *
class ApiKeyAdmin(admin.ModelAdmin):
list_display = ('name', 'public_key', 'private_key')
class MachineAdmin(admin.ModelAdmin):
list_display = ('hostname', 'serial')
class MachineGroupAdmin(admin.ModelAdmin):
readonly_fields = ('key',)
admin.site.register(ApiKey, ApiKeyAdmin)
admin.site.register(BusinessUnit)
admin.site.register(Condition)
admin.site.register(Fact)
admin.site.register(HistoricalFact)
admin.site.register(InstalledUpdate)
admin.site.register(Machine, MachineAdmin)
admin.site.register(MachineDetailPlugin)
admin.site.register(MachineGroup, MachineGroupAdmin)
# admin.site.register(OSQueryColumn)
# admin.site.register(OSQueryResult)
admin.site.register(PendingAppleUpdate)
admin.site.register(PendingUpdate)
admin.site.register(Plugin)
admin.site.register(PluginScriptRow)
admin.site.register(PluginScriptSubmission)
admin.site.register(Report)
admin.site.register(SalSetting)
admin.site.register(UpdateHistory)
admin.site.register(UpdateHistoryItem)
admin.site.register(UserProfile)
|
from django.contrib import admin
+
from server.models import *
+
+
+ class ApiKeyAdmin(admin.ModelAdmin):
+ list_display = ('name', 'public_key', 'private_key')
+
+
+ class MachineAdmin(admin.ModelAdmin):
+ list_display = ('hostname', 'serial')
class MachineGroupAdmin(admin.ModelAdmin):
readonly_fields = ('key',)
+ admin.site.register(ApiKey, ApiKeyAdmin)
- class MachineAdmin(admin.ModelAdmin):
- list_display = ('hostname', 'serial')
-
-
- admin.site.register(UserProfile)
admin.site.register(BusinessUnit)
+ admin.site.register(Condition)
+ admin.site.register(Fact)
+ admin.site.register(HistoricalFact)
+ admin.site.register(InstalledUpdate)
+ admin.site.register(Machine, MachineAdmin)
+ admin.site.register(MachineDetailPlugin)
admin.site.register(MachineGroup, MachineGroupAdmin)
- admin.site.register(Machine, MachineAdmin)
+ # admin.site.register(OSQueryColumn)
+ # admin.site.register(OSQueryResult)
+ admin.site.register(PendingAppleUpdate)
+ admin.site.register(PendingUpdate)
- admin.site.register(Fact)
? ^^^^
+ admin.site.register(Plugin)
? ^^^^^^
+ admin.site.register(PluginScriptRow)
admin.site.register(PluginScriptSubmission)
- admin.site.register(PluginScriptRow)
- admin.site.register(HistoricalFact)
- admin.site.register(Condition)
- admin.site.register(PendingUpdate)
- admin.site.register(InstalledUpdate)
- admin.site.register(PendingAppleUpdate)
- admin.site.register(ApiKey)
- admin.site.register(Plugin)
admin.site.register(Report)
- # admin.site.register(OSQueryResult)
- # admin.site.register(OSQueryColumn)
admin.site.register(SalSetting)
admin.site.register(UpdateHistory)
admin.site.register(UpdateHistoryItem)
- admin.site.register(MachineDetailPlugin)
+ admin.site.register(UserProfile)
|
ff0631c625cda7c1aac3d86cbc7074a996ef0fc1
|
powerline/bindings/bar/powerline-bar.py
|
powerline/bindings/bar/powerline-bar.py
|
from __future__ import (unicode_literals, division, absolute_import, print_function)
import sys
import time
from threading import Lock
from argparse import ArgumentParser
from powerline import Powerline
from powerline.lib.monotonic import monotonic
from powerline.lib.encoding import get_unicode_writer
if __name__ == '__main__':
parser = ArgumentParser(description='Powerline BAR bindings.')
parser.add_argument(
'--i3', action='store_true',
help='Subscribe for i3 events.'
)
args = parser.parse_args()
powerline = Powerline('wm', renderer_module='bar')
powerline.update_renderer()
interval = 0.5
lock = Lock()
write = get_unicode_writer(encoding='utf-8')
def render(event=None, data=None, sub=None):
global lock
with lock:
write(powerline.render())
write('\n')
sys.stdout.flush()
if args.i3:
import i3
sub = i3.Subscription(render, 'workspace')
while True:
start_time = monotonic()
render()
time.sleep(max(interval - (monotonic() - start_time), 0.1))
|
from __future__ import (unicode_literals, division, absolute_import, print_function)
import sys
import time
from threading import Lock
from argparse import ArgumentParser
from powerline import Powerline
from powerline.lib.monotonic import monotonic
from powerline.lib.encoding import get_unicode_writer
class BarPowerline(Powerline):
get_encoding = staticmethod(lambda: 'utf-8')
def init(self):
super(BarPowerline, self).init(ext='wm', renderer_module='bar')
def render(event=None, data=None, sub=None):
global lock
with lock:
write(powerline.render())
write('\n')
sys.stdout.flush()
if __name__ == '__main__':
parser = ArgumentParser(description='Powerline BAR bindings.')
parser.add_argument(
'--i3', action='store_true',
help='Subscribe for i3 events.'
)
args = parser.parse_args()
powerline = BarPowerline()
interval = 0.5
lock = Lock()
write = get_unicode_writer(encoding='utf-8')
if args.i3:
import i3
sub = i3.Subscription(render, 'workspace')
while True:
start_time = monotonic()
render()
time.sleep(max(interval - (monotonic() - start_time), 0.1))
|
Make sure powerline class knows that it will use UTF-8
|
Make sure powerline class knows that it will use UTF-8
|
Python
|
mit
|
darac/powerline,darac/powerline,junix/powerline,dragon788/powerline,kenrachynski/powerline,junix/powerline,areteix/powerline,russellb/powerline,seanfisk/powerline,Liangjianghao/powerline,lukw00/powerline,xxxhycl2010/powerline,bezhermoso/powerline,bartvm/powerline,EricSB/powerline,cyrixhero/powerline,DoctorJellyface/powerline,blindFS/powerline,DoctorJellyface/powerline,Liangjianghao/powerline,areteix/powerline,DoctorJellyface/powerline,xxxhycl2010/powerline,dragon788/powerline,EricSB/powerline,kenrachynski/powerline,s0undt3ch/powerline,Liangjianghao/powerline,prvnkumar/powerline,seanfisk/powerline,S0lll0s/powerline,bezhermoso/powerline,bezhermoso/powerline,S0lll0s/powerline,junix/powerline,lukw00/powerline,lukw00/powerline,russellb/powerline,bartvm/powerline,QuLogic/powerline,russellb/powerline,xfumihiro/powerline,IvanAli/powerline,areteix/powerline,cyrixhero/powerline,IvanAli/powerline,s0undt3ch/powerline,seanfisk/powerline,s0undt3ch/powerline,darac/powerline,EricSB/powerline,Luffin/powerline,bartvm/powerline,cyrixhero/powerline,S0lll0s/powerline,xfumihiro/powerline,xxxhycl2010/powerline,Luffin/powerline,IvanAli/powerline,QuLogic/powerline,dragon788/powerline,blindFS/powerline,kenrachynski/powerline,QuLogic/powerline,xfumihiro/powerline,prvnkumar/powerline,prvnkumar/powerline,blindFS/powerline,Luffin/powerline
|
from __future__ import (unicode_literals, division, absolute_import, print_function)
import sys
import time
from threading import Lock
from argparse import ArgumentParser
from powerline import Powerline
from powerline.lib.monotonic import monotonic
from powerline.lib.encoding import get_unicode_writer
+ class BarPowerline(Powerline):
+ get_encoding = staticmethod(lambda: 'utf-8')
+
+ def init(self):
+ super(BarPowerline, self).init(ext='wm', renderer_module='bar')
+
+
+ def render(event=None, data=None, sub=None):
+ global lock
+ with lock:
+ write(powerline.render())
+ write('\n')
+ sys.stdout.flush()
+
+
if __name__ == '__main__':
parser = ArgumentParser(description='Powerline BAR bindings.')
parser.add_argument(
'--i3', action='store_true',
help='Subscribe for i3 events.'
)
args = parser.parse_args()
+ powerline = BarPowerline()
- powerline = Powerline('wm', renderer_module='bar')
- powerline.update_renderer()
interval = 0.5
lock = Lock()
write = get_unicode_writer(encoding='utf-8')
-
- def render(event=None, data=None, sub=None):
- global lock
- with lock:
- write(powerline.render())
- write('\n')
- sys.stdout.flush()
if args.i3:
import i3
sub = i3.Subscription(render, 'workspace')
while True:
start_time = monotonic()
render()
time.sleep(max(interval - (monotonic() - start_time), 0.1))
|
Make sure powerline class knows that it will use UTF-8
|
## Code Before:
from __future__ import (unicode_literals, division, absolute_import, print_function)
import sys
import time
from threading import Lock
from argparse import ArgumentParser
from powerline import Powerline
from powerline.lib.monotonic import monotonic
from powerline.lib.encoding import get_unicode_writer
if __name__ == '__main__':
parser = ArgumentParser(description='Powerline BAR bindings.')
parser.add_argument(
'--i3', action='store_true',
help='Subscribe for i3 events.'
)
args = parser.parse_args()
powerline = Powerline('wm', renderer_module='bar')
powerline.update_renderer()
interval = 0.5
lock = Lock()
write = get_unicode_writer(encoding='utf-8')
def render(event=None, data=None, sub=None):
global lock
with lock:
write(powerline.render())
write('\n')
sys.stdout.flush()
if args.i3:
import i3
sub = i3.Subscription(render, 'workspace')
while True:
start_time = monotonic()
render()
time.sleep(max(interval - (monotonic() - start_time), 0.1))
## Instruction:
Make sure powerline class knows that it will use UTF-8
## Code After:
from __future__ import (unicode_literals, division, absolute_import, print_function)
import sys
import time
from threading import Lock
from argparse import ArgumentParser
from powerline import Powerline
from powerline.lib.monotonic import monotonic
from powerline.lib.encoding import get_unicode_writer
class BarPowerline(Powerline):
get_encoding = staticmethod(lambda: 'utf-8')
def init(self):
super(BarPowerline, self).init(ext='wm', renderer_module='bar')
def render(event=None, data=None, sub=None):
global lock
with lock:
write(powerline.render())
write('\n')
sys.stdout.flush()
if __name__ == '__main__':
parser = ArgumentParser(description='Powerline BAR bindings.')
parser.add_argument(
'--i3', action='store_true',
help='Subscribe for i3 events.'
)
args = parser.parse_args()
powerline = BarPowerline()
interval = 0.5
lock = Lock()
write = get_unicode_writer(encoding='utf-8')
if args.i3:
import i3
sub = i3.Subscription(render, 'workspace')
while True:
start_time = monotonic()
render()
time.sleep(max(interval - (monotonic() - start_time), 0.1))
|
from __future__ import (unicode_literals, division, absolute_import, print_function)
import sys
import time
from threading import Lock
from argparse import ArgumentParser
from powerline import Powerline
from powerline.lib.monotonic import monotonic
from powerline.lib.encoding import get_unicode_writer
+ class BarPowerline(Powerline):
+ get_encoding = staticmethod(lambda: 'utf-8')
+
+ def init(self):
+ super(BarPowerline, self).init(ext='wm', renderer_module='bar')
+
+
+ def render(event=None, data=None, sub=None):
+ global lock
+ with lock:
+ write(powerline.render())
+ write('\n')
+ sys.stdout.flush()
+
+
if __name__ == '__main__':
parser = ArgumentParser(description='Powerline BAR bindings.')
parser.add_argument(
'--i3', action='store_true',
help='Subscribe for i3 events.'
)
args = parser.parse_args()
+ powerline = BarPowerline()
- powerline = Powerline('wm', renderer_module='bar')
- powerline.update_renderer()
interval = 0.5
lock = Lock()
write = get_unicode_writer(encoding='utf-8')
-
- def render(event=None, data=None, sub=None):
- global lock
- with lock:
- write(powerline.render())
- write('\n')
- sys.stdout.flush()
if args.i3:
import i3
sub = i3.Subscription(render, 'workspace')
while True:
start_time = monotonic()
render()
time.sleep(max(interval - (monotonic() - start_time), 0.1))
|
1c9ce82c954ab206ec1b5387ef5cb49ab9c96208
|
additional_scripts/image-get-datastore-list.py
|
additional_scripts/image-get-datastore-list.py
|
import os
import xml.etree.ElementTree as ET
oneimage = os.popen("oneimage list --xml")
tree = ET.fromstring(oneimage.read())
#print(tree.tag)
for image in tree.findall('./IMAGE'):
imageid = image.find('./ID')
print(imageid.text)
for vmid in image.findall('./VMS/ID'):
print(imageid.text+" "+vmid.text)
|
import os
import xml.etree.ElementTree as ET
oneimage = os.popen("oneimage list --xml")
tree = ET.fromstring(oneimage.read())
#print(tree.tag)
for image in tree.findall('./IMAGE'):
imageid = image.find('./ID')
print(imageid.text)
is_persistent = image.find('./PERSISTENT')
if is_persistent.text == '1':
continue
for vmid in image.findall('./VMS/ID'):
print(imageid.text+" "+vmid.text)
|
Exclude persistent images in list
|
Exclude persistent images in list
|
Python
|
apache-2.0
|
zhtlancer/addon-iscsi,zhtlancer/addon-iscsi
|
import os
import xml.etree.ElementTree as ET
oneimage = os.popen("oneimage list --xml")
tree = ET.fromstring(oneimage.read())
#print(tree.tag)
for image in tree.findall('./IMAGE'):
imageid = image.find('./ID')
print(imageid.text)
+ is_persistent = image.find('./PERSISTENT')
+ if is_persistent.text == '1':
+ continue
for vmid in image.findall('./VMS/ID'):
print(imageid.text+" "+vmid.text)
|
Exclude persistent images in list
|
## Code Before:
import os
import xml.etree.ElementTree as ET
oneimage = os.popen("oneimage list --xml")
tree = ET.fromstring(oneimage.read())
#print(tree.tag)
for image in tree.findall('./IMAGE'):
imageid = image.find('./ID')
print(imageid.text)
for vmid in image.findall('./VMS/ID'):
print(imageid.text+" "+vmid.text)
## Instruction:
Exclude persistent images in list
## Code After:
import os
import xml.etree.ElementTree as ET
oneimage = os.popen("oneimage list --xml")
tree = ET.fromstring(oneimage.read())
#print(tree.tag)
for image in tree.findall('./IMAGE'):
imageid = image.find('./ID')
print(imageid.text)
is_persistent = image.find('./PERSISTENT')
if is_persistent.text == '1':
continue
for vmid in image.findall('./VMS/ID'):
print(imageid.text+" "+vmid.text)
|
import os
import xml.etree.ElementTree as ET
oneimage = os.popen("oneimage list --xml")
tree = ET.fromstring(oneimage.read())
#print(tree.tag)
for image in tree.findall('./IMAGE'):
imageid = image.find('./ID')
print(imageid.text)
+ is_persistent = image.find('./PERSISTENT')
+ if is_persistent.text == '1':
+ continue
for vmid in image.findall('./VMS/ID'):
print(imageid.text+" "+vmid.text)
|
d6461896dec112caad81490e1a6d055a3d4c9a95
|
db.py
|
db.py
|
from pymongo import MongoClient
from settings import *
client = MongoClient(MONGO_HOST, MONGO_PORT)
db = client[MONGO_DB]
|
from pymongo import mongo_client
from pymongo import database
from pymongo import collection
from settings import *
class ModuleMongoClient(mongo_client.MongoClient):
def __getattr__(self, name):
attr = super(ModuleMongoClient, self).__getattr__(name)
if isinstance(attr, database.Database):
return Database(self, name)
return attr
class ModuleDatabase(database.Database):
def __getattr__(self, name):
attr = super(ModuleDatabase, self).__getattr__(name)
if isinstance(attr, collection.Collection):
return ModuleCollection(self, name)
return attr
class ModuleCollection(collection.Collection):
def __init__(self, database, name, create=False, **kwargs):
_name = 'module_%s_%s' % (self.__class__.__name__, name)
super(ModuleCollection, self).__init__(database=database,
name=_name,
create=create)
client = mongo_client.MongoClient(MONGO_HOST, MONGO_PORT)
db = client[MONGO_DB]
module_client = ModuleMongoClient(MONGO_HOST, MONGO_PORT)
module_db = client[MONGO_DB]
|
Add custom wrapper code to pymongo for Modules
|
Add custom wrapper code to pymongo for Modules
|
Python
|
mit
|
billyvg/piebot
|
- from pymongo import MongoClient
+ from pymongo import mongo_client
+ from pymongo import database
+ from pymongo import collection
from settings import *
+ class ModuleMongoClient(mongo_client.MongoClient):
+ def __getattr__(self, name):
+ attr = super(ModuleMongoClient, self).__getattr__(name)
+ if isinstance(attr, database.Database):
+ return Database(self, name)
+ return attr
+
+ class ModuleDatabase(database.Database):
+ def __getattr__(self, name):
+ attr = super(ModuleDatabase, self).__getattr__(name)
+ if isinstance(attr, collection.Collection):
+ return ModuleCollection(self, name)
+ return attr
+
+ class ModuleCollection(collection.Collection):
+ def __init__(self, database, name, create=False, **kwargs):
+ _name = 'module_%s_%s' % (self.__class__.__name__, name)
+ super(ModuleCollection, self).__init__(database=database,
+ name=_name,
+ create=create)
+
- client = MongoClient(MONGO_HOST, MONGO_PORT)
+ client = mongo_client.MongoClient(MONGO_HOST, MONGO_PORT)
db = client[MONGO_DB]
+ module_client = ModuleMongoClient(MONGO_HOST, MONGO_PORT)
+ module_db = client[MONGO_DB]
|
Add custom wrapper code to pymongo for Modules
|
## Code Before:
from pymongo import MongoClient
from settings import *
client = MongoClient(MONGO_HOST, MONGO_PORT)
db = client[MONGO_DB]
## Instruction:
Add custom wrapper code to pymongo for Modules
## Code After:
from pymongo import mongo_client
from pymongo import database
from pymongo import collection
from settings import *
class ModuleMongoClient(mongo_client.MongoClient):
def __getattr__(self, name):
attr = super(ModuleMongoClient, self).__getattr__(name)
if isinstance(attr, database.Database):
return Database(self, name)
return attr
class ModuleDatabase(database.Database):
def __getattr__(self, name):
attr = super(ModuleDatabase, self).__getattr__(name)
if isinstance(attr, collection.Collection):
return ModuleCollection(self, name)
return attr
class ModuleCollection(collection.Collection):
def __init__(self, database, name, create=False, **kwargs):
_name = 'module_%s_%s' % (self.__class__.__name__, name)
super(ModuleCollection, self).__init__(database=database,
name=_name,
create=create)
client = mongo_client.MongoClient(MONGO_HOST, MONGO_PORT)
db = client[MONGO_DB]
module_client = ModuleMongoClient(MONGO_HOST, MONGO_PORT)
module_db = client[MONGO_DB]
|
- from pymongo import MongoClient
? ^ ^
+ from pymongo import mongo_client
? ^ ^^
+ from pymongo import database
+ from pymongo import collection
from settings import *
+ class ModuleMongoClient(mongo_client.MongoClient):
+ def __getattr__(self, name):
+ attr = super(ModuleMongoClient, self).__getattr__(name)
+ if isinstance(attr, database.Database):
+ return Database(self, name)
+ return attr
+
+ class ModuleDatabase(database.Database):
+ def __getattr__(self, name):
+ attr = super(ModuleDatabase, self).__getattr__(name)
+ if isinstance(attr, collection.Collection):
+ return ModuleCollection(self, name)
+ return attr
+
+ class ModuleCollection(collection.Collection):
+ def __init__(self, database, name, create=False, **kwargs):
+ _name = 'module_%s_%s' % (self.__class__.__name__, name)
+ super(ModuleCollection, self).__init__(database=database,
+ name=_name,
+ create=create)
+
- client = MongoClient(MONGO_HOST, MONGO_PORT)
+ client = mongo_client.MongoClient(MONGO_HOST, MONGO_PORT)
? +++++++++++++
db = client[MONGO_DB]
+ module_client = ModuleMongoClient(MONGO_HOST, MONGO_PORT)
+ module_db = client[MONGO_DB]
|
5606e8d56b7f6441eeb121795b0f400d65858b3b
|
tests/integration/test_fanout.py
|
tests/integration/test_fanout.py
|
import uuid
import diesel
from diesel.util.queue import Fanout
class FanoutHarness(object):
def setup(self):
self.fan = Fanout()
self.subscriber_data = {}
for x in xrange(10):
diesel.fork(self.subscriber)
diesel.sleep()
for i in xrange(10):
self.fan.pub(i)
diesel.sleep()
def subscriber(self):
self.subscriber_data[uuid.uuid4()] = data = []
with self.fan.sub() as q:
for i in xrange(10):
data.append(q.get())
class TestFanout(FanoutHarness):
def test_all_subscribers_get_the_published_messages(self):
assert len(self.subscriber_data) == 10
for values in self.subscriber_data.itervalues():
assert values == range(10), values
def test_sub_is_removed_after_it_is_done(self):
assert not self.fan.subs
|
import uuid
import diesel
from diesel.util.queue import Fanout
from diesel.util.event import Countdown
class FanoutHarness(object):
def setup(self):
self.done = Countdown(10)
self.fan = Fanout()
self.subscriber_data = {}
for x in xrange(10):
diesel.fork(self.subscriber)
diesel.sleep()
for i in xrange(10):
self.fan.pub(i)
self.done.wait()
def subscriber(self):
self.subscriber_data[uuid.uuid4()] = data = []
with self.fan.sub() as q:
for i in xrange(10):
data.append(q.get())
self.done.tick()
class TestFanout(FanoutHarness):
def test_all_subscribers_get_the_published_messages(self):
assert len(self.subscriber_data) == 10
for values in self.subscriber_data.itervalues():
assert values == range(10), values
def test_sub_is_removed_after_it_is_done(self):
assert not self.fan.subs
|
Add "done" tracking for fanout test.
|
Add "done" tracking for fanout test.
|
Python
|
bsd-3-clause
|
dieseldev/diesel
|
import uuid
import diesel
from diesel.util.queue import Fanout
+ from diesel.util.event import Countdown
class FanoutHarness(object):
def setup(self):
+ self.done = Countdown(10)
self.fan = Fanout()
self.subscriber_data = {}
for x in xrange(10):
diesel.fork(self.subscriber)
diesel.sleep()
for i in xrange(10):
self.fan.pub(i)
- diesel.sleep()
+ self.done.wait()
def subscriber(self):
self.subscriber_data[uuid.uuid4()] = data = []
with self.fan.sub() as q:
for i in xrange(10):
data.append(q.get())
+ self.done.tick()
class TestFanout(FanoutHarness):
def test_all_subscribers_get_the_published_messages(self):
assert len(self.subscriber_data) == 10
for values in self.subscriber_data.itervalues():
assert values == range(10), values
def test_sub_is_removed_after_it_is_done(self):
assert not self.fan.subs
|
Add "done" tracking for fanout test.
|
## Code Before:
import uuid
import diesel
from diesel.util.queue import Fanout
class FanoutHarness(object):
def setup(self):
self.fan = Fanout()
self.subscriber_data = {}
for x in xrange(10):
diesel.fork(self.subscriber)
diesel.sleep()
for i in xrange(10):
self.fan.pub(i)
diesel.sleep()
def subscriber(self):
self.subscriber_data[uuid.uuid4()] = data = []
with self.fan.sub() as q:
for i in xrange(10):
data.append(q.get())
class TestFanout(FanoutHarness):
def test_all_subscribers_get_the_published_messages(self):
assert len(self.subscriber_data) == 10
for values in self.subscriber_data.itervalues():
assert values == range(10), values
def test_sub_is_removed_after_it_is_done(self):
assert not self.fan.subs
## Instruction:
Add "done" tracking for fanout test.
## Code After:
import uuid
import diesel
from diesel.util.queue import Fanout
from diesel.util.event import Countdown
class FanoutHarness(object):
def setup(self):
self.done = Countdown(10)
self.fan = Fanout()
self.subscriber_data = {}
for x in xrange(10):
diesel.fork(self.subscriber)
diesel.sleep()
for i in xrange(10):
self.fan.pub(i)
self.done.wait()
def subscriber(self):
self.subscriber_data[uuid.uuid4()] = data = []
with self.fan.sub() as q:
for i in xrange(10):
data.append(q.get())
self.done.tick()
class TestFanout(FanoutHarness):
def test_all_subscribers_get_the_published_messages(self):
assert len(self.subscriber_data) == 10
for values in self.subscriber_data.itervalues():
assert values == range(10), values
def test_sub_is_removed_after_it_is_done(self):
assert not self.fan.subs
|
import uuid
import diesel
from diesel.util.queue import Fanout
+ from diesel.util.event import Countdown
class FanoutHarness(object):
def setup(self):
+ self.done = Countdown(10)
self.fan = Fanout()
self.subscriber_data = {}
for x in xrange(10):
diesel.fork(self.subscriber)
diesel.sleep()
for i in xrange(10):
self.fan.pub(i)
- diesel.sleep()
+ self.done.wait()
def subscriber(self):
self.subscriber_data[uuid.uuid4()] = data = []
with self.fan.sub() as q:
for i in xrange(10):
data.append(q.get())
+ self.done.tick()
class TestFanout(FanoutHarness):
def test_all_subscribers_get_the_published_messages(self):
assert len(self.subscriber_data) == 10
for values in self.subscriber_data.itervalues():
assert values == range(10), values
def test_sub_is_removed_after_it_is_done(self):
assert not self.fan.subs
|
690c70db9717bcc538db4e35597145870106844f
|
versioning/signals.py
|
versioning/signals.py
|
import sha
from django.contrib.contenttypes.models import ContentType
from versioning import _registry
from versioning.diff import unified_diff
from versioning.models import Revision
def pre_save(instance, **kwargs):
"""
"""
model = kwargs["sender"]
fields = _registry[model]
original = model._default_manager.get(pk=instance.pk)
ct = ContentType.objects.get_for_model(model)
diff = []
for field in fields:
original_data = getattr(original, field)
new_data = getattr(instance, field)
data_diff = unified_diff(original_data.split("\n"),
new_data.split("\n"), context=3)
diff.extend(["--- %s.%s" % (model.__name__, field),
"+++ %s.%s" % (model.__name__, field)])
for line in data_diff:
diff.append(line)
delta = "\n".join(diff)
sha1 = sha.new(delta)
rev = Revision(sha1=sha1.hexdigest(), object_pk=instance.pk,
content_type=ct, delta=delta)
rev.save()
|
import sha
from django.contrib.contenttypes.models import ContentType
from versioning import _registry
from versioning.diff import unified_diff
from versioning.models import Revision
def pre_save(instance, **kwargs):
"""
"""
model = kwargs["sender"]
fields = _registry[model]
original = model._default_manager.get(pk=instance.pk)
ct = ContentType.objects.get_for_model(model)
diff = []
for field in fields:
original_data = getattr(original, field)
new_data = getattr(instance, field)
data_diff = unified_diff(original_data.splitlines(),
new_data.splitlines(), context=3)
diff.extend(["--- %s.%s" % (model.__name__, field),
"+++ %s.%s" % (model.__name__, field)])
for line in data_diff:
diff.append(line)
delta = "\n".join(diff)
sha1 = sha.new(delta)
rev = Revision(sha1=sha1.hexdigest(), object_pk=instance.pk,
content_type=ct, delta=delta)
rev.save()
|
Use splitlines instead of hard-coding the line endings.
|
Use splitlines instead of hard-coding the line endings.
git-svn-id: 15b99a5ef70b6649222be30eb13433ba2eb40757@14 cdb1d5cb-5653-0410-9e46-1b5f511687a6
|
Python
|
bsd-3-clause
|
luzfcb/django-versioning,luzfcb/django-versioning
|
import sha
from django.contrib.contenttypes.models import ContentType
from versioning import _registry
from versioning.diff import unified_diff
from versioning.models import Revision
def pre_save(instance, **kwargs):
"""
"""
model = kwargs["sender"]
fields = _registry[model]
original = model._default_manager.get(pk=instance.pk)
ct = ContentType.objects.get_for_model(model)
diff = []
for field in fields:
original_data = getattr(original, field)
new_data = getattr(instance, field)
- data_diff = unified_diff(original_data.split("\n"),
+ data_diff = unified_diff(original_data.splitlines(),
- new_data.split("\n"), context=3)
+ new_data.splitlines(), context=3)
diff.extend(["--- %s.%s" % (model.__name__, field),
"+++ %s.%s" % (model.__name__, field)])
for line in data_diff:
diff.append(line)
delta = "\n".join(diff)
sha1 = sha.new(delta)
rev = Revision(sha1=sha1.hexdigest(), object_pk=instance.pk,
content_type=ct, delta=delta)
rev.save()
|
Use splitlines instead of hard-coding the line endings.
|
## Code Before:
import sha
from django.contrib.contenttypes.models import ContentType
from versioning import _registry
from versioning.diff import unified_diff
from versioning.models import Revision
def pre_save(instance, **kwargs):
"""
"""
model = kwargs["sender"]
fields = _registry[model]
original = model._default_manager.get(pk=instance.pk)
ct = ContentType.objects.get_for_model(model)
diff = []
for field in fields:
original_data = getattr(original, field)
new_data = getattr(instance, field)
data_diff = unified_diff(original_data.split("\n"),
new_data.split("\n"), context=3)
diff.extend(["--- %s.%s" % (model.__name__, field),
"+++ %s.%s" % (model.__name__, field)])
for line in data_diff:
diff.append(line)
delta = "\n".join(diff)
sha1 = sha.new(delta)
rev = Revision(sha1=sha1.hexdigest(), object_pk=instance.pk,
content_type=ct, delta=delta)
rev.save()
## Instruction:
Use splitlines instead of hard-coding the line endings.
## Code After:
import sha
from django.contrib.contenttypes.models import ContentType
from versioning import _registry
from versioning.diff import unified_diff
from versioning.models import Revision
def pre_save(instance, **kwargs):
"""
"""
model = kwargs["sender"]
fields = _registry[model]
original = model._default_manager.get(pk=instance.pk)
ct = ContentType.objects.get_for_model(model)
diff = []
for field in fields:
original_data = getattr(original, field)
new_data = getattr(instance, field)
data_diff = unified_diff(original_data.splitlines(),
new_data.splitlines(), context=3)
diff.extend(["--- %s.%s" % (model.__name__, field),
"+++ %s.%s" % (model.__name__, field)])
for line in data_diff:
diff.append(line)
delta = "\n".join(diff)
sha1 = sha.new(delta)
rev = Revision(sha1=sha1.hexdigest(), object_pk=instance.pk,
content_type=ct, delta=delta)
rev.save()
|
import sha
from django.contrib.contenttypes.models import ContentType
from versioning import _registry
from versioning.diff import unified_diff
from versioning.models import Revision
def pre_save(instance, **kwargs):
"""
"""
model = kwargs["sender"]
fields = _registry[model]
original = model._default_manager.get(pk=instance.pk)
ct = ContentType.objects.get_for_model(model)
diff = []
for field in fields:
original_data = getattr(original, field)
new_data = getattr(instance, field)
- data_diff = unified_diff(original_data.split("\n"),
? ----
+ data_diff = unified_diff(original_data.splitlines(),
? +++++
- new_data.split("\n"), context=3)
? ----
+ new_data.splitlines(), context=3)
? +++++
diff.extend(["--- %s.%s" % (model.__name__, field),
"+++ %s.%s" % (model.__name__, field)])
for line in data_diff:
diff.append(line)
delta = "\n".join(diff)
sha1 = sha.new(delta)
rev = Revision(sha1=sha1.hexdigest(), object_pk=instance.pk,
content_type=ct, delta=delta)
rev.save()
|
596e850189e8c8590ac4b8c401de5930ce711929
|
puppetboard/default_settings.py
|
puppetboard/default_settings.py
|
import os
PUPPETDB_HOST = 'localhost'
PUPPETDB_PORT = 8080
PUPPETDB_SSL_VERIFY = True
PUPPETDB_KEY = None
PUPPETDB_CERT = None
PUPPETDB_TIMEOUT = 20
DEFAULT_ENVIRONMENT = 'production'
SECRET_KEY = os.urandom(24)
DEV_LISTEN_HOST = '127.0.0.1'
DEV_LISTEN_PORT = 5000
DEV_COFFEE_LOCATION = 'coffee'
UNRESPONSIVE_HOURS = 2
ENABLE_QUERY = True
LOCALISE_TIMESTAMP = True
LOGLEVEL = 'info'
REPORTS_COUNT = 10
OFFLINE_MODE = False
ENABLE_CATALOG = False
GRAPH_FACTS = ['architecture',
'domain',
'lsbcodename',
'lsbdistcodename',
'lsbdistid',
'lsbdistrelease',
'lsbmajdistrelease',
'netmask',
'osfamily',
'puppetversion',
'processorcount']
INVENTORY_FACTS = [ ('Hostname', 'fqdn' ),
('IP Address', 'ipaddress' ),
('OS', 'lsbdistdescription'),
('Architecture', 'hardwaremodel' ),
('Kernel Version', 'kernelrelease' ),
('Puppet Version', 'puppetversion' ), ]
REFRESH_RATE = 30
|
import os
PUPPETDB_HOST = 'localhost'
PUPPETDB_PORT = 8080
PUPPETDB_SSL_VERIFY = True
PUPPETDB_KEY = None
PUPPETDB_CERT = None
PUPPETDB_TIMEOUT = 20
DEFAULT_ENVIRONMENT = 'production'
SECRET_KEY = os.urandom(24)
DEV_LISTEN_HOST = '127.0.0.1'
DEV_LISTEN_PORT = 5000
DEV_COFFEE_LOCATION = 'coffee'
UNRESPONSIVE_HOURS = 2
ENABLE_QUERY = True
LOCALISE_TIMESTAMP = True
LOGLEVEL = 'info'
REPORTS_COUNT = 10
OFFLINE_MODE = False
ENABLE_CATALOG = False
GRAPH_FACTS = ['architecture',
'clientversion',
'domain',
'lsbcodename',
'lsbdistcodename',
'lsbdistid',
'lsbdistrelease',
'lsbmajdistrelease',
'netmask',
'osfamily',
'puppetversion',
'processorcount']
INVENTORY_FACTS = [ ('Hostname', 'fqdn' ),
('IP Address', 'ipaddress' ),
('OS', 'lsbdistdescription'),
('Architecture', 'hardwaremodel' ),
('Kernel Version', 'kernelrelease' ),
('Puppet Version', 'puppetversion' ), ]
REFRESH_RATE = 30
|
Add clientversion to graphing facts
|
Add clientversion to graphing facts
|
Python
|
apache-2.0
|
puppet-community/puppetboard,johnzimm/xx-puppetboard,tparkercbn/puppetboard,mterzo/puppetboard,stoyansbg/puppetboard,mterzo/puppetboard,holstvoogd/puppetboard,johnzimm/xx-puppetboard,johnzimm/puppetboard,tparkercbn/puppetboard,mterzo/puppetboard,stoyansbg/puppetboard,james-powis/puppetboard,tparkercbn/puppetboard,voxpupuli/puppetboard,voxpupuli/puppetboard,voxpupuli/puppetboard,johnzimm/puppetboard,johnzimm/xx-puppetboard,puppet-community/puppetboard,grandich/puppetboard,holstvoogd/puppetboard,grandich/puppetboard,holstvoogd/puppetboard,grandich/puppetboard,james-powis/puppetboard,james-powis/puppetboard,grandich/puppetboard,johnzimm/xx-puppetboard,stoyansbg/puppetboard,johnzimm/puppetboard,mterzo/puppetboard,tparkercbn/puppetboard,puppet-community/puppetboard
|
import os
PUPPETDB_HOST = 'localhost'
PUPPETDB_PORT = 8080
PUPPETDB_SSL_VERIFY = True
PUPPETDB_KEY = None
PUPPETDB_CERT = None
PUPPETDB_TIMEOUT = 20
DEFAULT_ENVIRONMENT = 'production'
SECRET_KEY = os.urandom(24)
DEV_LISTEN_HOST = '127.0.0.1'
DEV_LISTEN_PORT = 5000
DEV_COFFEE_LOCATION = 'coffee'
UNRESPONSIVE_HOURS = 2
ENABLE_QUERY = True
LOCALISE_TIMESTAMP = True
LOGLEVEL = 'info'
REPORTS_COUNT = 10
OFFLINE_MODE = False
ENABLE_CATALOG = False
GRAPH_FACTS = ['architecture',
+ 'clientversion',
'domain',
'lsbcodename',
'lsbdistcodename',
'lsbdistid',
'lsbdistrelease',
'lsbmajdistrelease',
'netmask',
'osfamily',
'puppetversion',
'processorcount']
INVENTORY_FACTS = [ ('Hostname', 'fqdn' ),
('IP Address', 'ipaddress' ),
('OS', 'lsbdistdescription'),
('Architecture', 'hardwaremodel' ),
('Kernel Version', 'kernelrelease' ),
('Puppet Version', 'puppetversion' ), ]
REFRESH_RATE = 30
|
Add clientversion to graphing facts
|
## Code Before:
import os
PUPPETDB_HOST = 'localhost'
PUPPETDB_PORT = 8080
PUPPETDB_SSL_VERIFY = True
PUPPETDB_KEY = None
PUPPETDB_CERT = None
PUPPETDB_TIMEOUT = 20
DEFAULT_ENVIRONMENT = 'production'
SECRET_KEY = os.urandom(24)
DEV_LISTEN_HOST = '127.0.0.1'
DEV_LISTEN_PORT = 5000
DEV_COFFEE_LOCATION = 'coffee'
UNRESPONSIVE_HOURS = 2
ENABLE_QUERY = True
LOCALISE_TIMESTAMP = True
LOGLEVEL = 'info'
REPORTS_COUNT = 10
OFFLINE_MODE = False
ENABLE_CATALOG = False
GRAPH_FACTS = ['architecture',
'domain',
'lsbcodename',
'lsbdistcodename',
'lsbdistid',
'lsbdistrelease',
'lsbmajdistrelease',
'netmask',
'osfamily',
'puppetversion',
'processorcount']
INVENTORY_FACTS = [ ('Hostname', 'fqdn' ),
('IP Address', 'ipaddress' ),
('OS', 'lsbdistdescription'),
('Architecture', 'hardwaremodel' ),
('Kernel Version', 'kernelrelease' ),
('Puppet Version', 'puppetversion' ), ]
REFRESH_RATE = 30
## Instruction:
Add clientversion to graphing facts
## Code After:
import os
PUPPETDB_HOST = 'localhost'
PUPPETDB_PORT = 8080
PUPPETDB_SSL_VERIFY = True
PUPPETDB_KEY = None
PUPPETDB_CERT = None
PUPPETDB_TIMEOUT = 20
DEFAULT_ENVIRONMENT = 'production'
SECRET_KEY = os.urandom(24)
DEV_LISTEN_HOST = '127.0.0.1'
DEV_LISTEN_PORT = 5000
DEV_COFFEE_LOCATION = 'coffee'
UNRESPONSIVE_HOURS = 2
ENABLE_QUERY = True
LOCALISE_TIMESTAMP = True
LOGLEVEL = 'info'
REPORTS_COUNT = 10
OFFLINE_MODE = False
ENABLE_CATALOG = False
GRAPH_FACTS = ['architecture',
'clientversion',
'domain',
'lsbcodename',
'lsbdistcodename',
'lsbdistid',
'lsbdistrelease',
'lsbmajdistrelease',
'netmask',
'osfamily',
'puppetversion',
'processorcount']
INVENTORY_FACTS = [ ('Hostname', 'fqdn' ),
('IP Address', 'ipaddress' ),
('OS', 'lsbdistdescription'),
('Architecture', 'hardwaremodel' ),
('Kernel Version', 'kernelrelease' ),
('Puppet Version', 'puppetversion' ), ]
REFRESH_RATE = 30
|
import os
PUPPETDB_HOST = 'localhost'
PUPPETDB_PORT = 8080
PUPPETDB_SSL_VERIFY = True
PUPPETDB_KEY = None
PUPPETDB_CERT = None
PUPPETDB_TIMEOUT = 20
DEFAULT_ENVIRONMENT = 'production'
SECRET_KEY = os.urandom(24)
DEV_LISTEN_HOST = '127.0.0.1'
DEV_LISTEN_PORT = 5000
DEV_COFFEE_LOCATION = 'coffee'
UNRESPONSIVE_HOURS = 2
ENABLE_QUERY = True
LOCALISE_TIMESTAMP = True
LOGLEVEL = 'info'
REPORTS_COUNT = 10
OFFLINE_MODE = False
ENABLE_CATALOG = False
GRAPH_FACTS = ['architecture',
+ 'clientversion',
'domain',
'lsbcodename',
'lsbdistcodename',
'lsbdistid',
'lsbdistrelease',
'lsbmajdistrelease',
'netmask',
'osfamily',
'puppetversion',
'processorcount']
INVENTORY_FACTS = [ ('Hostname', 'fqdn' ),
('IP Address', 'ipaddress' ),
('OS', 'lsbdistdescription'),
('Architecture', 'hardwaremodel' ),
('Kernel Version', 'kernelrelease' ),
('Puppet Version', 'puppetversion' ), ]
REFRESH_RATE = 30
|
e66bd19fc4baae27f40b1b63bdc0a3280d8d25e9
|
src/heap.py
|
src/heap.py
|
class Heap(object):
"""Implements a heap data structure in Python.
The underlying data structure used to hold the data is an array.
"""
__heap = []
def __init__(self, initial=[]):
"""Creates a new heap.
Args:
initial: (Optional): A continguous array containing the data with which to
initialize the new heap.
"""
self.__heap = []
|
class Heap(object):
"""Implements a heap data structure in Python.
The underlying data structure used to hold the data is an array.
"""
__heap = []
def __init__(self, initial=None):
"""Creates a new heap.
Args:
initial: (Optional): A continguous array containing the data with which to
initialize the new heap.
"""
if isinstance(initial, list) or isinstance(initial, tuple):
self.__heap = initial
|
Fix massive bug in initialization
|
Fix massive bug in initialization
|
Python
|
mit
|
DasAllFolks/PyAlgo
|
class Heap(object):
"""Implements a heap data structure in Python.
The underlying data structure used to hold the data is an array.
"""
__heap = []
- def __init__(self, initial=[]):
+ def __init__(self, initial=None):
"""Creates a new heap.
Args:
initial: (Optional): A continguous array containing the data with which to
initialize the new heap.
"""
+ if isinstance(initial, list) or isinstance(initial, tuple):
- self.__heap = []
+ self.__heap = initial
|
Fix massive bug in initialization
|
## Code Before:
class Heap(object):
"""Implements a heap data structure in Python.
The underlying data structure used to hold the data is an array.
"""
__heap = []
def __init__(self, initial=[]):
"""Creates a new heap.
Args:
initial: (Optional): A continguous array containing the data with which to
initialize the new heap.
"""
self.__heap = []
## Instruction:
Fix massive bug in initialization
## Code After:
class Heap(object):
"""Implements a heap data structure in Python.
The underlying data structure used to hold the data is an array.
"""
__heap = []
def __init__(self, initial=None):
"""Creates a new heap.
Args:
initial: (Optional): A continguous array containing the data with which to
initialize the new heap.
"""
if isinstance(initial, list) or isinstance(initial, tuple):
self.__heap = initial
|
class Heap(object):
"""Implements a heap data structure in Python.
The underlying data structure used to hold the data is an array.
"""
__heap = []
- def __init__(self, initial=[]):
? ^^
+ def __init__(self, initial=None):
? ^^^^
"""Creates a new heap.
Args:
initial: (Optional): A continguous array containing the data with which to
initialize the new heap.
"""
+ if isinstance(initial, list) or isinstance(initial, tuple):
- self.__heap = []
? ^^
+ self.__heap = initial
? ++ ^^^^^^^
|
afb58da6ecc11a1c92d230bc2dcbb06464cc4f32
|
percept/workflows/commands/run_flow.py
|
percept/workflows/commands/run_flow.py
|
from percept.management.commands import BaseCommand
from percept.utils.registry import registry, find_in_registry
from percept.workflows.base import NaiveWorkflow
from percept.utils.workflow import WorkflowWrapper, WorkflowLoader
import logging
log = logging.getLogger(__name__)
class Command(BaseCommand):
args = 'config_file'
def command(self, *args, **options):
config_file = args[0]
wrapper = WorkflowWrapper(config_file, NaiveWorkflow)
wrapper.run()
|
from percept.management.commands import BaseCommand
from percept.utils.registry import registry, find_in_registry
from percept.workflows.base import NaiveWorkflow
from percept.utils.workflow import WorkflowWrapper, WorkflowLoader
from optparse import make_option
import IPython
import logging
log = logging.getLogger(__name__)
class Command(BaseCommand):
args = 'config_file'
option_list = BaseCommand.option_list + (make_option('--shell',
help='Whether or not to load a shell afterwards".'),)
def command(self, *args, **options):
config_file = args[0]
wrapper = WorkflowWrapper(config_file, NaiveWorkflow)
wrapper.run()
if '--shell' in options:
ns = {
'flow' : wrapper.workflow,
'tasks' : wrapper.workflow.tasks
}
IPython.embed(user_ns=ns)
|
Add in a way to start a shell using the results of a workflow
|
Add in a way to start a shell using the results of a workflow
|
Python
|
apache-2.0
|
VikParuchuri/percept,VikParuchuri/percept
|
from percept.management.commands import BaseCommand
from percept.utils.registry import registry, find_in_registry
from percept.workflows.base import NaiveWorkflow
from percept.utils.workflow import WorkflowWrapper, WorkflowLoader
+ from optparse import make_option
+ import IPython
import logging
log = logging.getLogger(__name__)
class Command(BaseCommand):
args = 'config_file'
+ option_list = BaseCommand.option_list + (make_option('--shell',
+ help='Whether or not to load a shell afterwards".'),)
+
def command(self, *args, **options):
config_file = args[0]
wrapper = WorkflowWrapper(config_file, NaiveWorkflow)
wrapper.run()
+ if '--shell' in options:
+ ns = {
+ 'flow' : wrapper.workflow,
+ 'tasks' : wrapper.workflow.tasks
+ }
+ IPython.embed(user_ns=ns)
+
+
|
Add in a way to start a shell using the results of a workflow
|
## Code Before:
from percept.management.commands import BaseCommand
from percept.utils.registry import registry, find_in_registry
from percept.workflows.base import NaiveWorkflow
from percept.utils.workflow import WorkflowWrapper, WorkflowLoader
import logging
log = logging.getLogger(__name__)
class Command(BaseCommand):
args = 'config_file'
def command(self, *args, **options):
config_file = args[0]
wrapper = WorkflowWrapper(config_file, NaiveWorkflow)
wrapper.run()
## Instruction:
Add in a way to start a shell using the results of a workflow
## Code After:
from percept.management.commands import BaseCommand
from percept.utils.registry import registry, find_in_registry
from percept.workflows.base import NaiveWorkflow
from percept.utils.workflow import WorkflowWrapper, WorkflowLoader
from optparse import make_option
import IPython
import logging
log = logging.getLogger(__name__)
class Command(BaseCommand):
args = 'config_file'
option_list = BaseCommand.option_list + (make_option('--shell',
help='Whether or not to load a shell afterwards".'),)
def command(self, *args, **options):
config_file = args[0]
wrapper = WorkflowWrapper(config_file, NaiveWorkflow)
wrapper.run()
if '--shell' in options:
ns = {
'flow' : wrapper.workflow,
'tasks' : wrapper.workflow.tasks
}
IPython.embed(user_ns=ns)
|
from percept.management.commands import BaseCommand
from percept.utils.registry import registry, find_in_registry
from percept.workflows.base import NaiveWorkflow
from percept.utils.workflow import WorkflowWrapper, WorkflowLoader
+ from optparse import make_option
+ import IPython
import logging
log = logging.getLogger(__name__)
class Command(BaseCommand):
args = 'config_file'
+ option_list = BaseCommand.option_list + (make_option('--shell',
+ help='Whether or not to load a shell afterwards".'),)
+
def command(self, *args, **options):
config_file = args[0]
wrapper = WorkflowWrapper(config_file, NaiveWorkflow)
wrapper.run()
+ if '--shell' in options:
+ ns = {
+ 'flow' : wrapper.workflow,
+ 'tasks' : wrapper.workflow.tasks
+ }
+
+ IPython.embed(user_ns=ns)
+
|
573055a80ef19f2b743ef3bfc08c40e8738c5bb1
|
libtree/utils.py
|
libtree/utils.py
|
import collections
from copy import deepcopy
def recursive_dict_merge(left, right, first_run=True):
"""
Merge ``right`` into ``left`` and return a new dictionary.
"""
if first_run is True:
left = deepcopy(left)
for key in right:
if key in left:
if isinstance(left[key], dict) and isinstance(right[key], dict):
recursive_dict_merge(left[key], right[key], False)
else:
left[key] = right[key]
else:
left[key] = right[key]
return left
def vectorize_nodes(*nodes):
if len(nodes) == 1 and isinstance(nodes[0], collections.Iterable):
nodes = nodes[0]
ret = []
parents = {node.parent: node for node in nodes}
last_parent = None
for _ in range(len(parents)):
node = parents[last_parent]
ret.append(node)
last_parent = node.id
return ret
|
import collections
from copy import deepcopy
def recursive_dict_merge(left, right, create_copy=True):
"""
Merge ``right`` into ``left`` and return a new dictionary.
"""
if create_copy is True:
left = deepcopy(left)
for key in right:
if key in left:
if isinstance(left[key], dict) and isinstance(right[key], dict):
recursive_dict_merge(left[key], right[key], False)
else:
left[key] = right[key]
else:
left[key] = right[key]
return left
def vectorize_nodes(*nodes):
if len(nodes) == 1 and isinstance(nodes[0], collections.Iterable):
nodes = nodes[0]
ret = []
parents = {node.parent: node for node in nodes}
last_parent = None
for _ in range(len(parents)):
node = parents[last_parent]
ret.append(node)
last_parent = node.id
return ret
|
Rename 'first_run' -> 'create_copy' in recursive_dict_merge()
|
Rename 'first_run' -> 'create_copy' in recursive_dict_merge()
|
Python
|
mit
|
conceptsandtraining/libtree
|
import collections
from copy import deepcopy
- def recursive_dict_merge(left, right, first_run=True):
+ def recursive_dict_merge(left, right, create_copy=True):
"""
Merge ``right`` into ``left`` and return a new dictionary.
"""
- if first_run is True:
+ if create_copy is True:
left = deepcopy(left)
for key in right:
if key in left:
if isinstance(left[key], dict) and isinstance(right[key], dict):
recursive_dict_merge(left[key], right[key], False)
else:
left[key] = right[key]
else:
left[key] = right[key]
return left
def vectorize_nodes(*nodes):
if len(nodes) == 1 and isinstance(nodes[0], collections.Iterable):
nodes = nodes[0]
ret = []
parents = {node.parent: node for node in nodes}
last_parent = None
for _ in range(len(parents)):
node = parents[last_parent]
ret.append(node)
last_parent = node.id
return ret
|
Rename 'first_run' -> 'create_copy' in recursive_dict_merge()
|
## Code Before:
import collections
from copy import deepcopy
def recursive_dict_merge(left, right, first_run=True):
"""
Merge ``right`` into ``left`` and return a new dictionary.
"""
if first_run is True:
left = deepcopy(left)
for key in right:
if key in left:
if isinstance(left[key], dict) and isinstance(right[key], dict):
recursive_dict_merge(left[key], right[key], False)
else:
left[key] = right[key]
else:
left[key] = right[key]
return left
def vectorize_nodes(*nodes):
if len(nodes) == 1 and isinstance(nodes[0], collections.Iterable):
nodes = nodes[0]
ret = []
parents = {node.parent: node for node in nodes}
last_parent = None
for _ in range(len(parents)):
node = parents[last_parent]
ret.append(node)
last_parent = node.id
return ret
## Instruction:
Rename 'first_run' -> 'create_copy' in recursive_dict_merge()
## Code After:
import collections
from copy import deepcopy
def recursive_dict_merge(left, right, create_copy=True):
"""
Merge ``right`` into ``left`` and return a new dictionary.
"""
if create_copy is True:
left = deepcopy(left)
for key in right:
if key in left:
if isinstance(left[key], dict) and isinstance(right[key], dict):
recursive_dict_merge(left[key], right[key], False)
else:
left[key] = right[key]
else:
left[key] = right[key]
return left
def vectorize_nodes(*nodes):
if len(nodes) == 1 and isinstance(nodes[0], collections.Iterable):
nodes = nodes[0]
ret = []
parents = {node.parent: node for node in nodes}
last_parent = None
for _ in range(len(parents)):
node = parents[last_parent]
ret.append(node)
last_parent = node.id
return ret
|
import collections
from copy import deepcopy
- def recursive_dict_merge(left, right, first_run=True):
? ^^ ^ ^^^
+ def recursive_dict_merge(left, right, create_copy=True):
? ^ ^^ + ^^^^
"""
Merge ``right`` into ``left`` and return a new dictionary.
"""
- if first_run is True:
+ if create_copy is True:
left = deepcopy(left)
for key in right:
if key in left:
if isinstance(left[key], dict) and isinstance(right[key], dict):
recursive_dict_merge(left[key], right[key], False)
else:
left[key] = right[key]
else:
left[key] = right[key]
return left
def vectorize_nodes(*nodes):
if len(nodes) == 1 and isinstance(nodes[0], collections.Iterable):
nodes = nodes[0]
ret = []
parents = {node.parent: node for node in nodes}
last_parent = None
for _ in range(len(parents)):
node = parents[last_parent]
ret.append(node)
last_parent = node.id
return ret
|
fb5117e653b7a47f4af35d2c19ada9da15458ae3
|
tmpl/Platform.py
|
tmpl/Platform.py
|
class BasePlatform(object):
"""
A template for codes which are dependent on platform, whatever shell type or system type.
Redefine members to modify the function.
"""
def __init__(self, shell = False):
if shell:
if os.name == 'posix':
return self.Posix()
if os.name == 'nt':
return self.NT()
return None
else:
import platform
if platform.system()[:6] == 'Darwin':
return self.OSX()
if platform.system()[:5] == 'Linux':
return self.Linux()
if platform.system()[:7] == 'Windows':
return self.Windows()
if platform.system()[:6] == 'CYGWIN':
return self.Cygwin()
return None
return None
class Common(object):
"""
Redefine members here for those which will be inherited to all shells and systems.
"""
pass
class Shell(self.Common):
"""
Redefine members here for those which will be inherited to all shells.
"""
pass
class System(self.Common):
"""
Redefine members here for those which will be inherited to all systems.
"""
pass
class Posix(self.Shell):
pass
class NT(self.Shell):
pass
class OSX(self.System):
pass
class Linux(self.System):
pass
class Windows(self.System):
pass
class Cygwin(self.System):
pass
if __name__ == '__main__':
raise EnvironmentError ("DO NOT DIRECTLY RUN THIS TEMPLATE!")
|
class BasePlatform(object):
"""
A template for codes which are dependent on platform, whatever shell type or system type.
Redefine members to modify the function.
"""
def __init__(self, shell = False):
if shell:
if os.name == 'posix':
return self.Posix()
if os.name == 'nt':
return self.NT()
return None
else:
import platform
if platform.system()[:6] == 'Darwin':
return self.OSX()
if platform.system()[:5] == 'Linux':
return self.Linux()
if platform.system()[:7] == 'Windows':
return self.Windows()
if platform.system()[:6] == 'CYGWIN':
return self.Cygwin()
return None
return None
class Common(object):
"""
Redefine members here for those which will be inherited to all shells and systems.
"""
pass
class Shell(Common):
"""
Redefine members here for those which will be inherited to all shells.
"""
pass
class System(Common):
"""
Redefine members here for those which will be inherited to all systems.
"""
pass
class Posix(Shell):
pass
class NT(Shell):
pass
class OSX(System):
pass
class Linux(System):
pass
class Windows(System):
pass
class Cygwin(System):
pass
if __name__ == '__main__':
raise EnvironmentError ("DO NOT DIRECTLY RUN THIS TEMPLATE!")
|
Fix the compile error of define classes which derivate from other classes in class.
|
Fix the compile error of define classes which derivate from other classes in class.
|
Python
|
mit
|
nday-dev/Spider-Framework
|
class BasePlatform(object):
"""
A template for codes which are dependent on platform, whatever shell type or system type.
Redefine members to modify the function.
"""
def __init__(self, shell = False):
if shell:
if os.name == 'posix':
return self.Posix()
if os.name == 'nt':
return self.NT()
return None
else:
import platform
if platform.system()[:6] == 'Darwin':
return self.OSX()
if platform.system()[:5] == 'Linux':
return self.Linux()
if platform.system()[:7] == 'Windows':
return self.Windows()
if platform.system()[:6] == 'CYGWIN':
return self.Cygwin()
return None
return None
class Common(object):
"""
Redefine members here for those which will be inherited to all shells and systems.
"""
pass
- class Shell(self.Common):
+ class Shell(Common):
"""
Redefine members here for those which will be inherited to all shells.
"""
pass
- class System(self.Common):
+ class System(Common):
"""
Redefine members here for those which will be inherited to all systems.
"""
pass
- class Posix(self.Shell):
+ class Posix(Shell):
pass
- class NT(self.Shell):
+ class NT(Shell):
pass
- class OSX(self.System):
+ class OSX(System):
pass
- class Linux(self.System):
+ class Linux(System):
pass
- class Windows(self.System):
+ class Windows(System):
pass
- class Cygwin(self.System):
+ class Cygwin(System):
pass
if __name__ == '__main__':
raise EnvironmentError ("DO NOT DIRECTLY RUN THIS TEMPLATE!")
|
Fix the compile error of define classes which derivate from other classes in class.
|
## Code Before:
class BasePlatform(object):
"""
A template for codes which are dependent on platform, whatever shell type or system type.
Redefine members to modify the function.
"""
def __init__(self, shell = False):
if shell:
if os.name == 'posix':
return self.Posix()
if os.name == 'nt':
return self.NT()
return None
else:
import platform
if platform.system()[:6] == 'Darwin':
return self.OSX()
if platform.system()[:5] == 'Linux':
return self.Linux()
if platform.system()[:7] == 'Windows':
return self.Windows()
if platform.system()[:6] == 'CYGWIN':
return self.Cygwin()
return None
return None
class Common(object):
"""
Redefine members here for those which will be inherited to all shells and systems.
"""
pass
class Shell(self.Common):
"""
Redefine members here for those which will be inherited to all shells.
"""
pass
class System(self.Common):
"""
Redefine members here for those which will be inherited to all systems.
"""
pass
class Posix(self.Shell):
pass
class NT(self.Shell):
pass
class OSX(self.System):
pass
class Linux(self.System):
pass
class Windows(self.System):
pass
class Cygwin(self.System):
pass
if __name__ == '__main__':
raise EnvironmentError ("DO NOT DIRECTLY RUN THIS TEMPLATE!")
## Instruction:
Fix the compile error of define classes which derivate from other classes in class.
## Code After:
class BasePlatform(object):
"""
A template for codes which are dependent on platform, whatever shell type or system type.
Redefine members to modify the function.
"""
def __init__(self, shell = False):
if shell:
if os.name == 'posix':
return self.Posix()
if os.name == 'nt':
return self.NT()
return None
else:
import platform
if platform.system()[:6] == 'Darwin':
return self.OSX()
if platform.system()[:5] == 'Linux':
return self.Linux()
if platform.system()[:7] == 'Windows':
return self.Windows()
if platform.system()[:6] == 'CYGWIN':
return self.Cygwin()
return None
return None
class Common(object):
"""
Redefine members here for those which will be inherited to all shells and systems.
"""
pass
class Shell(Common):
"""
Redefine members here for those which will be inherited to all shells.
"""
pass
class System(Common):
"""
Redefine members here for those which will be inherited to all systems.
"""
pass
class Posix(Shell):
pass
class NT(Shell):
pass
class OSX(System):
pass
class Linux(System):
pass
class Windows(System):
pass
class Cygwin(System):
pass
if __name__ == '__main__':
raise EnvironmentError ("DO NOT DIRECTLY RUN THIS TEMPLATE!")
|
class BasePlatform(object):
"""
A template for codes which are dependent on platform, whatever shell type or system type.
Redefine members to modify the function.
"""
def __init__(self, shell = False):
if shell:
if os.name == 'posix':
return self.Posix()
if os.name == 'nt':
return self.NT()
return None
else:
import platform
if platform.system()[:6] == 'Darwin':
return self.OSX()
if platform.system()[:5] == 'Linux':
return self.Linux()
if platform.system()[:7] == 'Windows':
return self.Windows()
if platform.system()[:6] == 'CYGWIN':
return self.Cygwin()
return None
return None
class Common(object):
"""
Redefine members here for those which will be inherited to all shells and systems.
"""
pass
- class Shell(self.Common):
? -----
+ class Shell(Common):
"""
Redefine members here for those which will be inherited to all shells.
"""
pass
- class System(self.Common):
? -----
+ class System(Common):
"""
Redefine members here for those which will be inherited to all systems.
"""
pass
- class Posix(self.Shell):
? -----
+ class Posix(Shell):
pass
- class NT(self.Shell):
? -----
+ class NT(Shell):
pass
- class OSX(self.System):
? -----
+ class OSX(System):
pass
- class Linux(self.System):
? -----
+ class Linux(System):
pass
- class Windows(self.System):
? -----
+ class Windows(System):
pass
- class Cygwin(self.System):
? -----
+ class Cygwin(System):
pass
if __name__ == '__main__':
raise EnvironmentError ("DO NOT DIRECTLY RUN THIS TEMPLATE!")
|
f2afbc2d7b47e6e28f6924b9761390c34b04ea49
|
trunk/editor/test_opensave.py
|
trunk/editor/test_opensave.py
|
import unittest
from xml.etree import ElementTree
from openfilerooms import openFileRooms
from savefilerooms import saveFileRooms
class Test(unittest.TestCase):
def test1(self):
source = "world1.rooms"
dest = 'a.rooms'
openFileRooms(source)
saveFileRooms(dest)
xml_file_world = ElementTree.fromstring(open(source, 'rb').read())
xml_file_a = ElementTree.fromstring(open(dest, 'rb').read())
diff = []
for line in xml_file_world.getiterator():
difference = self.findDiff(line, xml_file_a)
if difference:
diff.append(difference)
self.assertEqual(diff, [], diff)
def findDiff(self, line, xml_file_a):
find = False
for line_a in xml_file_a.getiterator(line.tag):
if line.tag == line_a.tag:
if line.attrib == line_a.attrib:
find = True
break
if not find:
return line, line_a
return None
if __name__ == "__main__":
unittest.main()
|
import os
import unittest
from xml.etree import ElementTree
from openfilerooms import openFileRooms
from savefilerooms import saveFileRooms
class Test(unittest.TestCase):
test_output = "a.rooms"
def test1(self):
fpath = os.path.abspath(__file__)
path, _ = os.path.split(fpath)
source = os.path.join(path, "..", "examples", "example1", "world.rooms")
source = os.path.normpath(source)
dest = self.test_output
openFileRooms(source)
saveFileRooms(dest)
xml_file_world = ElementTree.fromstring(open(source, 'rb').read())
xml_file_a = ElementTree.fromstring(open(dest, 'rb').read())
diff = []
for line in xml_file_world.getiterator():
difference = self.findDiff(line, xml_file_a)
if difference:
diff.append(difference)
self.assertEqual(diff, [], diff)
def findDiff(self, line, xml_file_a):
find = False
for line_a in xml_file_a.getiterator(line.tag):
if line.tag == line_a.tag:
if line.attrib == line_a.attrib:
find = True
break
if not find:
return line, line_a
return None
def tearDown(self):
# Cleanup the temporary file used for test purposes
os.unlink(self.test_output)
if __name__ == "__main__":
unittest.main()
|
Use one of the stock examples for the open/save test
|
Use one of the stock examples for the open/save test
|
Python
|
mit
|
develersrl/rooms,develersrl/rooms,develersrl/rooms,develersrl/rooms,develersrl/rooms,develersrl/rooms,develersrl/rooms
|
+ import os
import unittest
from xml.etree import ElementTree
from openfilerooms import openFileRooms
from savefilerooms import saveFileRooms
class Test(unittest.TestCase):
+ test_output = "a.rooms"
def test1(self):
- source = "world1.rooms"
- dest = 'a.rooms'
+ fpath = os.path.abspath(__file__)
+ path, _ = os.path.split(fpath)
+ source = os.path.join(path, "..", "examples", "example1", "world.rooms")
+ source = os.path.normpath(source)
+ dest = self.test_output
openFileRooms(source)
saveFileRooms(dest)
xml_file_world = ElementTree.fromstring(open(source, 'rb').read())
xml_file_a = ElementTree.fromstring(open(dest, 'rb').read())
diff = []
for line in xml_file_world.getiterator():
difference = self.findDiff(line, xml_file_a)
if difference:
diff.append(difference)
self.assertEqual(diff, [], diff)
def findDiff(self, line, xml_file_a):
find = False
for line_a in xml_file_a.getiterator(line.tag):
if line.tag == line_a.tag:
if line.attrib == line_a.attrib:
find = True
break
if not find:
return line, line_a
return None
+ def tearDown(self):
+ # Cleanup the temporary file used for test purposes
+ os.unlink(self.test_output)
+
if __name__ == "__main__":
unittest.main()
|
Use one of the stock examples for the open/save test
|
## Code Before:
import unittest
from xml.etree import ElementTree
from openfilerooms import openFileRooms
from savefilerooms import saveFileRooms
class Test(unittest.TestCase):
def test1(self):
source = "world1.rooms"
dest = 'a.rooms'
openFileRooms(source)
saveFileRooms(dest)
xml_file_world = ElementTree.fromstring(open(source, 'rb').read())
xml_file_a = ElementTree.fromstring(open(dest, 'rb').read())
diff = []
for line in xml_file_world.getiterator():
difference = self.findDiff(line, xml_file_a)
if difference:
diff.append(difference)
self.assertEqual(diff, [], diff)
def findDiff(self, line, xml_file_a):
find = False
for line_a in xml_file_a.getiterator(line.tag):
if line.tag == line_a.tag:
if line.attrib == line_a.attrib:
find = True
break
if not find:
return line, line_a
return None
if __name__ == "__main__":
unittest.main()
## Instruction:
Use one of the stock examples for the open/save test
## Code After:
import os
import unittest
from xml.etree import ElementTree
from openfilerooms import openFileRooms
from savefilerooms import saveFileRooms
class Test(unittest.TestCase):
test_output = "a.rooms"
def test1(self):
fpath = os.path.abspath(__file__)
path, _ = os.path.split(fpath)
source = os.path.join(path, "..", "examples", "example1", "world.rooms")
source = os.path.normpath(source)
dest = self.test_output
openFileRooms(source)
saveFileRooms(dest)
xml_file_world = ElementTree.fromstring(open(source, 'rb').read())
xml_file_a = ElementTree.fromstring(open(dest, 'rb').read())
diff = []
for line in xml_file_world.getiterator():
difference = self.findDiff(line, xml_file_a)
if difference:
diff.append(difference)
self.assertEqual(diff, [], diff)
def findDiff(self, line, xml_file_a):
find = False
for line_a in xml_file_a.getiterator(line.tag):
if line.tag == line_a.tag:
if line.attrib == line_a.attrib:
find = True
break
if not find:
return line, line_a
return None
def tearDown(self):
# Cleanup the temporary file used for test purposes
os.unlink(self.test_output)
if __name__ == "__main__":
unittest.main()
|
+ import os
import unittest
from xml.etree import ElementTree
from openfilerooms import openFileRooms
from savefilerooms import saveFileRooms
class Test(unittest.TestCase):
+ test_output = "a.rooms"
def test1(self):
- source = "world1.rooms"
- dest = 'a.rooms'
+ fpath = os.path.abspath(__file__)
+ path, _ = os.path.split(fpath)
+ source = os.path.join(path, "..", "examples", "example1", "world.rooms")
+ source = os.path.normpath(source)
+ dest = self.test_output
openFileRooms(source)
saveFileRooms(dest)
xml_file_world = ElementTree.fromstring(open(source, 'rb').read())
xml_file_a = ElementTree.fromstring(open(dest, 'rb').read())
diff = []
for line in xml_file_world.getiterator():
difference = self.findDiff(line, xml_file_a)
if difference:
diff.append(difference)
self.assertEqual(diff, [], diff)
def findDiff(self, line, xml_file_a):
find = False
for line_a in xml_file_a.getiterator(line.tag):
if line.tag == line_a.tag:
if line.attrib == line_a.attrib:
find = True
break
if not find:
return line, line_a
return None
+ def tearDown(self):
+ # Cleanup the temporary file used for test purposes
+ os.unlink(self.test_output)
+
if __name__ == "__main__":
unittest.main()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.