| column | type | value statistics |
| --- | --- | --- |
| commit | string | lengths 40 to 40 |
| old_file | string | lengths 4 to 118 |
| new_file | string | lengths 4 to 118 |
| old_contents | string | lengths 10 to 2.94k |
| new_contents | string | lengths 21 to 3.18k |
| subject | string | lengths 16 to 444 |
| message | string | lengths 17 to 2.63k |
| lang | string | 1 distinct class |
| license | string | 13 distinct classes |
| repos | string | lengths 5 to 43k |
| ndiff | string | lengths 52 to 3.32k |
| instruction | string | lengths 16 to 444 |
| content | string | lengths 133 to 4.32k |
| fuzzy_diff | string | lengths 16 to 3.18k |
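Rows with this schema can be consumed directly with the `datasets` library. A minimal sketch follows; the dataset path is a hypothetical placeholder, since the dump does not name its Hub repository:

```python
# Minimal sketch: load a dataset with the schema above from the Hugging Face Hub.
# "user/commit-edits" is a hypothetical placeholder path, not the real name.
from datasets import load_dataset

ds = load_dataset("user/commit-edits", split="train")
row = ds[0]
print(row["commit"])        # 40-character commit SHA
print(row["instruction"])   # natural-language description of the change
print(row["old_contents"])  # file contents before the commit
print(row["new_contents"])  # file contents after the commit
```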
b2f1f97000c8d3479e1df6778f0cc85ec0680571
garden-watering01/mybuddy.py
garden-watering01/mybuddy.py
import machine def setntptime(maxretries=10): # ntptime is a helper module which gets packaged into the firmware # Check https://raw.githubusercontent.com/micropython/micropython/master/esp8266/scripts/ntptime.py import ntptime for i in range (maxretries): try: ntptime.settime() break except: if i+1 == maxretries: raise def deepsleep(sleeptime=15*60*1000): # configure RTC.ALARM0 to be able to wake the device rtc = machine.RTC() rtc.irq(trigger=rtc.ALARM0, wake=machine.DEEPSLEEP) # set RTC.ALARM0 to fire after some time. Time is given in milliseconds here rtc.alarm(rtc.ALARM0, sleeptime) #Make sure you have GPIO16 connected RST to wake from deepSleep. # put the device to sleep print ("Going into Sleep now") machine.deepsleep()
import machine def have_internet(): import urequests try: resp = urequests.request("HEAD", "http://jsonip.com/") return True except: return False def setntptime(maxretries=10): # ntptime is a helper module which gets packaged into the firmware # Check https://raw.githubusercontent.com/micropython/micropython/master/esp8266/scripts/ntptime.py import ntptime for i in range (maxretries): try: ntptime.settime() break except: if i+1 == maxretries: raise def deepsleep(sleeptime=15*60*1000): # configure RTC.ALARM0 to be able to wake the device rtc = machine.RTC() rtc.irq(trigger=rtc.ALARM0, wake=machine.DEEPSLEEP) # set RTC.ALARM0 to fire after some time. Time is given in milliseconds here rtc.alarm(rtc.ALARM0, sleeptime) #Make sure you have GPIO16 connected RST to wake from deepSleep. # put the device to sleep print ("Going into Sleep now") machine.deepsleep()
Add a function to check status of internet connectivity
Add a function to check status of internet connectivity
Python
mit
fuzzyhandle/esp8266hangout,fuzzyhandle/esp8266hangout,fuzzyhandle/esp8266hangout
import machine + + def have_internet(): + import urequests + try: + resp = urequests.request("HEAD", "http://jsonip.com/") + return True + except: + return False def setntptime(maxretries=10): # ntptime is a helper module which gets packaged into the firmware # Check https://raw.githubusercontent.com/micropython/micropython/master/esp8266/scripts/ntptime.py import ntptime for i in range (maxretries): try: ntptime.settime() break except: if i+1 == maxretries: raise def deepsleep(sleeptime=15*60*1000): # configure RTC.ALARM0 to be able to wake the device rtc = machine.RTC() rtc.irq(trigger=rtc.ALARM0, wake=machine.DEEPSLEEP) # set RTC.ALARM0 to fire after some time. Time is given in milliseconds here rtc.alarm(rtc.ALARM0, sleeptime) #Make sure you have GPIO16 connected RST to wake from deepSleep. # put the device to sleep print ("Going into Sleep now") machine.deepsleep()
Add a function to check status of internet connectivity
## Code Before:
import machine

def setntptime(maxretries=10):
    # ntptime is a helper module which gets packaged into the firmware
    # Check https://raw.githubusercontent.com/micropython/micropython/master/esp8266/scripts/ntptime.py
    import ntptime
    for i in range (maxretries):
        try:
            ntptime.settime()
            break
        except:
            if i+1 == maxretries:
                raise

def deepsleep(sleeptime=15*60*1000):
    # configure RTC.ALARM0 to be able to wake the device
    rtc = machine.RTC()
    rtc.irq(trigger=rtc.ALARM0, wake=machine.DEEPSLEEP)
    # set RTC.ALARM0 to fire after some time. Time is given in milliseconds here
    rtc.alarm(rtc.ALARM0, sleeptime)
    #Make sure you have GPIO16 connected RST to wake from deepSleep.
    # put the device to sleep
    print ("Going into Sleep now")
    machine.deepsleep()

## Instruction:
Add a function to check status of internet connectivity

## Code After:
import machine

def have_internet():
    import urequests
    try:
        resp = urequests.request("HEAD", "http://jsonip.com/")
        return True
    except:
        return False

def setntptime(maxretries=10):
    # ntptime is a helper module which gets packaged into the firmware
    # Check https://raw.githubusercontent.com/micropython/micropython/master/esp8266/scripts/ntptime.py
    import ntptime
    for i in range (maxretries):
        try:
            ntptime.settime()
            break
        except:
            if i+1 == maxretries:
                raise

def deepsleep(sleeptime=15*60*1000):
    # configure RTC.ALARM0 to be able to wake the device
    rtc = machine.RTC()
    rtc.irq(trigger=rtc.ALARM0, wake=machine.DEEPSLEEP)
    # set RTC.ALARM0 to fire after some time. Time is given in milliseconds here
    rtc.alarm(rtc.ALARM0, sleeptime)
    #Make sure you have GPIO16 connected RST to wake from deepSleep.
    # put the device to sleep
    print ("Going into Sleep now")
    machine.deepsleep()
... import machine def have_internet(): import urequests try: resp = urequests.request("HEAD", "http://jsonip.com/") return True except: return False ...
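For orientation: the `ndiff` column marks added lines with `+` and removed lines with `-`, in the style of Python's `difflib`, while `fuzzy_diff` keeps only the changed hunks between `...` context markers. As a rough sketch of how such a diff could be regenerated from a row's contents columns (an assumption about the format, not a documented recipe):

```python
# Sketch: rebuild an ndiff-style view from a row's before/after file contents.
# Assumes the ndiff column follows plain difflib.ndiff semantics; the real
# pipeline may post-process the output differently.
import difflib

def make_ndiff(old_contents: str, new_contents: str) -> str:
    lines = difflib.ndiff(old_contents.splitlines(), new_contents.splitlines())
    # Drop the intraline "? " hint lines, which the ndiff column does not show.
    return "\n".join(line for line in lines if not line.startswith("? "))

before = "import machine\n"
after = "import machine\n\ndef have_internet():\n    pass\n"
print(make_ndiff(before, after))
```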
7e2b60a7f7b32c235f931f9e7263ccefc84c79e2
gittip/orm/__init__.py
gittip/orm/__init__.py
from __future__ import unicode_literals import os import pdb from sqlalchemy import create_engine, MetaData from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker, scoped_session class Model(object): def __repr__(self): cols = self.__mapper__.c.keys() class_name = self.__class__.__name__ items = ', '.join(['%s=%s' % (col, repr(getattr(self, col))) for col in cols]) return '%s(%s)' % (class_name, items) def attrs_dict(self): keys = self.__mapper__.c.keys() attrs = {} for key in keys: attrs[key] = getattr(self, key) return attrs class SQLAlchemy(object): def __init__(self): self.session = self.create_session() self.Model = self.make_declarative_base() @property def engine(self): dburl = os.environ['DATABASE_URL'] return create_engine(dburl) def create_session(self): session = scoped_session(sessionmaker()) session.configure(bind=self.engine) return session def make_declarative_base(self): base = declarative_base(cls=Model) base.query = self.session.query_property() return base db = SQLAlchemy() all = [db] def rollback(*_): db.session.rollback()
from __future__ import unicode_literals import os import pdb from sqlalchemy import create_engine, MetaData from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker, scoped_session class Model(object): def __repr__(self): cols = self.__mapper__.c.keys() class_name = self.__class__.__name__ items = ', '.join(['%s=%s' % (col, repr(getattr(self, col))) for col in cols]) return '%s(%s)' % (class_name, items) def attrs_dict(self): keys = self.__mapper__.c.keys() attrs = {} for key in keys: attrs[key] = getattr(self, key) return attrs class SQLAlchemy(object): def __init__(self): self.session = self.create_session() self.Model = self.make_declarative_base() @property def engine(self): dburl = os.environ['DATABASE_URL'] return create_engine(dburl) def create_session(self): session = scoped_session(sessionmaker()) session.configure(bind=self.engine) return session def make_declarative_base(self): base = declarative_base(cls=Model) base.query = self.session.query_property() return base def drop_all(self): self.Model.metadata.drop_all(bind=self.engine) def create_all(self): self.Model.metadata.create_all(bind=self.engine) db = SQLAlchemy() all = [db] def rollback(*_): db.session.rollback()
Add convenience methods for creating/deleting all tables, for bootstrapping/testing use
Add convenience methods for creating/deleting all tables, for bootstrapping/testing use Signed-off-by: Joonas Bergius <[email protected]>
Python
mit
studio666/gratipay.com,gratipay/gratipay.com,eXcomm/gratipay.com,MikeFair/www.gittip.com,MikeFair/www.gittip.com,studio666/gratipay.com,mccolgst/www.gittip.com,bountysource/www.gittip.com,eXcomm/gratipay.com,gratipay/gratipay.com,mccolgst/www.gittip.com,gratipay/gratipay.com,bountysource/www.gittip.com,MikeFair/www.gittip.com,mccolgst/www.gittip.com,eXcomm/gratipay.com,gratipay/gratipay.com,studio666/gratipay.com,mccolgst/www.gittip.com,bountysource/www.gittip.com,eXcomm/gratipay.com,studio666/gratipay.com,bountysource/www.gittip.com
from __future__ import unicode_literals import os import pdb from sqlalchemy import create_engine, MetaData from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker, scoped_session class Model(object): def __repr__(self): cols = self.__mapper__.c.keys() class_name = self.__class__.__name__ items = ', '.join(['%s=%s' % (col, repr(getattr(self, col))) for col in cols]) return '%s(%s)' % (class_name, items) def attrs_dict(self): keys = self.__mapper__.c.keys() attrs = {} for key in keys: attrs[key] = getattr(self, key) return attrs class SQLAlchemy(object): def __init__(self): self.session = self.create_session() self.Model = self.make_declarative_base() @property def engine(self): dburl = os.environ['DATABASE_URL'] return create_engine(dburl) def create_session(self): session = scoped_session(sessionmaker()) session.configure(bind=self.engine) return session def make_declarative_base(self): base = declarative_base(cls=Model) base.query = self.session.query_property() return base + def drop_all(self): + self.Model.metadata.drop_all(bind=self.engine) + + def create_all(self): + self.Model.metadata.create_all(bind=self.engine) + db = SQLAlchemy() all = [db] def rollback(*_): db.session.rollback()
Add convenience methods for creating/deleting all tables, for bootstrapping/testing use
## Code Before:
from __future__ import unicode_literals

import os
import pdb

from sqlalchemy import create_engine, MetaData
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, scoped_session

class Model(object):

    def __repr__(self):
        cols = self.__mapper__.c.keys()
        class_name = self.__class__.__name__
        items = ', '.join(['%s=%s' % (col, repr(getattr(self, col))) for col in cols])
        return '%s(%s)' % (class_name, items)

    def attrs_dict(self):
        keys = self.__mapper__.c.keys()
        attrs = {}
        for key in keys:
            attrs[key] = getattr(self, key)
        return attrs

class SQLAlchemy(object):

    def __init__(self):
        self.session = self.create_session()
        self.Model = self.make_declarative_base()

    @property
    def engine(self):
        dburl = os.environ['DATABASE_URL']
        return create_engine(dburl)

    def create_session(self):
        session = scoped_session(sessionmaker())
        session.configure(bind=self.engine)
        return session

    def make_declarative_base(self):
        base = declarative_base(cls=Model)
        base.query = self.session.query_property()
        return base

db = SQLAlchemy()
all = [db]

def rollback(*_):
    db.session.rollback()

## Instruction:
Add convenience methods for creating/deleting all tables, for bootstrapping/testing use

## Code After:
from __future__ import unicode_literals

import os
import pdb

from sqlalchemy import create_engine, MetaData
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, scoped_session

class Model(object):

    def __repr__(self):
        cols = self.__mapper__.c.keys()
        class_name = self.__class__.__name__
        items = ', '.join(['%s=%s' % (col, repr(getattr(self, col))) for col in cols])
        return '%s(%s)' % (class_name, items)

    def attrs_dict(self):
        keys = self.__mapper__.c.keys()
        attrs = {}
        for key in keys:
            attrs[key] = getattr(self, key)
        return attrs

class SQLAlchemy(object):

    def __init__(self):
        self.session = self.create_session()
        self.Model = self.make_declarative_base()

    @property
    def engine(self):
        dburl = os.environ['DATABASE_URL']
        return create_engine(dburl)

    def create_session(self):
        session = scoped_session(sessionmaker())
        session.configure(bind=self.engine)
        return session

    def make_declarative_base(self):
        base = declarative_base(cls=Model)
        base.query = self.session.query_property()
        return base

    def drop_all(self):
        self.Model.metadata.drop_all(bind=self.engine)

    def create_all(self):
        self.Model.metadata.create_all(bind=self.engine)

db = SQLAlchemy()
all = [db]

def rollback(*_):
    db.session.rollback()
// ... existing code ... def drop_all(self): self.Model.metadata.drop_all(bind=self.engine) def create_all(self): self.Model.metadata.create_all(bind=self.engine) db = SQLAlchemy() // ... rest of the code ...
ae70502f910c85f6a4528b487eea3b535cec6c39
frappe/desk/doctype/tag/test_tag.py
frappe/desk/doctype/tag/test_tag.py
import unittest class TestTag(unittest.TestCase): pass
import unittest import frappe from frappe.desk.reportview import get_stats from frappe.desk.doctype.tag.tag import add_tag class TestTag(unittest.TestCase): def setUp(self) -> None: frappe.db.sql("DELETE from `tabTag`") frappe.db.sql("UPDATE `tabDocType` set _user_tags=''") def test_tag_count_query(self): self.assertDictEqual(get_stats('["_user_tags"]', 'DocType'), {'_user_tags': [['No Tags', frappe.db.count('DocType')]]}) add_tag('Standard', 'DocType', 'User') add_tag('Standard', 'DocType', 'ToDo') # count with no filter self.assertDictEqual(get_stats('["_user_tags"]', 'DocType'), {'_user_tags': [['Standard', 2], ['No Tags', frappe.db.count('DocType') - 2]]}) # count with child table field filter self.assertDictEqual(get_stats('["_user_tags"]', 'DocType', filters='[["DocField", "fieldname", "like", "%last_name%"], ["DocType", "name", "like", "%use%"]]'), {'_user_tags': [['Standard', 1], ['No Tags', 0]]})
Add test case to validate tag count query
test: Add test case to validate tag count query
Python
mit
mhbu50/frappe,almeidapaulopt/frappe,yashodhank/frappe,almeidapaulopt/frappe,mhbu50/frappe,almeidapaulopt/frappe,yashodhank/frappe,StrellaGroup/frappe,frappe/frappe,frappe/frappe,StrellaGroup/frappe,yashodhank/frappe,frappe/frappe,almeidapaulopt/frappe,yashodhank/frappe,StrellaGroup/frappe,mhbu50/frappe,mhbu50/frappe
import unittest + import frappe + + from frappe.desk.reportview import get_stats + from frappe.desk.doctype.tag.tag import add_tag class TestTag(unittest.TestCase): - pass + def setUp(self) -> None: + frappe.db.sql("DELETE from `tabTag`") + frappe.db.sql("UPDATE `tabDocType` set _user_tags=''") + def test_tag_count_query(self): + self.assertDictEqual(get_stats('["_user_tags"]', 'DocType'), + {'_user_tags': [['No Tags', frappe.db.count('DocType')]]}) + add_tag('Standard', 'DocType', 'User') + add_tag('Standard', 'DocType', 'ToDo') + + # count with no filter + self.assertDictEqual(get_stats('["_user_tags"]', 'DocType'), + {'_user_tags': [['Standard', 2], ['No Tags', frappe.db.count('DocType') - 2]]}) + + # count with child table field filter + self.assertDictEqual(get_stats('["_user_tags"]', + 'DocType', + filters='[["DocField", "fieldname", "like", "%last_name%"], ["DocType", "name", "like", "%use%"]]'), + {'_user_tags': [['Standard', 1], ['No Tags', 0]]})
Add test case to validate tag count query
## Code Before:
import unittest

class TestTag(unittest.TestCase):
    pass

## Instruction:
Add test case to validate tag count query

## Code After:
import unittest

import frappe

from frappe.desk.reportview import get_stats
from frappe.desk.doctype.tag.tag import add_tag

class TestTag(unittest.TestCase):
    def setUp(self) -> None:
        frappe.db.sql("DELETE from `tabTag`")
        frappe.db.sql("UPDATE `tabDocType` set _user_tags=''")

    def test_tag_count_query(self):
        self.assertDictEqual(get_stats('["_user_tags"]', 'DocType'),
            {'_user_tags': [['No Tags', frappe.db.count('DocType')]]})

        add_tag('Standard', 'DocType', 'User')
        add_tag('Standard', 'DocType', 'ToDo')

        # count with no filter
        self.assertDictEqual(get_stats('["_user_tags"]', 'DocType'),
            {'_user_tags': [['Standard', 2], ['No Tags', frappe.db.count('DocType') - 2]]})

        # count with child table field filter
        self.assertDictEqual(get_stats('["_user_tags"]',
            'DocType',
            filters='[["DocField", "fieldname", "like", "%last_name%"], ["DocType", "name", "like", "%use%"]]'),
            {'_user_tags': [['Standard', 1], ['No Tags', 0]]})
# ... existing code ... import unittest import frappe from frappe.desk.reportview import get_stats from frappe.desk.doctype.tag.tag import add_tag # ... modified code ... class TestTag(unittest.TestCase): def setUp(self) -> None: frappe.db.sql("DELETE from `tabTag`") frappe.db.sql("UPDATE `tabDocType` set _user_tags=''") def test_tag_count_query(self): self.assertDictEqual(get_stats('["_user_tags"]', 'DocType'), {'_user_tags': [['No Tags', frappe.db.count('DocType')]]}) add_tag('Standard', 'DocType', 'User') add_tag('Standard', 'DocType', 'ToDo') # count with no filter self.assertDictEqual(get_stats('["_user_tags"]', 'DocType'), {'_user_tags': [['Standard', 2], ['No Tags', frappe.db.count('DocType') - 2]]}) # count with child table field filter self.assertDictEqual(get_stats('["_user_tags"]', 'DocType', filters='[["DocField", "fieldname", "like", "%last_name%"], ["DocType", "name", "like", "%use%"]]'), {'_user_tags': [['Standard', 1], ['No Tags', 0]]}) # ... rest of the code ...
8cbac87d73f361bd6d623cbe58d188dd9cc518ce
ext_pylib/input/__init__.py
ext_pylib/input/__init__.py
from __future__ import absolute_import # Use Python 3 input if possible try: INPUT = input except NameError: INPUT = raw_input # pylint: disable=wrong-import-position # this import MUST be after INPUT is defined from .prompts import prompt, prompt_str, warn_prompt
from __future__ import absolute_import # Use Python 2 input unless raw_input doesn't exist try: INPUT = raw_input except NameError: INPUT = input # pylint: disable=wrong-import-position # this import MUST be after INPUT is defined from .prompts import prompt, prompt_str, warn_prompt
Use raw_input [py2] first, then resort to input [py3].
BUGFIX: Use raw_input [py2] first, then resort to input [py3].
Python
mit
hbradleyiii/ext_pylib
from __future__ import absolute_import - # Use Python 3 input if possible + # Use Python 2 input unless raw_input doesn't exist try: + INPUT = raw_input + except NameError: INPUT = input - except NameError: - INPUT = raw_input # pylint: disable=wrong-import-position # this import MUST be after INPUT is defined from .prompts import prompt, prompt_str, warn_prompt
Use raw_input [py2] first, then resort to input [py3].
## Code Before:
from __future__ import absolute_import

# Use Python 3 input if possible
try:
    INPUT = input
except NameError:
    INPUT = raw_input

# pylint: disable=wrong-import-position
# this import MUST be after INPUT is defined
from .prompts import prompt, prompt_str, warn_prompt

## Instruction:
Use raw_input [py2] first, then resort to input [py3].

## Code After:
from __future__ import absolute_import

# Use Python 2 input unless raw_input doesn't exist
try:
    INPUT = raw_input
except NameError:
    INPUT = input

# pylint: disable=wrong-import-position
# this import MUST be after INPUT is defined
from .prompts import prompt, prompt_str, warn_prompt
// ... existing code ... # Use Python 2 input unless raw_input doesn't exist try: INPUT = raw_input except NameError: INPUT = input // ... rest of the code ...
b5fc673d44624dfddfbdd98c9806b7e7e2f67331
simplekv/memory/memcachestore.py
simplekv/memory/memcachestore.py
try: from cStringIO import StringIO except ImportError: from StringIO import StringIO from .. import KeyValueStore class MemcacheStore(KeyValueStore): def __contains__(self, key): try: return key in self.mc except TypeError: raise IOError('memcache implementation does not support '\ '__contains__') def __init__(self, mc): self.mc = mc def _delete(self, key): self.mc.delete(key) def _get(self, key): rv = self.mc.get(key) if None == rv: raise KeyError(key) return rv def _get_file(self, key, file): file.write(self._get(key)) def _open(self, key): return StringIO(self._get(key)) def _put(self, key, data): self.mc.set(key, data) return key def _put_file(self, key, file): self.mc.set(key, file.read()) return key def keys(self): raise IOError('Memcache does not support listing keys.') def iter_keys(self): raise IOError('Memcache does not support key iteration.')
try: from cStringIO import StringIO except ImportError: from StringIO import StringIO from .. import KeyValueStore class MemcacheStore(KeyValueStore): def __contains__(self, key): try: return key in self.mc except TypeError: raise IOError('memcache implementation does not support '\ '__contains__') def __init__(self, mc): self.mc = mc def _delete(self, key): if not self.mc.delete(key): raise IOError('Error deleting key') def _get(self, key): rv = self.mc.get(key) if None == rv: raise KeyError(key) return rv def _get_file(self, key, file): file.write(self._get(key)) def _open(self, key): return StringIO(self._get(key)) def _put(self, key, data): if not self.mc.set(key, data): if len(data) >= 1024 * 1023: raise IOError('Failed to store data, probably too large. '\ 'memcached limit is 1M') raise IOError('Failed to store data') return key def _put_file(self, key, file): return self._put(key, file.read()) def keys(self): raise IOError('Memcache does not support listing keys.') def iter_keys(self): raise IOError('Memcache does not support key iteration.')
Check if putting/getting was actually successful.
Check if putting/getting was actually successful.
Python
mit
fmarczin/simplekv,fmarczin/simplekv,karteek/simplekv,mbr/simplekv,karteek/simplekv,mbr/simplekv
try: from cStringIO import StringIO except ImportError: from StringIO import StringIO from .. import KeyValueStore class MemcacheStore(KeyValueStore): def __contains__(self, key): try: return key in self.mc except TypeError: raise IOError('memcache implementation does not support '\ '__contains__') def __init__(self, mc): self.mc = mc def _delete(self, key): - self.mc.delete(key) + if not self.mc.delete(key): + raise IOError('Error deleting key') def _get(self, key): rv = self.mc.get(key) if None == rv: raise KeyError(key) return rv def _get_file(self, key, file): file.write(self._get(key)) def _open(self, key): return StringIO(self._get(key)) def _put(self, key, data): - self.mc.set(key, data) + if not self.mc.set(key, data): + if len(data) >= 1024 * 1023: + raise IOError('Failed to store data, probably too large. '\ + 'memcached limit is 1M') + raise IOError('Failed to store data') return key def _put_file(self, key, file): - self.mc.set(key, file.read()) + return self._put(key, file.read()) - return key def keys(self): raise IOError('Memcache does not support listing keys.') def iter_keys(self): raise IOError('Memcache does not support key iteration.')
Check if putting/getting was actually successful.
## Code Before:
try:
    from cStringIO import StringIO
except ImportError:
    from StringIO import StringIO

from .. import KeyValueStore

class MemcacheStore(KeyValueStore):
    def __contains__(self, key):
        try:
            return key in self.mc
        except TypeError:
            raise IOError('memcache implementation does not support '\
                          '__contains__')

    def __init__(self, mc):
        self.mc = mc

    def _delete(self, key):
        self.mc.delete(key)

    def _get(self, key):
        rv = self.mc.get(key)
        if None == rv:
            raise KeyError(key)
        return rv

    def _get_file(self, key, file):
        file.write(self._get(key))

    def _open(self, key):
        return StringIO(self._get(key))

    def _put(self, key, data):
        self.mc.set(key, data)
        return key

    def _put_file(self, key, file):
        self.mc.set(key, file.read())
        return key

    def keys(self):
        raise IOError('Memcache does not support listing keys.')

    def iter_keys(self):
        raise IOError('Memcache does not support key iteration.')

## Instruction:
Check if putting/getting was actually successful.

## Code After:
try:
    from cStringIO import StringIO
except ImportError:
    from StringIO import StringIO

from .. import KeyValueStore

class MemcacheStore(KeyValueStore):
    def __contains__(self, key):
        try:
            return key in self.mc
        except TypeError:
            raise IOError('memcache implementation does not support '\
                          '__contains__')

    def __init__(self, mc):
        self.mc = mc

    def _delete(self, key):
        if not self.mc.delete(key):
            raise IOError('Error deleting key')

    def _get(self, key):
        rv = self.mc.get(key)
        if None == rv:
            raise KeyError(key)
        return rv

    def _get_file(self, key, file):
        file.write(self._get(key))

    def _open(self, key):
        return StringIO(self._get(key))

    def _put(self, key, data):
        if not self.mc.set(key, data):
            if len(data) >= 1024 * 1023:
                raise IOError('Failed to store data, probably too large. '\
                              'memcached limit is 1M')
            raise IOError('Failed to store data')
        return key

    def _put_file(self, key, file):
        return self._put(key, file.read())

    def keys(self):
        raise IOError('Memcache does not support listing keys.')

    def iter_keys(self):
        raise IOError('Memcache does not support key iteration.')
... def _delete(self, key): if not self.mc.delete(key): raise IOError('Error deleting key') ... def _put(self, key, data): if not self.mc.set(key, data): if len(data) >= 1024 * 1023: raise IOError('Failed to store data, probably too large. '\ 'memcached limit is 1M') raise IOError('Failed to store data') return key ... def _put_file(self, key, file): return self._put(key, file.read()) ...
022f2cc6d067769a6c8e56601c0238aac69ec9ab
jfr_playoff/settings.py
jfr_playoff/settings.py
import glob, json, os, readline, sys def complete_filename(text, state): return (glob.glob(text+'*')+[None])[state] class PlayoffSettings: def __init__(self): self.interactive = False self.settings_file = None if len(sys.argv) > 1: self.settings_file = sys.argv[1] else: self.interactive = True def load(self): if self.settings_file is None: readline.set_completer_delims(' \t\n;') readline.parse_and_bind("tab: complete") readline.set_completer(complete_filename) self.settings_file = raw_input('JSON settings file: ') self.settings = json.load(open(self.settings_file)) def has_section(self, key): self.load() return key in self.settings def get(self, *keys): self.load() section = self.settings for key in keys: section = section[key] return section
import glob, json, os, readline, sys def complete_filename(text, state): return (glob.glob(text+'*')+[None])[state] class PlayoffSettings: def __init__(self): self.settings = None self.interactive = False self.settings_file = None if len(sys.argv) > 1: self.settings_file = sys.argv[1] else: self.interactive = True def load(self): if self.settings_file is None: readline.set_completer_delims(' \t\n;') readline.parse_and_bind("tab: complete") readline.set_completer(complete_filename) self.settings_file = raw_input('JSON settings file: ') if self.settings is None: self.settings = json.load(open(self.settings_file)) def has_section(self, key): self.load() return key in self.settings def get(self, *keys): self.load() section = self.settings for key in keys: section = section[key] return section
Load config file only once
Load config file only once
Python
bsd-2-clause
emkael/jfrteamy-playoff,emkael/jfrteamy-playoff
import glob, json, os, readline, sys def complete_filename(text, state): return (glob.glob(text+'*')+[None])[state] class PlayoffSettings: def __init__(self): + self.settings = None self.interactive = False self.settings_file = None if len(sys.argv) > 1: self.settings_file = sys.argv[1] else: self.interactive = True def load(self): if self.settings_file is None: readline.set_completer_delims(' \t\n;') readline.parse_and_bind("tab: complete") readline.set_completer(complete_filename) self.settings_file = raw_input('JSON settings file: ') + if self.settings is None: - self.settings = json.load(open(self.settings_file)) + self.settings = json.load(open(self.settings_file)) def has_section(self, key): self.load() return key in self.settings def get(self, *keys): self.load() section = self.settings for key in keys: section = section[key] return section
Load config file only once
## Code Before:
import glob, json, os, readline, sys

def complete_filename(text, state):
    return (glob.glob(text+'*')+[None])[state]

class PlayoffSettings:

    def __init__(self):
        self.interactive = False
        self.settings_file = None

        if len(sys.argv) > 1:
            self.settings_file = sys.argv[1]
        else:
            self.interactive = True

    def load(self):
        if self.settings_file is None:
            readline.set_completer_delims(' \t\n;')
            readline.parse_and_bind("tab: complete")
            readline.set_completer(complete_filename)
            self.settings_file = raw_input('JSON settings file: ')
        self.settings = json.load(open(self.settings_file))

    def has_section(self, key):
        self.load()
        return key in self.settings

    def get(self, *keys):
        self.load()
        section = self.settings
        for key in keys:
            section = section[key]
        return section

## Instruction:
Load config file only once

## Code After:
import glob, json, os, readline, sys

def complete_filename(text, state):
    return (glob.glob(text+'*')+[None])[state]

class PlayoffSettings:

    def __init__(self):
        self.settings = None
        self.interactive = False
        self.settings_file = None

        if len(sys.argv) > 1:
            self.settings_file = sys.argv[1]
        else:
            self.interactive = True

    def load(self):
        if self.settings_file is None:
            readline.set_completer_delims(' \t\n;')
            readline.parse_and_bind("tab: complete")
            readline.set_completer(complete_filename)
            self.settings_file = raw_input('JSON settings file: ')
        if self.settings is None:
            self.settings = json.load(open(self.settings_file))

    def has_section(self, key):
        self.load()
        return key in self.settings

    def get(self, *keys):
        self.load()
        section = self.settings
        for key in keys:
            section = section[key]
        return section
... def __init__(self): self.settings = None self.interactive = False ... if self.settings is None: self.settings = json.load(open(self.settings_file)) ...
aeac44b782397e78925fa74d2e87aa73c88b8162
core/polyaxon/utils/np_utils.py
core/polyaxon/utils/np_utils.py
try: import numpy as np except ImportError: np = None def sanitize_np_types(value): if isinstance(value, (int, float, complex, type(None))): return value if np.isnan(value): return None if np and isinstance(value, np.integer): return int(value) if np and isinstance(value, np.floating): return float(value) return value def to_np(value): if isinstance(value, np.ndarray): return value if np.isscalar(value): return np.array([value]) def calculate_scale_factor(tensor): converted = tensor.numpy() if not isinstance(tensor, np.ndarray) else tensor return 1 if converted.dtype == np.uint8 else 255
import math try: import numpy as np except ImportError: np = None def sanitize_np_types(value): if math.isnan(value): return None if isinstance(value, (int, float, complex, type(None))): return value if np and np.isnan(value): return None if np and isinstance(value, np.integer): return int(value) if np and isinstance(value, np.floating): return float(value) return value def to_np(value): if isinstance(value, np.ndarray): return value if np.isscalar(value): return np.array([value]) def calculate_scale_factor(tensor): converted = tensor.numpy() if not isinstance(tensor, np.ndarray) else tensor return 1 if converted.dtype == np.uint8 else 255
Add check for nan values
Add check for nan values
Python
apache-2.0
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
+ import math try: import numpy as np except ImportError: np = None def sanitize_np_types(value): + if math.isnan(value): + return None if isinstance(value, (int, float, complex, type(None))): return value - if np.isnan(value): + if np and np.isnan(value): return None if np and isinstance(value, np.integer): return int(value) if np and isinstance(value, np.floating): return float(value) return value def to_np(value): if isinstance(value, np.ndarray): return value if np.isscalar(value): return np.array([value]) def calculate_scale_factor(tensor): converted = tensor.numpy() if not isinstance(tensor, np.ndarray) else tensor return 1 if converted.dtype == np.uint8 else 255
Add check for nan values
## Code Before:
try:
    import numpy as np
except ImportError:
    np = None

def sanitize_np_types(value):
    if isinstance(value, (int, float, complex, type(None))):
        return value
    if np.isnan(value):
        return None
    if np and isinstance(value, np.integer):
        return int(value)
    if np and isinstance(value, np.floating):
        return float(value)
    return value

def to_np(value):
    if isinstance(value, np.ndarray):
        return value
    if np.isscalar(value):
        return np.array([value])

def calculate_scale_factor(tensor):
    converted = tensor.numpy() if not isinstance(tensor, np.ndarray) else tensor
    return 1 if converted.dtype == np.uint8 else 255

## Instruction:
Add check for nan values

## Code After:
import math

try:
    import numpy as np
except ImportError:
    np = None

def sanitize_np_types(value):
    if math.isnan(value):
        return None
    if isinstance(value, (int, float, complex, type(None))):
        return value
    if np and np.isnan(value):
        return None
    if np and isinstance(value, np.integer):
        return int(value)
    if np and isinstance(value, np.floating):
        return float(value)
    return value

def to_np(value):
    if isinstance(value, np.ndarray):
        return value
    if np.isscalar(value):
        return np.array([value])

def calculate_scale_factor(tensor):
    converted = tensor.numpy() if not isinstance(tensor, np.ndarray) else tensor
    return 1 if converted.dtype == np.uint8 else 255
... import math ... def sanitize_np_types(value): if math.isnan(value): return None if isinstance(value, (int, float, complex, type(None))): ... return value if np and np.isnan(value): return None ...
15be3bd492a0808713c6ae6981ecb99acacd5297
allauth/socialaccount/providers/trello/provider.py
allauth/socialaccount/providers/trello/provider.py
from allauth.socialaccount.providers.base import ProviderAccount from allauth.socialaccount.providers.oauth.provider import OAuthProvider class TrelloAccount(ProviderAccount): def get_profile_url(self): return None def get_avatar_url(self): return None class TrelloProvider(OAuthProvider): id = 'trello' name = 'Trello' account_class = TrelloAccount def get_default_scope(self): return ['read'] def extract_uid(self, data): return data['id'] def get_auth_params(self, request, action): data = super(TrelloProvider, self).get_auth_params(request, action) app = self.get_app(request) data['type'] = 'web_server' data['name'] = app.name # define here for how long it will be, this can be configured on the # social app data['expiration'] = 'never' return data provider_classes = [TrelloProvider]
from allauth.socialaccount.providers.base import ProviderAccount from allauth.socialaccount.providers.oauth.provider import OAuthProvider class TrelloAccount(ProviderAccount): def get_profile_url(self): return None def get_avatar_url(self): return None class TrelloProvider(OAuthProvider): id = 'trello' name = 'Trello' account_class = TrelloAccount def get_default_scope(self): return ['read'] def extract_uid(self, data): return data['id'] def get_auth_params(self, request, action): data = super(TrelloProvider, self).get_auth_params(request, action) app = self.get_app(request) data['type'] = 'web_server' data['name'] = app.name data['scope'] = self.get_scope(request) # define here for how long it will be, this can be configured on the # social app data['expiration'] = 'never' return data provider_classes = [TrelloProvider]
Use 'scope' in TrelloProvider auth params. Allows overriding from django settings.
feat(TrelloProvider): Use 'scope' in TrelloProvider auth params. Allows overriding from django settings.
Python
mit
AltSchool/django-allauth,AltSchool/django-allauth,AltSchool/django-allauth
from allauth.socialaccount.providers.base import ProviderAccount from allauth.socialaccount.providers.oauth.provider import OAuthProvider class TrelloAccount(ProviderAccount): def get_profile_url(self): return None def get_avatar_url(self): return None class TrelloProvider(OAuthProvider): id = 'trello' name = 'Trello' account_class = TrelloAccount def get_default_scope(self): return ['read'] def extract_uid(self, data): return data['id'] def get_auth_params(self, request, action): data = super(TrelloProvider, self).get_auth_params(request, action) app = self.get_app(request) data['type'] = 'web_server' data['name'] = app.name + data['scope'] = self.get_scope(request) # define here for how long it will be, this can be configured on the # social app data['expiration'] = 'never' return data provider_classes = [TrelloProvider]
Use 'scope' in TrelloProvider auth params. Allows overriding from django settings.
## Code Before:
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth.provider import OAuthProvider

class TrelloAccount(ProviderAccount):
    def get_profile_url(self):
        return None

    def get_avatar_url(self):
        return None

class TrelloProvider(OAuthProvider):
    id = 'trello'
    name = 'Trello'
    account_class = TrelloAccount

    def get_default_scope(self):
        return ['read']

    def extract_uid(self, data):
        return data['id']

    def get_auth_params(self, request, action):
        data = super(TrelloProvider, self).get_auth_params(request, action)
        app = self.get_app(request)
        data['type'] = 'web_server'
        data['name'] = app.name
        # define here for how long it will be, this can be configured on the
        # social app
        data['expiration'] = 'never'
        return data

provider_classes = [TrelloProvider]

## Instruction:
Use 'scope' in TrelloProvider auth params. Allows overriding from django settings.

## Code After:
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth.provider import OAuthProvider

class TrelloAccount(ProviderAccount):
    def get_profile_url(self):
        return None

    def get_avatar_url(self):
        return None

class TrelloProvider(OAuthProvider):
    id = 'trello'
    name = 'Trello'
    account_class = TrelloAccount

    def get_default_scope(self):
        return ['read']

    def extract_uid(self, data):
        return data['id']

    def get_auth_params(self, request, action):
        data = super(TrelloProvider, self).get_auth_params(request, action)
        app = self.get_app(request)
        data['type'] = 'web_server'
        data['name'] = app.name
        data['scope'] = self.get_scope(request)
        # define here for how long it will be, this can be configured on the
        # social app
        data['expiration'] = 'never'
        return data

provider_classes = [TrelloProvider]
# ... existing code ... data['name'] = app.name data['scope'] = self.get_scope(request) # define here for how long it will be, this can be configured on the # ... rest of the code ...
a9c6e045631103fe8508fd1b60d6076c05092fe1
tests/examples/customnode/nodes.py
tests/examples/customnode/nodes.py
from viewflow.activation import AbstractGateActivation, Activation from viewflow.flow import base from viewflow.token import Token class DynamicSplitActivation(AbstractGateActivation): def calculate_next(self): self._split_count = self.flow_task._task_count_callback(self.process) @Activation.status.super() def activate_next(self): if self._split_count: token_source = Token.split_token_source(self.task.token, self.task.pk) for _ in range(self._split_count): self.flow_task._next.activate(prev_activation=self, token=next(token_source)) class DynamicSplit(base.NextNodeMixin, base.DetailsViewMixin, base.Gateway): """ Activates several outgoing task instances depends on callback value Example:: spit_on_decision = flow.DynamicSplit(lambda p: 4) \\ .Next(this.make_decision) make_decision = flow.View(MyView) \\ .Next(this.join_on_decision) join_on_decision = flow.Join() \\ .Next(this.end) """ task_type = 'SPLIT' activation_cls = DynamicSplitActivation def __init__(self, callback): super(DynamicSplit, self).__init__() self._task_count_callback = callback
from viewflow.activation import AbstractGateActivation from viewflow.flow import base from viewflow.token import Token class DynamicSplitActivation(AbstractGateActivation): def calculate_next(self): self._split_count = self.flow_task._task_count_callback(self.process) def activate_next(self): if self._split_count: token_source = Token.split_token_source(self.task.token, self.task.pk) for _ in range(self._split_count): self.flow_task._next.activate(prev_activation=self, token=next(token_source)) class DynamicSplit(base.NextNodeMixin, base.UndoViewMixin, base.CancelViewMixin, base.PerformViewMixin, base.DetailsViewMixin, base.Gateway): """ Activates several outgoing task instances depends on callback value Example:: spit_on_decision = flow.DynamicSplit(lambda p: 4) \\ .Next(this.make_decision) make_decision = flow.View(MyView) \\ .Next(this.join_on_decision) join_on_decision = flow.Join() \\ .Next(this.end) """ task_type = 'SPLIT' activation_cls = DynamicSplitActivation def __init__(self, callback): super(DynamicSplit, self).__init__() self._task_count_callback = callback
Add undo to custom node sample
Add undo to custom node sample
Python
agpl-3.0
ribeiro-ucl/viewflow,codingjoe/viewflow,pombredanne/viewflow,pombredanne/viewflow,codingjoe/viewflow,codingjoe/viewflow,viewflow/viewflow,viewflow/viewflow,ribeiro-ucl/viewflow,viewflow/viewflow,ribeiro-ucl/viewflow
- from viewflow.activation import AbstractGateActivation, Activation + from viewflow.activation import AbstractGateActivation from viewflow.flow import base from viewflow.token import Token class DynamicSplitActivation(AbstractGateActivation): def calculate_next(self): self._split_count = self.flow_task._task_count_callback(self.process) - @Activation.status.super() def activate_next(self): if self._split_count: token_source = Token.split_token_source(self.task.token, self.task.pk) for _ in range(self._split_count): self.flow_task._next.activate(prev_activation=self, token=next(token_source)) - class DynamicSplit(base.NextNodeMixin, base.DetailsViewMixin, base.Gateway): + class DynamicSplit(base.NextNodeMixin, + base.UndoViewMixin, + base.CancelViewMixin, + base.PerformViewMixin, + base.DetailsViewMixin, + base.Gateway): """ Activates several outgoing task instances depends on callback value Example:: spit_on_decision = flow.DynamicSplit(lambda p: 4) \\ .Next(this.make_decision) make_decision = flow.View(MyView) \\ .Next(this.join_on_decision) join_on_decision = flow.Join() \\ .Next(this.end) """ task_type = 'SPLIT' activation_cls = DynamicSplitActivation def __init__(self, callback): super(DynamicSplit, self).__init__() self._task_count_callback = callback
Add undo to custom node sample
## Code Before:
from viewflow.activation import AbstractGateActivation, Activation
from viewflow.flow import base
from viewflow.token import Token

class DynamicSplitActivation(AbstractGateActivation):
    def calculate_next(self):
        self._split_count = self.flow_task._task_count_callback(self.process)

    @Activation.status.super()
    def activate_next(self):
        if self._split_count:
            token_source = Token.split_token_source(self.task.token, self.task.pk)
            for _ in range(self._split_count):
                self.flow_task._next.activate(prev_activation=self, token=next(token_source))

class DynamicSplit(base.NextNodeMixin, base.DetailsViewMixin, base.Gateway):
    """
    Activates several outgoing task instances depends on callback value

    Example::

        spit_on_decision = flow.DynamicSplit(lambda p: 4) \\
            .Next(this.make_decision)

        make_decision = flow.View(MyView) \\
            .Next(this.join_on_decision)

        join_on_decision = flow.Join() \\
            .Next(this.end)
    """
    task_type = 'SPLIT'
    activation_cls = DynamicSplitActivation

    def __init__(self, callback):
        super(DynamicSplit, self).__init__()
        self._task_count_callback = callback

## Instruction:
Add undo to custom node sample

## Code After:
from viewflow.activation import AbstractGateActivation
from viewflow.flow import base
from viewflow.token import Token

class DynamicSplitActivation(AbstractGateActivation):
    def calculate_next(self):
        self._split_count = self.flow_task._task_count_callback(self.process)

    def activate_next(self):
        if self._split_count:
            token_source = Token.split_token_source(self.task.token, self.task.pk)
            for _ in range(self._split_count):
                self.flow_task._next.activate(prev_activation=self, token=next(token_source))

class DynamicSplit(base.NextNodeMixin,
                   base.UndoViewMixin,
                   base.CancelViewMixin,
                   base.PerformViewMixin,
                   base.DetailsViewMixin,
                   base.Gateway):
    """
    Activates several outgoing task instances depends on callback value

    Example::

        spit_on_decision = flow.DynamicSplit(lambda p: 4) \\
            .Next(this.make_decision)

        make_decision = flow.View(MyView) \\
            .Next(this.join_on_decision)

        join_on_decision = flow.Join() \\
            .Next(this.end)
    """
    task_type = 'SPLIT'
    activation_cls = DynamicSplitActivation

    def __init__(self, callback):
        super(DynamicSplit, self).__init__()
        self._task_count_callback = callback
# ... existing code ... from viewflow.activation import AbstractGateActivation from viewflow.flow import base # ... modified code ... def activate_next(self): ... class DynamicSplit(base.NextNodeMixin, base.UndoViewMixin, base.CancelViewMixin, base.PerformViewMixin, base.DetailsViewMixin, base.Gateway): """ # ... rest of the code ...
315ad5f2f31f82f8d42d2a65fe4f056b4e3fcfd7
tests/test_quickstart.py
tests/test_quickstart.py
import pytest from lektor.quickstart import get_default_author from lektor.quickstart import get_default_author_email from lektor.utils import locate_executable def test_default_author(os_user): assert get_default_author() == "Lektor Test" @pytest.mark.skipif(locate_executable("git") is None, reason="git not installed") def test_default_author_email(): assert isinstance(get_default_author_email(), str)
import os import pytest from lektor.quickstart import get_default_author from lektor.quickstart import get_default_author_email from lektor.utils import locate_executable def test_default_author(os_user): assert get_default_author() == "Lektor Test" @pytest.mark.skipif(locate_executable("git") is None, reason="git not installed") def test_default_author_email(): assert isinstance(get_default_author_email(), str) def test_default_author_email_git_unavailable(monkeypatch): monkeypatch.setitem(os.environ, "PATH", "/dev/null") locate_executable.cache_clear() assert get_default_author_email() is None
Add test case for when git is not available
Add test case for when git is not available
Python
bsd-3-clause
lektor/lektor,lektor/lektor,lektor/lektor,lektor/lektor
+ import os + import pytest from lektor.quickstart import get_default_author from lektor.quickstart import get_default_author_email from lektor.utils import locate_executable def test_default_author(os_user): assert get_default_author() == "Lektor Test" @pytest.mark.skipif(locate_executable("git") is None, reason="git not installed") def test_default_author_email(): assert isinstance(get_default_author_email(), str) + + def test_default_author_email_git_unavailable(monkeypatch): + monkeypatch.setitem(os.environ, "PATH", "/dev/null") + locate_executable.cache_clear() + assert get_default_author_email() is None +
Add test case for when git is not available
## Code Before:
import pytest

from lektor.quickstart import get_default_author
from lektor.quickstart import get_default_author_email
from lektor.utils import locate_executable

def test_default_author(os_user):
    assert get_default_author() == "Lektor Test"

@pytest.mark.skipif(locate_executable("git") is None, reason="git not installed")
def test_default_author_email():
    assert isinstance(get_default_author_email(), str)

## Instruction:
Add test case for when git is not available

## Code After:
import os

import pytest

from lektor.quickstart import get_default_author
from lektor.quickstart import get_default_author_email
from lektor.utils import locate_executable

def test_default_author(os_user):
    assert get_default_author() == "Lektor Test"

@pytest.mark.skipif(locate_executable("git") is None, reason="git not installed")
def test_default_author_email():
    assert isinstance(get_default_author_email(), str)

def test_default_author_email_git_unavailable(monkeypatch):
    monkeypatch.setitem(os.environ, "PATH", "/dev/null")
    locate_executable.cache_clear()
    assert get_default_author_email() is None
# ... existing code ... import os import pytest # ... modified code ... assert isinstance(get_default_author_email(), str) def test_default_author_email_git_unavailable(monkeypatch): monkeypatch.setitem(os.environ, "PATH", "/dev/null") locate_executable.cache_clear() assert get_default_author_email() is None # ... rest of the code ...
c313d6fb6803edabb956e1e90f040f8518c334bf
app/main/errors.py
app/main/errors.py
from flask import render_template from . import main @main.app_errorhandler(404) def page_not_found(e): return render_template("404.html"), 404
from flask import render_template from . import main @main.app_errorhandler(404) def page_not_found(e): return render_template("404.html", **main.config['BASE_TEMPLATE_DATA']), 404
Fix 404 page template static resources
Fix 404 page template static resources
Python
mit
mtekel/digitalmarketplace-admin-frontend,mtekel/digitalmarketplace-admin-frontend,mtekel/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,mtekel/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend
from flask import render_template from . import main @main.app_errorhandler(404) def page_not_found(e): - return render_template("404.html"), 404 + return render_template("404.html", + **main.config['BASE_TEMPLATE_DATA']), 404
Fix 404 page template static resources
## Code Before:
from flask import render_template

from . import main

@main.app_errorhandler(404)
def page_not_found(e):
    return render_template("404.html"), 404

## Instruction:
Fix 404 page template static resources

## Code After:
from flask import render_template

from . import main

@main.app_errorhandler(404)
def page_not_found(e):
    return render_template("404.html",
                           **main.config['BASE_TEMPLATE_DATA']), 404
# ... existing code ... def page_not_found(e): return render_template("404.html", **main.config['BASE_TEMPLATE_DATA']), 404 # ... rest of the code ...
fe50ea0dd1ceb51fdff455484cb5d2d32c94b076
spyder_unittest/__init__.py
spyder_unittest/__init__.py
from .unittest import UnitTestPlugin as PLUGIN_CLASS
"""Spyder unitest plugin.""" # Local imports from .unittest import UnitTestPlugin as PLUGIN_CLASS VERSION_INFO = (0, 1, 0, 'dev0') __version__ = '.'.join(map(str, VERSION_INFO))
Add version information and header
Add version information and header
Python
mit
jitseniesen/spyder-unittest
+ """Spyder unitest plugin.""" + + # Local imports from .unittest import UnitTestPlugin as PLUGIN_CLASS + VERSION_INFO = (0, 1, 0, 'dev0') + __version__ = '.'.join(map(str, VERSION_INFO)) +
Add version information and header
## Code Before:
from .unittest import UnitTestPlugin as PLUGIN_CLASS

## Instruction:
Add version information and header

## Code After:
"""Spyder unitest plugin."""

# Local imports
from .unittest import UnitTestPlugin as PLUGIN_CLASS

VERSION_INFO = (0, 1, 0, 'dev0')
__version__ = '.'.join(map(str, VERSION_INFO))
# ... existing code ... """Spyder unitest plugin.""" # Local imports from .unittest import UnitTestPlugin as PLUGIN_CLASS VERSION_INFO = (0, 1, 0, 'dev0') __version__ = '.'.join(map(str, VERSION_INFO)) # ... rest of the code ...
00a497b21b9c788cb38da6c92a985e1b5c22801a
apps/survey/urls.py
apps/survey/urls.py
from django.conf.urls.defaults import * from . import views urlpatterns = patterns('', url(r'^profile/$', views.profile_index, name='survey_profile'), url(r'^profile/electric/$', views.profile_electric, name='survey_profile_electric'), #url(r'^profile/intake/$', views.survey_intake, name='survey_profile_intake'), url(r'^profile/surveys/$', views.survey_management, name='survey_management'), url(r'^main/$', views.main_index), url(r'^survey_management/$', views.survey_management, name='survey_management'), #url(r'^survey_data/(?P<survey_shortname>.+)/(?P<id>\d+)/$', views.survey_data, name='survey_data'), url(r'^intake/$', views.survey_data, name='survey_data'), url(r'^monthly/(?P<id>\d+)/$', views.survey_data_monthly ,name='survey_data_monthly'), url(r'^thanks_profile/$', views.thanks_profile, name='profile_thanks'), #url(r'^select/$', views.select_user, name='survey_select_user'), url(r'^$', views.index, name='survey_index'), )
from django.conf.urls.defaults import * from . import views urlpatterns = patterns('', url(r'^profile/$', views.profile_index, name='survey_profile'), url(r'^profile/electric/$', views.profile_electric, name='survey_profile_electric'), url(r'^profile/surveys/$', views.survey_management, name='survey_management'), url(r'^main/$', views.main_index), url(r'^survey_management/$', views.survey_management, name='survey_management'), url(r'^intake/view/$', views.survey_intake_view, name='survey_intake_view'), url(r'^intake/update/$', views.survey_intake_update, name='survey_intake_update'), url(r'^monthly/(?P<id>\d+)/$', views.survey_monthly ,name='survey_monthly'), url(r'^monthly/(?P<id>\d+)/update/$', views.survey_monthly_update ,name='survey_monthly_update'), url(r'^thanks_profile/$', views.thanks_profile, name='profile_thanks'), #url(r'^select/$', views.select_user, name='survey_select_user'), url(r'^$', views.index, name='survey_index'), )
Add view and update decorators
Add view and update decorators
Python
agpl-3.0
chispita/epiwork,chispita/epiwork,chispita/epiwork,chispita/epiwork,chispita/epiwork,chispita/epiwork,chispita/epiwork
from django.conf.urls.defaults import * from . import views urlpatterns = patterns('', url(r'^profile/$', views.profile_index, name='survey_profile'), url(r'^profile/electric/$', views.profile_electric, name='survey_profile_electric'), - #url(r'^profile/intake/$', views.survey_intake, name='survey_profile_intake'), url(r'^profile/surveys/$', views.survey_management, name='survey_management'), url(r'^main/$', views.main_index), url(r'^survey_management/$', views.survey_management, name='survey_management'), - #url(r'^survey_data/(?P<survey_shortname>.+)/(?P<id>\d+)/$', views.survey_data, name='survey_data'), - - url(r'^intake/$', views.survey_data, name='survey_data'), + + url(r'^intake/view/$', views.survey_intake_view, name='survey_intake_view'), + url(r'^intake/update/$', views.survey_intake_update, name='survey_intake_update'), + - url(r'^monthly/(?P<id>\d+)/$', views.survey_data_monthly ,name='survey_data_monthly'), + url(r'^monthly/(?P<id>\d+)/$', views.survey_monthly ,name='survey_monthly'), - + url(r'^monthly/(?P<id>\d+)/update/$', views.survey_monthly_update ,name='survey_monthly_update'), url(r'^thanks_profile/$', views.thanks_profile, name='profile_thanks'), #url(r'^select/$', views.select_user, name='survey_select_user'), url(r'^$', views.index, name='survey_index'), )
Add view and update decorators
## Code Before:
from django.conf.urls.defaults import *

from . import views

urlpatterns = patterns('',
    url(r'^profile/$', views.profile_index, name='survey_profile'),
    url(r'^profile/electric/$', views.profile_electric, name='survey_profile_electric'),
    #url(r'^profile/intake/$', views.survey_intake, name='survey_profile_intake'),
    url(r'^profile/surveys/$', views.survey_management, name='survey_management'),
    url(r'^main/$', views.main_index),
    url(r'^survey_management/$', views.survey_management, name='survey_management'),
    #url(r'^survey_data/(?P<survey_shortname>.+)/(?P<id>\d+)/$', views.survey_data, name='survey_data'),

    url(r'^intake/$', views.survey_data, name='survey_data'),
    url(r'^monthly/(?P<id>\d+)/$', views.survey_data_monthly ,name='survey_data_monthly'),

    url(r'^thanks_profile/$', views.thanks_profile, name='profile_thanks'),
    #url(r'^select/$', views.select_user, name='survey_select_user'),
    url(r'^$', views.index, name='survey_index'),
)

## Instruction:
Add view and update decorators

## Code After:
from django.conf.urls.defaults import *

from . import views

urlpatterns = patterns('',
    url(r'^profile/$', views.profile_index, name='survey_profile'),
    url(r'^profile/electric/$', views.profile_electric, name='survey_profile_electric'),
    url(r'^profile/surveys/$', views.survey_management, name='survey_management'),
    url(r'^main/$', views.main_index),
    url(r'^survey_management/$', views.survey_management, name='survey_management'),

    url(r'^intake/view/$', views.survey_intake_view, name='survey_intake_view'),
    url(r'^intake/update/$', views.survey_intake_update, name='survey_intake_update'),

    url(r'^monthly/(?P<id>\d+)/$', views.survey_monthly ,name='survey_monthly'),
    url(r'^monthly/(?P<id>\d+)/update/$', views.survey_monthly_update ,name='survey_monthly_update'),

    url(r'^thanks_profile/$', views.thanks_profile, name='profile_thanks'),
    #url(r'^select/$', views.select_user, name='survey_select_user'),
    url(r'^$', views.index, name='survey_index'),
)
# ... existing code ... url(r'^profile/electric/$', views.profile_electric, name='survey_profile_electric'), url(r'^profile/surveys/$', views.survey_management, name='survey_management'), # ... modified code ... url(r'^survey_management/$', views.survey_management, name='survey_management'), url(r'^intake/view/$', views.survey_intake_view, name='survey_intake_view'), url(r'^intake/update/$', views.survey_intake_update, name='survey_intake_update'), url(r'^monthly/(?P<id>\d+)/$', views.survey_monthly ,name='survey_monthly'), url(r'^monthly/(?P<id>\d+)/update/$', views.survey_monthly_update ,name='survey_monthly_update'), # ... rest of the code ...
693b904a9053fbddc6c93cfab1d6448c4b644d1c
scripts/travis_build_dependent_projects.py
scripts/travis_build_dependent_projects.py
import os from click import echo from travispy import travispy from travispy import TravisPy def main(): restarted = [] building = [] for domain in [travispy.PUBLIC, travispy.PRIVATE]: echo("Enumerate repos on {!r}".format(domain)) conn = TravisPy.github_auth(os.environ['GITHUB_KEY'], domain) user = conn.user() repos = conn.repos(member=user.login) for repo in repos: if not repo.active: continue echo(u"Checking repo: {}\n{!r}".format(repo.slug, repo.description)) try: build = conn.build(repo.last_build_id) if "config.json" in build.config.get("config", [""])[0]: echo("Found drift project: {!r}".format(repo.slug)) if not build.running: echo("Restarting...") build.restart() restarted.append(repo.slug) else: echo("Build is already running!") building.append(repo.slug) else: echo("Not a drift based project.") except Exception as e: echo("Can't build repo: {!r}".format(e)) echo() if restarted: echo("Repos restarted:") for reponame in restarted: echo("\t{}".format(reponame)) else: echo("No builds restarted.") if building: echo("Repos already building:") for reponame in building: echo("\t{}".format(reponame)) if __name__ == "__main__": main()
import os from click import echo from travispy import travispy from travispy import TravisPy def main(): restarted = [] building = [] for domain in [travispy.PUBLIC, travispy.PRIVATE]: echo("Enumerate repos on {!r}".format(domain)) conn = TravisPy.github_auth(os.environ['GITHUB_KEY'], domain) user = conn.user() repos = conn.repos(member=user.login) for repo in repos: if not repo.active: continue echo(u"Checking repo: {}\n{!r}".format(repo.slug, repo.description)) try: build = conn.build(repo.last_build_id) if 'drift' in build.config.get('drift_build_trigger', []): echo("Found drift project: {!r}".format(repo.slug)) if not build.running: echo("Restarting...") build.restart() restarted.append(repo.slug) else: echo("Build is already running!") building.append(repo.slug) else: echo("Not a drift based project.") except Exception as e: echo("Can't build repo: {!r}".format(e)) echo() if restarted: echo("Repos restarted:") for reponame in restarted: echo("\t{}".format(reponame)) else: echo("No builds restarted.") if building: echo("Repos already building:") for reponame in building: echo("\t{}".format(reponame)) if __name__ == "__main__": main()
Fix Travis dependent build trigger
Fix Travis dependent build trigger
Python
mit
dgnorth/drift,dgnorth/drift,dgnorth/drift
import os from click import echo from travispy import travispy from travispy import TravisPy def main(): restarted = [] building = [] for domain in [travispy.PUBLIC, travispy.PRIVATE]: echo("Enumerate repos on {!r}".format(domain)) conn = TravisPy.github_auth(os.environ['GITHUB_KEY'], domain) user = conn.user() repos = conn.repos(member=user.login) for repo in repos: if not repo.active: continue echo(u"Checking repo: {}\n{!r}".format(repo.slug, repo.description)) try: build = conn.build(repo.last_build_id) - if "config.json" in build.config.get("config", [""])[0]: + if 'drift' in build.config.get('drift_build_trigger', []): echo("Found drift project: {!r}".format(repo.slug)) if not build.running: echo("Restarting...") build.restart() restarted.append(repo.slug) else: echo("Build is already running!") building.append(repo.slug) else: echo("Not a drift based project.") except Exception as e: echo("Can't build repo: {!r}".format(e)) echo() if restarted: echo("Repos restarted:") for reponame in restarted: echo("\t{}".format(reponame)) else: echo("No builds restarted.") if building: echo("Repos already building:") for reponame in building: echo("\t{}".format(reponame)) if __name__ == "__main__": main()
Fix Travis dependent build trigger
## Code Before:
import os

from click import echo

from travispy import travispy
from travispy import TravisPy


def main():
    restarted = []
    building = []

    for domain in [travispy.PUBLIC, travispy.PRIVATE]:
        echo("Enumerate repos on {!r}".format(domain))
        conn = TravisPy.github_auth(os.environ['GITHUB_KEY'], domain)
        user = conn.user()
        repos = conn.repos(member=user.login)
        for repo in repos:
            if not repo.active:
                continue

            echo(u"Checking repo: {}\n{!r}".format(repo.slug, repo.description))
            try:
                build = conn.build(repo.last_build_id)
                if "config.json" in build.config.get("config", [""])[0]:
                    echo("Found drift project: {!r}".format(repo.slug))
                    if not build.running:
                        echo("Restarting...")
                        build.restart()
                        restarted.append(repo.slug)
                    else:
                        echo("Build is already running!")
                        building.append(repo.slug)
                else:
                    echo("Not a drift based project.")
            except Exception as e:
                echo("Can't build repo: {!r}".format(e))

            echo()

    if restarted:
        echo("Repos restarted:")
        for reponame in restarted:
            echo("\t{}".format(reponame))
    else:
        echo("No builds restarted.")

    if building:
        echo("Repos already building:")
        for reponame in building:
            echo("\t{}".format(reponame))


if __name__ == "__main__":
    main()

## Instruction:
Fix Travis dependent build trigger

## Code After:
import os

from click import echo

from travispy import travispy
from travispy import TravisPy


def main():
    restarted = []
    building = []

    for domain in [travispy.PUBLIC, travispy.PRIVATE]:
        echo("Enumerate repos on {!r}".format(domain))
        conn = TravisPy.github_auth(os.environ['GITHUB_KEY'], domain)
        user = conn.user()
        repos = conn.repos(member=user.login)
        for repo in repos:
            if not repo.active:
                continue

            echo(u"Checking repo: {}\n{!r}".format(repo.slug, repo.description))
            try:
                build = conn.build(repo.last_build_id)
                if 'drift' in build.config.get('drift_build_trigger', []):
                    echo("Found drift project: {!r}".format(repo.slug))
                    if not build.running:
                        echo("Restarting...")
                        build.restart()
                        restarted.append(repo.slug)
                    else:
                        echo("Build is already running!")
                        building.append(repo.slug)
                else:
                    echo("Not a drift based project.")
            except Exception as e:
                echo("Can't build repo: {!r}".format(e))

            echo()

    if restarted:
        echo("Repos restarted:")
        for reponame in restarted:
            echo("\t{}".format(reponame))
    else:
        echo("No builds restarted.")

    if building:
        echo("Repos already building:")
        for reponame in building:
            echo("\t{}".format(reponame))


if __name__ == "__main__":
    main()
// ... existing code ... build = conn.build(repo.last_build_id) if 'drift' in build.config.get('drift_build_trigger', []): echo("Found drift project: {!r}".format(repo.slug)) // ... rest of the code ...
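The change above stops inferring drift projects from a config.json reference and instead checks an explicit drift_build_trigger list in the Travis build config. The detection rule reduces to one dict lookup, shown here against made-up configs since the real values come back from travispy:

def is_drift_project(build_config):
    # New rule: projects opt in through an explicit trigger list in
    # .travis.yml instead of being sniffed via a config.json reference.
    return 'drift' in build_config.get('drift_build_trigger', [])

if __name__ == "__main__":
    assert is_drift_project({'drift_build_trigger': ['drift']})
    assert not is_drift_project({'config': ['config.json']})   # old heuristic
    assert not is_drift_project({})                            # key absent
    print("drift detection checks passed")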
ff06ce55d0856cff774bdec5f0e872e093216bce
diffs/__init__.py
diffs/__init__.py
from __future__ import absolute_import, unicode_literals from django.apps import apps as django_apps from .signals import connect __version__ = '0.0.1' default_app_config = 'diffs.apps.DiffLogConfig' klasses_to_connect = [] def register(klass): """ Decorator function that registers a class to record diffs. @register class ExampleModel(models.Model): ... """ from .models import DiffLogEntryManager from dirtyfields import DirtyFieldsMixin # Hack to add dirtyfieldsmixin automatically if DirtyFieldsMixin not in klass.__bases__: klass.__bases__ = (DirtyFieldsMixin,) + klass.__bases__ klass.add_to_class('diffs', DiffLogEntryManager()) if not django_apps.ready: klasses_to_connect.append(klass) else: connect(klass) return klass
from __future__ import absolute_import, unicode_literals from .signals import connect __version__ = '0.0.1' default_app_config = 'diffs.apps.DiffLogConfig' klasses_to_connect = [] def register(klass): """ Decorator function that registers a class to record diffs. @register class ExampleModel(models.Model): ... """ from django.apps import apps as django_apps from dirtyfields import DirtyFieldsMixin from .models import DiffLogEntryManager # Hack to add dirtyfieldsmixin automatically if DirtyFieldsMixin not in klass.__bases__: klass.__bases__ = (DirtyFieldsMixin,) + klass.__bases__ klass.add_to_class('diffs', DiffLogEntryManager()) if not django_apps.ready: klasses_to_connect.append(klass) else: connect(klass) return klass
Reorganize imports to be later
Reorganize imports to be later
Python
mit
linuxlewis/django-diffs
from __future__ import absolute_import, unicode_literals - - from django.apps import apps as django_apps from .signals import connect __version__ = '0.0.1' default_app_config = 'diffs.apps.DiffLogConfig' klasses_to_connect = [] def register(klass): """ Decorator function that registers a class to record diffs. @register class ExampleModel(models.Model): ... """ + from django.apps import apps as django_apps + from dirtyfields import DirtyFieldsMixin + from .models import DiffLogEntryManager - from dirtyfields import DirtyFieldsMixin # Hack to add dirtyfieldsmixin automatically if DirtyFieldsMixin not in klass.__bases__: klass.__bases__ = (DirtyFieldsMixin,) + klass.__bases__ klass.add_to_class('diffs', DiffLogEntryManager()) if not django_apps.ready: klasses_to_connect.append(klass) else: connect(klass) return klass
Reorganize imports to be later
## Code Before: from __future__ import absolute_import, unicode_literals from django.apps import apps as django_apps from .signals import connect __version__ = '0.0.1' default_app_config = 'diffs.apps.DiffLogConfig' klasses_to_connect = [] def register(klass): """ Decorator function that registers a class to record diffs. @register class ExampleModel(models.Model): ... """ from .models import DiffLogEntryManager from dirtyfields import DirtyFieldsMixin # Hack to add dirtyfieldsmixin automatically if DirtyFieldsMixin not in klass.__bases__: klass.__bases__ = (DirtyFieldsMixin,) + klass.__bases__ klass.add_to_class('diffs', DiffLogEntryManager()) if not django_apps.ready: klasses_to_connect.append(klass) else: connect(klass) return klass ## Instruction: Reorganize imports to be later ## Code After: from __future__ import absolute_import, unicode_literals from .signals import connect __version__ = '0.0.1' default_app_config = 'diffs.apps.DiffLogConfig' klasses_to_connect = [] def register(klass): """ Decorator function that registers a class to record diffs. @register class ExampleModel(models.Model): ... """ from django.apps import apps as django_apps from dirtyfields import DirtyFieldsMixin from .models import DiffLogEntryManager # Hack to add dirtyfieldsmixin automatically if DirtyFieldsMixin not in klass.__bases__: klass.__bases__ = (DirtyFieldsMixin,) + klass.__bases__ klass.add_to_class('diffs', DiffLogEntryManager()) if not django_apps.ready: klasses_to_connect.append(klass) else: connect(klass) return klass
# ... existing code ... from __future__ import absolute_import, unicode_literals # ... modified code ... """ from django.apps import apps as django_apps from dirtyfields import DirtyFieldsMixin from .models import DiffLogEntryManager # Hack to add dirtyfieldsmixin automatically # ... rest of the code ...
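Pushing the django.apps and model imports into register() matters because the decorator can be evaluated while Django's app registry is still loading; importing models at module level there raises AppRegistryNotReady. A framework-free sketch of the same defer-until-ready idea; the registry functions below are illustrative, not part of django-diffs:

_pending = []
_ready = False

def register(klass):
    # Heavy imports and signal wiring would happen here, at call time,
    # instead of at module import time.
    if not _ready:
        _pending.append(klass)      # queue until the framework is ready
    else:
        _connect(klass)
    return klass

def _connect(klass):
    print("connected:", klass.__name__)

def mark_ready():
    # Analogue of an AppConfig.ready() hook: flush everything that was
    # queued while the registry was still warming up.
    global _ready
    _ready = True
    while _pending:
        _connect(_pending.pop(0))

@register
class ExampleModel:
    pass

mark_ready()    # prints: connected: ExampleModel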
0524817b152b4e3211d5d8101c661a54578e5888
dmoj/checkers/standard.py
dmoj/checkers/standard.py
def check(process_output, judge_output, **kwargs): from six.moves import zip process_lines = list(filter(None, process_output.split(b'\n'))) judge_lines = list(filter(None, judge_output.split(b'\n'))) if len(process_lines) != len(judge_lines): return False for process_line, judge_line in zip(process_lines, judge_lines): if process_line.split() != judge_line.split(): return False return True try: from ._checker import standard except ImportError as e: pass else: def check(process_output, judge_output, _checker=standard, **kwargs): return _checker(judge_output, process_output) del standard
from ._checker import standard def check(process_output, judge_output, _checker=standard, **kwargs): return _checker(judge_output, process_output) del standard
Remove untested checker code path
Remove untested checker code path
Python
agpl-3.0
DMOJ/judge,DMOJ/judge,DMOJ/judge
+ from ._checker import standard - def check(process_output, judge_output, **kwargs): - from six.moves import zip - process_lines = list(filter(None, process_output.split(b'\n'))) - judge_lines = list(filter(None, judge_output.split(b'\n'))) - if len(process_lines) != len(judge_lines): - return False - for process_line, judge_line in zip(process_lines, judge_lines): - if process_line.split() != judge_line.split(): - return False - return True - try: - from ._checker import standard - except ImportError as e: - pass - else: - def check(process_output, judge_output, _checker=standard, **kwargs): + def check(process_output, judge_output, _checker=standard, **kwargs): - return _checker(judge_output, process_output) + return _checker(judge_output, process_output) - del standard + del standard
Remove untested checker code path
## Code Before: def check(process_output, judge_output, **kwargs): from six.moves import zip process_lines = list(filter(None, process_output.split(b'\n'))) judge_lines = list(filter(None, judge_output.split(b'\n'))) if len(process_lines) != len(judge_lines): return False for process_line, judge_line in zip(process_lines, judge_lines): if process_line.split() != judge_line.split(): return False return True try: from ._checker import standard except ImportError as e: pass else: def check(process_output, judge_output, _checker=standard, **kwargs): return _checker(judge_output, process_output) del standard ## Instruction: Remove untested checker code path ## Code After: from ._checker import standard def check(process_output, judge_output, _checker=standard, **kwargs): return _checker(judge_output, process_output) del standard
# ... existing code ... from ._checker import standard def check(process_output, judge_output, _checker=standard, **kwargs): return _checker(judge_output, process_output) del standard # ... rest of the code ...
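With the fallback gone, _checker.standard is the only implementation, so it helps to keep its contract in view: outputs match when their non-empty lines agree token for token. A plain-Python restatement of that contract; this is a reading aid, not the C extension's actual source:

def standard_check(process_output, judge_output):
    # Trailing blank lines and runs of whitespace between tokens are
    # ignored; everything else must match exactly.
    process_lines = [line for line in process_output.split(b'\n') if line]
    judge_lines = [line for line in judge_output.split(b'\n') if line]
    if len(process_lines) != len(judge_lines):
        return False
    return all(p.split() == j.split()
               for p, j in zip(process_lines, judge_lines))

if __name__ == "__main__":
    assert standard_check(b"1  2\n3\n\n", b"1 2\n3")
    assert not standard_check(b"1 2\n", b"1 2\n3")
    print("standard checker contract holds on the samples")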
8958f8abb8798ff61af43199f0683c3e1c0ffcdd
checklisthq/main/models.py
checklisthq/main/models.py
from django.db import models from django.contrib.auth.models import User from taggit.managers import TaggableManager class Checklist(models.Model): title = models.CharField(max_length=512) owner = models.ForeignKey(User) content = models.TextField() created = models.DateTimeField(auto_now_add=True) modified = models.DateTimeField(auto_now=True,auto_now_add=True) deleted = models.BooleanField(default=False) tags = TaggableManager() # To add: type def __unicode__(self): return self.title
from django.db import models from django.contrib.auth.models import User from taggit.managers import TaggableManager class Checklist(models.Model): title = models.CharField(max_length=512) owner = models.ForeignKey(User) content = models.TextField() created = models.DateTimeField(auto_now_add=True) modified = models.DateTimeField(auto_now=True,auto_now_add=True) deleted = models.BooleanField(default=False) tags = TaggableManager(blank=True) # To add: type def __unicode__(self): return self.title
Make tags an optional field of Checklist
Make tags an optional field of Checklist
Python
agpl-3.0
checklisthq/checklisthq.com,checklisthq/checklisthq.com
from django.db import models from django.contrib.auth.models import User from taggit.managers import TaggableManager class Checklist(models.Model): title = models.CharField(max_length=512) owner = models.ForeignKey(User) content = models.TextField() created = models.DateTimeField(auto_now_add=True) modified = models.DateTimeField(auto_now=True,auto_now_add=True) deleted = models.BooleanField(default=False) - tags = TaggableManager() + tags = TaggableManager(blank=True) # To add: type def __unicode__(self): return self.title
Make tags an optional field of Checklist
## Code Before: from django.db import models from django.contrib.auth.models import User from taggit.managers import TaggableManager class Checklist(models.Model): title = models.CharField(max_length=512) owner = models.ForeignKey(User) content = models.TextField() created = models.DateTimeField(auto_now_add=True) modified = models.DateTimeField(auto_now=True,auto_now_add=True) deleted = models.BooleanField(default=False) tags = TaggableManager() # To add: type def __unicode__(self): return self.title ## Instruction: Make tags an optional field of Checklist ## Code After: from django.db import models from django.contrib.auth.models import User from taggit.managers import TaggableManager class Checklist(models.Model): title = models.CharField(max_length=512) owner = models.ForeignKey(User) content = models.TextField() created = models.DateTimeField(auto_now_add=True) modified = models.DateTimeField(auto_now=True,auto_now_add=True) deleted = models.BooleanField(default=False) tags = TaggableManager(blank=True) # To add: type def __unicode__(self): return self.title
// ... existing code ... deleted = models.BooleanField(default=False) tags = TaggableManager(blank=True) # To add: type // ... rest of the code ...
087925b336794b71675b31b70f845042e1f635fb
metro_accounts/metro_account.py
metro_accounts/metro_account.py
import time from openerp.report import report_sxw from openerp.osv import fields, osv class account_account(osv.osv): _inherit = "account.account" _columns={ 'name': fields.char('Name', size=256, required=True, select=True, translate=True), 'bal_direct': fields.selection([ ('d', 'Debit'), ('c', 'Credit'), ], 'Balance Direction',) } account_account() # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
import time from openerp.report import report_sxw from openerp.osv import fields, osv class account_account(osv.osv): _inherit = "account.account" _columns={ 'name': fields.char('Name', size=256, required=True, select=True, translate=True), 'bal_direct': fields.selection([ ('d', 'Debit'), ('c', 'Credit'), ], 'Balance Direction',) } ''' Update SQL: update account_account set bal_direct = 'd' where user_type in (select id from account_account_type where name in('Check','Asset','Bank','Cash','Receivable')) update account_account set bal_direct = 'c' where user_type in (select id from account_account_type where name in('Equity','Liability','Payable','Tax')) ''' account_account() # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
Add the SQL to update account balance direction field bal_direct
Add the SQL to update account balance direction field bal_direct
Python
agpl-3.0
837278709/metro-openerp,john-wang-metro/metro-openerp,john-wang-metro/metro-openerp,837278709/metro-openerp,837278709/metro-openerp,john-wang-metro/metro-openerp
import time from openerp.report import report_sxw from openerp.osv import fields, osv class account_account(osv.osv): _inherit = "account.account" _columns={ 'name': fields.char('Name', size=256, required=True, select=True, translate=True), 'bal_direct': fields.selection([ ('d', 'Debit'), ('c', 'Credit'), ], 'Balance Direction',) } + ''' + Update SQL: + update account_account set bal_direct = 'd' where user_type in (select id from account_account_type where name in('Check','Asset','Bank','Cash','Receivable')) + update account_account set bal_direct = 'c' where user_type in (select id from account_account_type where name in('Equity','Liability','Payable','Tax')) + ''' account_account() # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
Add the SQL to update account balance direction field bal_direct
## Code Before: import time from openerp.report import report_sxw from openerp.osv import fields, osv class account_account(osv.osv): _inherit = "account.account" _columns={ 'name': fields.char('Name', size=256, required=True, select=True, translate=True), 'bal_direct': fields.selection([ ('d', 'Debit'), ('c', 'Credit'), ], 'Balance Direction',) } account_account() # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: ## Instruction: Add the SQL to update account balance direction field bal_direct ## Code After: import time from openerp.report import report_sxw from openerp.osv import fields, osv class account_account(osv.osv): _inherit = "account.account" _columns={ 'name': fields.char('Name', size=256, required=True, select=True, translate=True), 'bal_direct': fields.selection([ ('d', 'Debit'), ('c', 'Credit'), ], 'Balance Direction',) } ''' Update SQL: update account_account set bal_direct = 'd' where user_type in (select id from account_account_type where name in('Check','Asset','Bank','Cash','Receivable')) update account_account set bal_direct = 'c' where user_type in (select id from account_account_type where name in('Equity','Liability','Payable','Tax')) ''' account_account() # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
... } ''' Update SQL: update account_account set bal_direct = 'd' where user_type in (select id from account_account_type where name in('Check','Asset','Bank','Cash','Receivable')) update account_account set bal_direct = 'c' where user_type in (select id from account_account_type where name in('Equity','Liability','Payable','Tax')) ''' account_account() ...
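The SQL in the comment encodes a fixed account-type to balance-direction mapping. Restated as a small function for sanity-checking before running the statements; the two type lists are copied from the comment, the function itself is illustrative:

DEBIT_TYPES = {'Check', 'Asset', 'Bank', 'Cash', 'Receivable'}
CREDIT_TYPES = {'Equity', 'Liability', 'Payable', 'Tax'}

def bal_direct_for(account_type_name):
    # 'd' marks a debit-normal balance, 'c' a credit-normal one;
    # None means the SQL would leave bal_direct untouched.
    if account_type_name in DEBIT_TYPES:
        return 'd'
    if account_type_name in CREDIT_TYPES:
        return 'c'
    return None

if __name__ == "__main__":
    for name in ('Cash', 'Payable', 'View'):
        print(name, '->', bal_direct_for(name))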
af88bfaece839d044ccb0781a15c8c538979051e
tests/test_object.py
tests/test_object.py
import unittest import mlbgame class TestObject(unittest.TestCase): def test_object(self): data = { 'string': 'string', 'int': '10', 'float': '10.1' } obj = mlbgame.object.Object(data) self.assertIsInstance(obj.string, str) self.assertIsInstance(obj.int, int) self.assertIsInstance(obj.float, float) self.assertEqual(obj.string, 'string') self.assertEqual(obj.int, 10) self.assertEqual(obj.float, 10.1)
import unittest import mlbgame class TestObject(unittest.TestCase): def test_object(self): data = { 'string': 'string', 'int': '10', 'float': '10.1', 'unicode': u'\xe7\x8c\xab' } obj = mlbgame.object.Object(data) self.assertIsInstance(obj.string, str) self.assertIsInstance(obj.int, int) self.assertIsInstance(obj.float, float) self.assertIsInstance(obj.unicode, unicode) self.assertEqual(obj.string, 'string') self.assertEqual(obj.int, 10) self.assertEqual(obj.float, 10.1) self.assertEqual(obj.unicode, u'\xe7\x8c\xab')
Add test for unicode characters
Add test for unicode characters
Python
mit
panzarino/mlbgame,zachpanz88/mlbgame
import unittest import mlbgame class TestObject(unittest.TestCase): def test_object(self): data = { 'string': 'string', 'int': '10', - 'float': '10.1' + 'float': '10.1', + 'unicode': u'\xe7\x8c\xab' } obj = mlbgame.object.Object(data) self.assertIsInstance(obj.string, str) self.assertIsInstance(obj.int, int) self.assertIsInstance(obj.float, float) + self.assertIsInstance(obj.unicode, unicode) self.assertEqual(obj.string, 'string') self.assertEqual(obj.int, 10) self.assertEqual(obj.float, 10.1) + self.assertEqual(obj.unicode, u'\xe7\x8c\xab')
Add test for unicode characters
## Code Before: import unittest import mlbgame class TestObject(unittest.TestCase): def test_object(self): data = { 'string': 'string', 'int': '10', 'float': '10.1' } obj = mlbgame.object.Object(data) self.assertIsInstance(obj.string, str) self.assertIsInstance(obj.int, int) self.assertIsInstance(obj.float, float) self.assertEqual(obj.string, 'string') self.assertEqual(obj.int, 10) self.assertEqual(obj.float, 10.1) ## Instruction: Add test for unicode characters ## Code After: import unittest import mlbgame class TestObject(unittest.TestCase): def test_object(self): data = { 'string': 'string', 'int': '10', 'float': '10.1', 'unicode': u'\xe7\x8c\xab' } obj = mlbgame.object.Object(data) self.assertIsInstance(obj.string, str) self.assertIsInstance(obj.int, int) self.assertIsInstance(obj.float, float) self.assertIsInstance(obj.unicode, unicode) self.assertEqual(obj.string, 'string') self.assertEqual(obj.int, 10) self.assertEqual(obj.float, 10.1) self.assertEqual(obj.unicode, u'\xe7\x8c\xab')
// ... existing code ... 'int': '10', 'float': '10.1', 'unicode': u'\xe7\x8c\xab' } // ... modified code ... self.assertIsInstance(obj.float, float) self.assertIsInstance(obj.unicode, unicode) self.assertEqual(obj.string, 'string') ... self.assertEqual(obj.float, 10.1) self.assertEqual(obj.unicode, u'\xe7\x8c\xab') // ... rest of the code ...
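assertIsInstance(obj.unicode, unicode) only works on Python 2, where unicode is a builtin distinct from str. If the suite ever had to straddle both major versions, the usual shim looks like this; the spelling below avoids the six dependency and is an illustration, not code from mlbgame:

import sys

if sys.version_info[0] >= 3:
    text_type = str            # Python 3: str is already unicode text
else:
    text_type = unicode        # noqa: F821  (Python 2 builtin)

if __name__ == "__main__":
    value = u'\xe7\x8c\xab'
    assert isinstance(value, text_type)
    print("text type on this interpreter:", text_type.__name__)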
edd06989628e90d4fdfa98e4af84720d815322f9
pinax/likes/migrations/0001_initial.py
pinax/likes/migrations/0001_initial.py
from __future__ import unicode_literals from django.conf import settings from django.db import migrations, models import django.db.models.deletion import django.utils.timezone class Migration(migrations.Migration): initial = True dependencies = [ ('contenttypes', '0002_remove_content_type_name'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='Like', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('receiver_object_id', models.PositiveIntegerField()), ('timestamp', models.DateTimeField(default=django.utils.timezone.now)), ('receiver_content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')), ('sender', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='liking', to=settings.AUTH_USER_MODEL)), ], ), migrations.AlterUniqueTogether( name='like', unique_together=set([('sender', 'receiver_content_type', 'receiver_object_id')]), ), ]
from __future__ import unicode_literals from django.conf import settings from django.db import migrations, models import django.db.models.deletion import django.utils.timezone class Migration(migrations.Migration): initial = True dependencies = [ ('contenttypes', '0002_remove_content_type_name'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='Like', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('receiver_object_id', models.PositiveIntegerField()), ('timestamp', models.DateTimeField(default=django.utils.timezone.now)), ('receiver_content_type', models.ForeignKey('contenttypes.ContentType', on_delete=django.db.models.deletion.CASCADE)), ('sender', models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=django.db.models.deletion.CASCADE, related_name='liking')), ], ), migrations.AlterUniqueTogether( name='like', unique_together=set([('sender', 'receiver_content_type', 'receiver_object_id')]), ), ]
Drop features removed in Django 2.0
Drop features removed in Django 2.0 Field.rel and Field.remote_field.to are removed https://docs.djangoproject.com/en/dev/releases/2.0/#features-removed-in-2-0
Python
mit
pinax/pinax-likes
from __future__ import unicode_literals from django.conf import settings from django.db import migrations, models import django.db.models.deletion import django.utils.timezone class Migration(migrations.Migration): initial = True dependencies = [ ('contenttypes', '0002_remove_content_type_name'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='Like', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('receiver_object_id', models.PositiveIntegerField()), ('timestamp', models.DateTimeField(default=django.utils.timezone.now)), - ('receiver_content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')), + ('receiver_content_type', models.ForeignKey('contenttypes.ContentType', on_delete=django.db.models.deletion.CASCADE)), - ('sender', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='liking', to=settings.AUTH_USER_MODEL)), + ('sender', models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=django.db.models.deletion.CASCADE, related_name='liking')), ], ), migrations.AlterUniqueTogether( name='like', unique_together=set([('sender', 'receiver_content_type', 'receiver_object_id')]), ), ]
Drop features removed in Django 2.0
## Code Before: from __future__ import unicode_literals from django.conf import settings from django.db import migrations, models import django.db.models.deletion import django.utils.timezone class Migration(migrations.Migration): initial = True dependencies = [ ('contenttypes', '0002_remove_content_type_name'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='Like', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('receiver_object_id', models.PositiveIntegerField()), ('timestamp', models.DateTimeField(default=django.utils.timezone.now)), ('receiver_content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')), ('sender', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='liking', to=settings.AUTH_USER_MODEL)), ], ), migrations.AlterUniqueTogether( name='like', unique_together=set([('sender', 'receiver_content_type', 'receiver_object_id')]), ), ] ## Instruction: Drop features removed in Django 2.0 ## Code After: from __future__ import unicode_literals from django.conf import settings from django.db import migrations, models import django.db.models.deletion import django.utils.timezone class Migration(migrations.Migration): initial = True dependencies = [ ('contenttypes', '0002_remove_content_type_name'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='Like', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('receiver_object_id', models.PositiveIntegerField()), ('timestamp', models.DateTimeField(default=django.utils.timezone.now)), ('receiver_content_type', models.ForeignKey('contenttypes.ContentType', on_delete=django.db.models.deletion.CASCADE)), ('sender', models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=django.db.models.deletion.CASCADE, related_name='liking')), ], ), migrations.AlterUniqueTogether( name='like', unique_together=set([('sender', 'receiver_content_type', 'receiver_object_id')]), ), ]
... ('timestamp', models.DateTimeField(default=django.utils.timezone.now)), ('receiver_content_type', models.ForeignKey('contenttypes.ContentType', on_delete=django.db.models.deletion.CASCADE)), ('sender', models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=django.db.models.deletion.CASCADE, related_name='liking')), ], ...
b2a977a7285cbe832350492b967213b5261ad6b4
flask_app/tasks.py
flask_app/tasks.py
from __future__ import absolute_import import functools import os import sys import logbook from celery import Celery from celery.signals import after_setup_logger, after_setup_task_logger from .app import create_app _logger = logbook.Logger(__name__) queue = Celery('tasks', broker='redis://localhost') queue.conf.update( CELERY_TASK_SERIALIZER='json', CELERY_ACCEPT_CONTENT=['json'], # Ignore other content CELERY_RESULT_SERIALIZER='json', CELERY_ENABLE_UTC=True, ) def setup_log(**args): logbook.SyslogHandler().push_application() logbook.StreamHandler(sys.stderr, bubble=True).push_application() APP = None def needs_app_context(f): @functools.wraps(f) def wrapper(*args, **kwargs): global APP if APP is None: APP = create_app() with APP.app_context(): return f(*args, **kwargs) return wrapper after_setup_logger.connect(setup_log) after_setup_task_logger.connect(setup_log)
from __future__ import absolute_import import functools import os import sys import logging import logging.handlers import logbook from celery import Celery from celery.signals import after_setup_logger, after_setup_task_logger from celery.log import redirect_stdouts_to_logger from .app import create_app _logger = logbook.Logger(__name__) queue = Celery('tasks', broker='redis://localhost') queue.conf.update( CELERY_TASK_SERIALIZER='json', CELERY_ACCEPT_CONTENT=['json'], # Ignore other content CELERY_RESULT_SERIALIZER='json', CELERY_ENABLE_UTC=True, ) def setup_log(**args): logbook.SyslogHandler().push_application() logbook.StreamHandler(sys.stderr, bubble=True).push_application() redirect_stdouts_to_logger(args['logger']) # logs to local syslog if os.path.exists('/dev/log'): h = logging.handlers.SysLogHandler('/dev/log') else: h = logging.handlers.SysLogHandler() h.setLevel(args['loglevel']) formatter = logging.Formatter(logging.BASIC_FORMAT) h.setFormatter(formatter) args['logger'].addHandler(h) APP = None def needs_app_context(f): @functools.wraps(f) def wrapper(*args, **kwargs): global APP if APP is None: APP = create_app() with APP.app_context(): return f(*args, **kwargs) return wrapper after_setup_logger.connect(setup_log) after_setup_task_logger.connect(setup_log)
Fix celery logging in deployment
Fix celery logging in deployment
Python
mit
getslash/mailboxer,getslash/mailboxer,getslash/mailboxer,vmalloc/mailboxer,vmalloc/mailboxer,vmalloc/mailboxer,Infinidat/lanister,Infinidat/lanister
from __future__ import absolute_import import functools import os import sys + import logging + import logging.handlers import logbook from celery import Celery from celery.signals import after_setup_logger, after_setup_task_logger + from celery.log import redirect_stdouts_to_logger from .app import create_app _logger = logbook.Logger(__name__) queue = Celery('tasks', broker='redis://localhost') queue.conf.update( CELERY_TASK_SERIALIZER='json', CELERY_ACCEPT_CONTENT=['json'], # Ignore other content CELERY_RESULT_SERIALIZER='json', CELERY_ENABLE_UTC=True, ) def setup_log(**args): logbook.SyslogHandler().push_application() logbook.StreamHandler(sys.stderr, bubble=True).push_application() + redirect_stdouts_to_logger(args['logger']) # logs to local syslog + if os.path.exists('/dev/log'): + h = logging.handlers.SysLogHandler('/dev/log') + else: + h = logging.handlers.SysLogHandler() + h.setLevel(args['loglevel']) + formatter = logging.Formatter(logging.BASIC_FORMAT) + h.setFormatter(formatter) + args['logger'].addHandler(h) APP = None def needs_app_context(f): @functools.wraps(f) def wrapper(*args, **kwargs): global APP if APP is None: APP = create_app() with APP.app_context(): return f(*args, **kwargs) return wrapper after_setup_logger.connect(setup_log) after_setup_task_logger.connect(setup_log)
Fix celery logging in deployment
## Code Before: from __future__ import absolute_import import functools import os import sys import logbook from celery import Celery from celery.signals import after_setup_logger, after_setup_task_logger from .app import create_app _logger = logbook.Logger(__name__) queue = Celery('tasks', broker='redis://localhost') queue.conf.update( CELERY_TASK_SERIALIZER='json', CELERY_ACCEPT_CONTENT=['json'], # Ignore other content CELERY_RESULT_SERIALIZER='json', CELERY_ENABLE_UTC=True, ) def setup_log(**args): logbook.SyslogHandler().push_application() logbook.StreamHandler(sys.stderr, bubble=True).push_application() APP = None def needs_app_context(f): @functools.wraps(f) def wrapper(*args, **kwargs): global APP if APP is None: APP = create_app() with APP.app_context(): return f(*args, **kwargs) return wrapper after_setup_logger.connect(setup_log) after_setup_task_logger.connect(setup_log) ## Instruction: Fix celery logging in deployment ## Code After: from __future__ import absolute_import import functools import os import sys import logging import logging.handlers import logbook from celery import Celery from celery.signals import after_setup_logger, after_setup_task_logger from celery.log import redirect_stdouts_to_logger from .app import create_app _logger = logbook.Logger(__name__) queue = Celery('tasks', broker='redis://localhost') queue.conf.update( CELERY_TASK_SERIALIZER='json', CELERY_ACCEPT_CONTENT=['json'], # Ignore other content CELERY_RESULT_SERIALIZER='json', CELERY_ENABLE_UTC=True, ) def setup_log(**args): logbook.SyslogHandler().push_application() logbook.StreamHandler(sys.stderr, bubble=True).push_application() redirect_stdouts_to_logger(args['logger']) # logs to local syslog if os.path.exists('/dev/log'): h = logging.handlers.SysLogHandler('/dev/log') else: h = logging.handlers.SysLogHandler() h.setLevel(args['loglevel']) formatter = logging.Formatter(logging.BASIC_FORMAT) h.setFormatter(formatter) args['logger'].addHandler(h) APP = None def needs_app_context(f): @functools.wraps(f) def wrapper(*args, **kwargs): global APP if APP is None: APP = create_app() with APP.app_context(): return f(*args, **kwargs) return wrapper after_setup_logger.connect(setup_log) after_setup_task_logger.connect(setup_log)
... import logging import logging.handlers import logbook ... from celery.signals import after_setup_logger, after_setup_task_logger from celery.log import redirect_stdouts_to_logger ... logbook.StreamHandler(sys.stderr, bubble=True).push_application() redirect_stdouts_to_logger(args['logger']) # logs to local syslog if os.path.exists('/dev/log'): h = logging.handlers.SysLogHandler('/dev/log') else: h = logging.handlers.SysLogHandler() h.setLevel(args['loglevel']) formatter = logging.Formatter(logging.BASIC_FORMAT) h.setFormatter(formatter) args['logger'].addHandler(h) ...
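setup_log above chains three things: redirect Celery's stdout/stderr into its logger, prefer the local /dev/log socket, and fall back to the network SysLogHandler when that socket is missing. The stdlib half of that wiring, runnable anywhere; the stderr fallback stands in for syslog so the demo works on hosts with no syslog daemon:

import logging
import logging.handlers
import os
import sys

def attach_syslog(logger, level=logging.INFO):
    # Prefer the local syslog socket, as the deployment fix does; fall
    # back to stderr so this sketch still runs without a syslog daemon.
    if os.path.exists('/dev/log'):
        handler = logging.handlers.SysLogHandler('/dev/log')
    else:
        handler = logging.StreamHandler(sys.stderr)
    handler.setLevel(level)
    handler.setFormatter(logging.Formatter(logging.BASIC_FORMAT))
    logger.addHandler(handler)
    return handler

if __name__ == "__main__":
    log = logging.getLogger("tasks-demo")
    log.setLevel(logging.INFO)
    attach_syslog(log)
    log.info("celery-style log line routed to syslog or stderr")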
5fba86c9f9b0d647dc8f821a97a7cc2dbb76deeb
basis_set_exchange/tests/test_aux_sanity.py
basis_set_exchange/tests/test_aux_sanity.py
import pytest from .common_testvars import bs_names, bs_metadata @pytest.mark.parametrize('basis_name', bs_names) def test_aux_sanity(basis_name): """For all basis sets, check that 1. All aux basis sets exist 2. That the role of the aux basis set matches the role in the orbital basis """ this_metadata = bs_metadata[basis_name] for role, aux in this_metadata['auxiliaries'].items(): assert aux in bs_metadata aux_metadata = bs_metadata[aux] assert role == aux_metadata['role'] @pytest.mark.parametrize('basis_name', bs_names) def test_aux_reverse(basis_name): """Make sure all aux basis sets are paired with at least one orbital basis set """ this_metadata = bs_metadata[basis_name] r = this_metadata['role'] if r == 'orbital' or r == 'guess': return # Find where this basis set is listed as an auxiliary found = False for k, v in bs_metadata.items(): aux = v['auxiliaries'] for ak, av in aux.items(): if av == basis_name: assert ak == r found = True assert found
import pytest from .common_testvars import bs_names, bs_metadata from ..misc import transform_basis_name @pytest.mark.parametrize('basis_name', bs_names) def test_aux_sanity(basis_name): """For all basis sets, check that 1. All aux basis sets exist 2. That the role of the aux basis set matches the role in the orbital basis """ this_metadata = bs_metadata[basis_name] for role, aux in this_metadata['auxiliaries'].items(): assert aux in bs_metadata aux_metadata = bs_metadata[aux] assert role == aux_metadata['role'] @pytest.mark.parametrize('basis_name', bs_names) def test_aux_reverse(basis_name): """Make sure all aux basis sets are paired with at least one orbital basis set """ this_metadata = bs_metadata[basis_name] role = this_metadata['role'] if role == 'orbital' or role == 'guess': return # All possible names for this auxiliary set # We only have to match one all_aux_names = this_metadata["other_names"] + [basis_name] all_aux_names = [transform_basis_name(x) for x in all_aux_names] # Find where this basis set is listed as an auxiliary found = False for k, v in bs_metadata.items(): aux = v['auxiliaries'] for aux_role, aux_name in aux.items(): if aux_name in all_aux_names: assert aux_role == role found = True assert found
Fix test: Auxiliaries can have multiple names
Fix test: Auxiliaries can have multiple names
Python
bsd-3-clause
MOLSSI-BSE/basis_set_exchange
import pytest from .common_testvars import bs_names, bs_metadata + from ..misc import transform_basis_name @pytest.mark.parametrize('basis_name', bs_names) def test_aux_sanity(basis_name): """For all basis sets, check that 1. All aux basis sets exist 2. That the role of the aux basis set matches the role in the orbital basis """ this_metadata = bs_metadata[basis_name] for role, aux in this_metadata['auxiliaries'].items(): assert aux in bs_metadata aux_metadata = bs_metadata[aux] assert role == aux_metadata['role'] @pytest.mark.parametrize('basis_name', bs_names) def test_aux_reverse(basis_name): """Make sure all aux basis sets are paired with at least one orbital basis set """ this_metadata = bs_metadata[basis_name] - r = this_metadata['role'] + role = this_metadata['role'] - if r == 'orbital' or r == 'guess': + if role == 'orbital' or role == 'guess': return + + # All possible names for this auxiliary set + # We only have to match one + all_aux_names = this_metadata["other_names"] + [basis_name] + all_aux_names = [transform_basis_name(x) for x in all_aux_names] # Find where this basis set is listed as an auxiliary found = False for k, v in bs_metadata.items(): aux = v['auxiliaries'] - for ak, av in aux.items(): + for aux_role, aux_name in aux.items(): - if av == basis_name: + if aux_name in all_aux_names: - assert ak == r + assert aux_role == role found = True assert found
Fix test: Auxiliaries can have multiple names
## Code Before: import pytest from .common_testvars import bs_names, bs_metadata @pytest.mark.parametrize('basis_name', bs_names) def test_aux_sanity(basis_name): """For all basis sets, check that 1. All aux basis sets exist 2. That the role of the aux basis set matches the role in the orbital basis """ this_metadata = bs_metadata[basis_name] for role, aux in this_metadata['auxiliaries'].items(): assert aux in bs_metadata aux_metadata = bs_metadata[aux] assert role == aux_metadata['role'] @pytest.mark.parametrize('basis_name', bs_names) def test_aux_reverse(basis_name): """Make sure all aux basis sets are paired with at least one orbital basis set """ this_metadata = bs_metadata[basis_name] r = this_metadata['role'] if r == 'orbital' or r == 'guess': return # Find where this basis set is listed as an auxiliary found = False for k, v in bs_metadata.items(): aux = v['auxiliaries'] for ak, av in aux.items(): if av == basis_name: assert ak == r found = True assert found ## Instruction: Fix test: Auxiliaries can have multiple names ## Code After: import pytest from .common_testvars import bs_names, bs_metadata from ..misc import transform_basis_name @pytest.mark.parametrize('basis_name', bs_names) def test_aux_sanity(basis_name): """For all basis sets, check that 1. All aux basis sets exist 2. That the role of the aux basis set matches the role in the orbital basis """ this_metadata = bs_metadata[basis_name] for role, aux in this_metadata['auxiliaries'].items(): assert aux in bs_metadata aux_metadata = bs_metadata[aux] assert role == aux_metadata['role'] @pytest.mark.parametrize('basis_name', bs_names) def test_aux_reverse(basis_name): """Make sure all aux basis sets are paired with at least one orbital basis set """ this_metadata = bs_metadata[basis_name] role = this_metadata['role'] if role == 'orbital' or role == 'guess': return # All possible names for this auxiliary set # We only have to match one all_aux_names = this_metadata["other_names"] + [basis_name] all_aux_names = [transform_basis_name(x) for x in all_aux_names] # Find where this basis set is listed as an auxiliary found = False for k, v in bs_metadata.items(): aux = v['auxiliaries'] for aux_role, aux_name in aux.items(): if aux_name in all_aux_names: assert aux_role == role found = True assert found
# ... existing code ... from .common_testvars import bs_names, bs_metadata from ..misc import transform_basis_name # ... modified code ... this_metadata = bs_metadata[basis_name] role = this_metadata['role'] if role == 'orbital' or role == 'guess': return # All possible names for this auxiliary set # We only have to match one all_aux_names = this_metadata["other_names"] + [basis_name] all_aux_names = [transform_basis_name(x) for x in all_aux_names] ... aux = v['auxiliaries'] for aux_role, aux_name in aux.items(): if aux_name in all_aux_names: assert aux_role == role found = True # ... rest of the code ...
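The reverse check now passes when any spelling of the auxiliary set, its canonical name or any entry in other_names, shows up after normalisation among an orbital set's declared auxiliaries. transform_basis_name's real rules live in basis_set_exchange.misc; the lower-casing stand-in below is an assumption used purely to make the matching concrete:

def transform_basis_name(name):
    # Stand-in for basis_set_exchange.misc.transform_basis_name; the
    # library's normalisation may do more than lower-casing.
    return name.lower()

def is_paired(aux_name, other_names, orbital_auxiliaries):
    # True when any spelling of the auxiliary set appears among the
    # orbital basis set's declared auxiliaries.
    spellings = {transform_basis_name(n) for n in other_names + [aux_name]}
    return any(transform_basis_name(v) in spellings
               for v in orbital_auxiliaries.values())

if __name__ == "__main__":
    assert is_paired('def2-universal-jfit', ['weigend'], {'jfit': 'Weigend'})
    assert not is_paired('cc-pvdz-rifit', [], {'jfit': 'Weigend'})
    print("auxiliary matched through an alternate name")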
41aa2c20a564c87fac1fd02d3bf40db84b02d49d
testing/test_run.py
testing/test_run.py
from regr_test import run from subprocess import check_output import os def test_source(): script = 'test_env.sh' var_name, var_value = 'TESTVAR', 'This is a test' with open(script, 'w') as f: f.write('export %s="%s"' % (var_name, var_value)) env = run.source(script) cmd = ['/bin/bash', '-c', 'echo $%s' % var_name] stdout = check_output(cmd, env=env, universal_newlines=True) os.remove(script) assert stdout.strip() == var_value def test_duration(): time = 2 p = run.execute('sleep %s' % time) (duration, data) = run.monitor(p) assert abs(duration-time) < 1.0 def test_timeout(): p = run.execute('sleep 5') (duration, data) = run.monitor(p, timeout=2) assert duration is None
from regr_test import run import os from subprocess import check_output from tempfile import NamedTemporaryFile def test_source(): var_name, var_value = 'TESTVAR', 'This is a test' with NamedTemporaryFile('w', delete=False) as f: f.write('export %s="%s"' % (var_name, var_value)) script_name = f.name env = run.source(script_name) cmd = ['/bin/bash', '-c', 'echo $%s' % var_name] stdout = check_output(cmd, env=env, universal_newlines=True) os.remove(script_name) assert stdout.strip() == var_value def test_duration(): time = 2 p = run.execute('sleep %s' % time) (duration, data) = run.monitor(p) assert abs(duration-time) < 1.0 def test_timeout(): p = run.execute('sleep 5') (duration, data) = run.monitor(p, timeout=2) assert duration is None
Make a test more secure
Make a test more secure Choose a random filename to avoid overwriting a file.
Python
mit
davidchall/nrtest
from regr_test import run + import os from subprocess import check_output - import os + from tempfile import NamedTemporaryFile def test_source(): - script = 'test_env.sh' var_name, var_value = 'TESTVAR', 'This is a test' - with open(script, 'w') as f: + with NamedTemporaryFile('w', delete=False) as f: f.write('export %s="%s"' % (var_name, var_value)) + script_name = f.name - env = run.source(script) + env = run.source(script_name) cmd = ['/bin/bash', '-c', 'echo $%s' % var_name] stdout = check_output(cmd, env=env, universal_newlines=True) - os.remove(script) + os.remove(script_name) assert stdout.strip() == var_value def test_duration(): time = 2 p = run.execute('sleep %s' % time) (duration, data) = run.monitor(p) assert abs(duration-time) < 1.0 def test_timeout(): p = run.execute('sleep 5') (duration, data) = run.monitor(p, timeout=2) assert duration is None
Make a test more secure
## Code Before: from regr_test import run from subprocess import check_output import os def test_source(): script = 'test_env.sh' var_name, var_value = 'TESTVAR', 'This is a test' with open(script, 'w') as f: f.write('export %s="%s"' % (var_name, var_value)) env = run.source(script) cmd = ['/bin/bash', '-c', 'echo $%s' % var_name] stdout = check_output(cmd, env=env, universal_newlines=True) os.remove(script) assert stdout.strip() == var_value def test_duration(): time = 2 p = run.execute('sleep %s' % time) (duration, data) = run.monitor(p) assert abs(duration-time) < 1.0 def test_timeout(): p = run.execute('sleep 5') (duration, data) = run.monitor(p, timeout=2) assert duration is None ## Instruction: Make a test more secure ## Code After: from regr_test import run import os from subprocess import check_output from tempfile import NamedTemporaryFile def test_source(): var_name, var_value = 'TESTVAR', 'This is a test' with NamedTemporaryFile('w', delete=False) as f: f.write('export %s="%s"' % (var_name, var_value)) script_name = f.name env = run.source(script_name) cmd = ['/bin/bash', '-c', 'echo $%s' % var_name] stdout = check_output(cmd, env=env, universal_newlines=True) os.remove(script_name) assert stdout.strip() == var_value def test_duration(): time = 2 p = run.execute('sleep %s' % time) (duration, data) = run.monitor(p) assert abs(duration-time) < 1.0 def test_timeout(): p = run.execute('sleep 5') (duration, data) = run.monitor(p, timeout=2) assert duration is None
... import os from subprocess import check_output from tempfile import NamedTemporaryFile ... def test_source(): var_name, var_value = 'TESTVAR', 'This is a test' with NamedTemporaryFile('w', delete=False) as f: f.write('export %s="%s"' % (var_name, var_value)) script_name = f.name env = run.source(script_name) cmd = ['/bin/bash', '-c', 'echo $%s' % var_name] ... os.remove(script_name) assert stdout.strip() == var_value ...
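NamedTemporaryFile hands the test a unique path instead of a fixed test_env.sh that could clobber an existing file, and delete=False keeps the file around so it can be reopened by name after the with-block closes it. The same round trip in isolation:

import os
from tempfile import NamedTemporaryFile

# Write through a uniquely named temp file, reopen it by name, clean up.
with NamedTemporaryFile('w', delete=False) as f:
    f.write('export TESTVAR="This is a test"\n')
    script_name = f.name          # the path outlives the close

with open(script_name) as f:
    print(f.read().strip())

os.remove(script_name)            # delete=False makes cleanup the caller's job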
a8596fd4a76460bd3e15509825d3cb3f82a3f8c4
test/integration/ggrc/converters/test_import_delete.py
test/integration/ggrc/converters/test_import_delete.py
from ggrc.converters import errors from integration.ggrc import TestCase class TestBasicCsvImport(TestCase): def setUp(self): TestCase.setUp(self) self.client.get("/login") def test_policy_basic_import(self): filename = "ca_setup_for_deletion.csv" self.import_file(filename) filename = "ca_deletion.csv" response_data_dry = self.import_file(filename, dry_run=True) response_data = self.import_file(filename) self.assertEqual(response_data_dry, response_data) self.assertEqual(response_data[0]["deleted"], 2) self.assertEqual(response_data[0]["ignored"], 0)
from integration.ggrc import TestCase class TestBasicCsvImport(TestCase): def setUp(self): TestCase.setUp(self) self.client.get("/login") def test_policy_basic_import(self): filename = "ca_setup_for_deletion.csv" self.import_file(filename) filename = "ca_deletion.csv" response_data = self.import_file(filename) self.assertEqual(response_data[0]["deleted"], 2) self.assertEqual(response_data[0]["ignored"], 0)
Optimize basic delete import tests
Optimize basic delete import tests The dry-run check is now automatically performed on each import and we do not need to duplicate the work in the delete test.
Python
apache-2.0
selahssea/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core,selahssea/ggrc-core,VinnieJohns/ggrc-core,plamut/ggrc-core,plamut/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core
- from ggrc.converters import errors from integration.ggrc import TestCase class TestBasicCsvImport(TestCase): def setUp(self): TestCase.setUp(self) self.client.get("/login") def test_policy_basic_import(self): filename = "ca_setup_for_deletion.csv" self.import_file(filename) filename = "ca_deletion.csv" - response_data_dry = self.import_file(filename, dry_run=True) response_data = self.import_file(filename) - self.assertEqual(response_data_dry, response_data) self.assertEqual(response_data[0]["deleted"], 2) self.assertEqual(response_data[0]["ignored"], 0)
Optimize basic delete import tests
## Code Before: from ggrc.converters import errors from integration.ggrc import TestCase class TestBasicCsvImport(TestCase): def setUp(self): TestCase.setUp(self) self.client.get("/login") def test_policy_basic_import(self): filename = "ca_setup_for_deletion.csv" self.import_file(filename) filename = "ca_deletion.csv" response_data_dry = self.import_file(filename, dry_run=True) response_data = self.import_file(filename) self.assertEqual(response_data_dry, response_data) self.assertEqual(response_data[0]["deleted"], 2) self.assertEqual(response_data[0]["ignored"], 0) ## Instruction: Optimize basic delete import tests ## Code After: from integration.ggrc import TestCase class TestBasicCsvImport(TestCase): def setUp(self): TestCase.setUp(self) self.client.get("/login") def test_policy_basic_import(self): filename = "ca_setup_for_deletion.csv" self.import_file(filename) filename = "ca_deletion.csv" response_data = self.import_file(filename) self.assertEqual(response_data[0]["deleted"], 2) self.assertEqual(response_data[0]["ignored"], 0)
// ... existing code ... from integration.ggrc import TestCase // ... modified code ... filename = "ca_deletion.csv" response_data = self.import_file(filename) // ... rest of the code ...
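The deleted lines ran the import twice, once dry, and asserted the responses matched; per the message, import_file in the test base class now performs that comparison on every call. A sketch of what such a self-checking helper can look like; the importer callable and its dry_run flag are assumptions for illustration, not ggrc's actual API:

def import_with_dry_run_check(importer, filename):
    # Run a dry pass first and insist it matches the real import before
    # handing the result back, so every test gets the check for free.
    dry = importer(filename, dry_run=True)
    real = importer(filename, dry_run=False)
    assert dry == real, "dry run diverged from the real import"
    return real

if __name__ == "__main__":
    def fake_importer(filename, dry_run):
        return [{"deleted": 2, "ignored": 0}]    # same response either way
    print(import_with_dry_run_check(fake_importer, "ca_deletion.csv"))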
fd9f69cbc5512ea91837ff4512d4c9549b2f9eeb
plugin/DebianUtils/__init__.py
plugin/DebianUtils/__init__.py
import os import sys __version__ = "1" __author__ = 'Chris Lamb <[email protected]>' __contributors__ = {} __url__ = '' basedir = os.path.dirname(os.path.dirname(os.path.dirname(__file__))) if basedir not in sys.path: sys.path.append(basedir) import config import plugin reload(plugin) Class = plugin.Class configure = config.configure
import os import sys __version__ = "1" __author__ = 'Chris Lamb <[email protected]>' __contributors__ = {} __url__ = '' basedir = os.path.dirname(os.path.dirname(os.path.dirname(__file__))) if basedir not in sys.path: sys.path.append(basedir) import DebianDevelChangesBot reload(DebianDevelChangesBot) import config import plugin reload(plugin) Class = plugin.Class configure = config.configure
Add reload routines to DebianUtils plugin
Add reload routines to DebianUtils plugin Signed-off-by: Chris Lamb <[email protected]>
Python
agpl-3.0
lamby/debian-devel-changes-bot,lamby/debian-devel-changes-bot,lamby/debian-devel-changes-bot,xtaran/debian-devel-changes-bot,sebastinas/debian-devel-changes-bot,xtaran/debian-devel-changes-bot
import os import sys __version__ = "1" __author__ = 'Chris Lamb <[email protected]>' __contributors__ = {} __url__ = '' basedir = os.path.dirname(os.path.dirname(os.path.dirname(__file__))) if basedir not in sys.path: sys.path.append(basedir) + import DebianDevelChangesBot + reload(DebianDevelChangesBot) + import config import plugin reload(plugin) Class = plugin.Class configure = config.configure
Add reload routines to DebianUtils plugin
## Code Before: import os import sys __version__ = "1" __author__ = 'Chris Lamb <[email protected]>' __contributors__ = {} __url__ = '' basedir = os.path.dirname(os.path.dirname(os.path.dirname(__file__))) if basedir not in sys.path: sys.path.append(basedir) import config import plugin reload(plugin) Class = plugin.Class configure = config.configure ## Instruction: Add reload routines to DebianUtils plugin ## Code After: import os import sys __version__ = "1" __author__ = 'Chris Lamb <[email protected]>' __contributors__ = {} __url__ = '' basedir = os.path.dirname(os.path.dirname(os.path.dirname(__file__))) if basedir not in sys.path: sys.path.append(basedir) import DebianDevelChangesBot reload(DebianDevelChangesBot) import config import plugin reload(plugin) Class = plugin.Class configure = config.configure
# ... existing code ... import DebianDevelChangesBot reload(DebianDevelChangesBot) import config # ... rest of the code ...
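Supybot reloads a plugin's own modules on @reload, but a shared package like DebianDevelChangesBot only picks up on-disk changes if the plugin refreshes it before re-importing anything built on top of it, hence reload(DebianDevelChangesBot) sitting ahead of the config and plugin imports. The same ordering with stdlib stand-ins, runnable as-is:

import importlib

import json                       # stands in for DebianDevelChangesBot
json = importlib.reload(json)     # refresh the shared dependency first

# Only afterwards (re)import and refresh the modules that depend on it,
# so their own imports bind against the refreshed copy in sys.modules.
import csv                        # stands in for the plugin's config/plugin
csv = importlib.reload(csv)

print("reloaded in dependency order:", json.__name__, csv.__name__)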
2107a8c161d8a9fe13977a0997defb35297821c2
certbot/tests/helpers.py
certbot/tests/helpers.py
import json

import testtools
from testtools.twistedsupport import AsynchronousDeferredRunTest
from uritools import urisplit


class TestCase(testtools.TestCase):
    """ TestCase class for use with Twisted asynchronous tests. """
    run_tests_with = AsynchronousDeferredRunTest


def parse_query(uri):
    """
    Parse the query dict from the given URI. When Twisted parses "args" from
    the URI, it leaves out query parameters that have no value. In those cases
    we rather use uritools to parse the query parameters.
    """
    return urisplit(uri).getquerydict()


def read_json_response(request):
    """ Read JSON from the UTF-8 encoded body of the given request. """
    return json.loads(request.content.read().decode('utf-8'))


def write_json_response(request, json_data, response_code=200):
    """
    Write UTF-8 encoded JSON to the body of a request, set the Content-Type
    header and finish() the request.
    """
    request.setResponseCode(response_code)
    request.setHeader('Content-Type', 'application/json; charset=utf-8')
    request.write(json.dumps(json_data).encode('utf-8'))
    request.finish()
import json

import testtools
from testtools.twistedsupport import AsynchronousDeferredRunTest
from uritools import urisplit


class TestCase(testtools.TestCase):
    """ TestCase class for use with Twisted asynchronous tests. """
    run_tests_with = AsynchronousDeferredRunTest.make_factory(timeout=0.01)


def parse_query(uri):
    """
    Parse the query dict from the given URI. When Twisted parses "args" from
    the URI, it leaves out query parameters that have no value. In those cases
    we rather use uritools to parse the query parameters.
    """
    return urisplit(uri).getquerydict()


def read_json_response(request):
    """ Read JSON from the UTF-8 encoded body of the given request. """
    return json.loads(request.content.read().decode('utf-8'))


def write_json_response(request, json_data, response_code=200):
    """
    Write UTF-8 encoded JSON to the body of a request, set the Content-Type
    header and finish() the request.
    """
    request.setResponseCode(response_code)
    request.setHeader('Content-Type', 'application/json; charset=utf-8')
    request.write(json.dumps(json_data).encode('utf-8'))
    request.finish()
Increase default test timeout value
Increase default test timeout value
Python
mit
praekeltfoundation/certbot,praekeltfoundation/certbot
import json

import testtools
from testtools.twistedsupport import AsynchronousDeferredRunTest
from uritools import urisplit


class TestCase(testtools.TestCase):
    """ TestCase class for use with Twisted asynchronous tests. """
-     run_tests_with = AsynchronousDeferredRunTest
+     run_tests_with = AsynchronousDeferredRunTest.make_factory(timeout=0.01)


def parse_query(uri):
    """
    Parse the query dict from the given URI. When Twisted parses "args" from
    the URI, it leaves out query parameters that have no value. In those cases
    we rather use uritools to parse the query parameters.
    """
    return urisplit(uri).getquerydict()


def read_json_response(request):
    """ Read JSON from the UTF-8 encoded body of the given request. """
    return json.loads(request.content.read().decode('utf-8'))


def write_json_response(request, json_data, response_code=200):
    """
    Write UTF-8 encoded JSON to the body of a request, set the Content-Type
    header and finish() the request.
    """
    request.setResponseCode(response_code)
    request.setHeader('Content-Type', 'application/json; charset=utf-8')
    request.write(json.dumps(json_data).encode('utf-8'))
    request.finish()
Increase default test timeout value
## Code Before:
import json

import testtools
from testtools.twistedsupport import AsynchronousDeferredRunTest
from uritools import urisplit


class TestCase(testtools.TestCase):
    """ TestCase class for use with Twisted asynchronous tests. """
    run_tests_with = AsynchronousDeferredRunTest


def parse_query(uri):
    """
    Parse the query dict from the given URI. When Twisted parses "args" from
    the URI, it leaves out query parameters that have no value. In those cases
    we rather use uritools to parse the query parameters.
    """
    return urisplit(uri).getquerydict()


def read_json_response(request):
    """ Read JSON from the UTF-8 encoded body of the given request. """
    return json.loads(request.content.read().decode('utf-8'))


def write_json_response(request, json_data, response_code=200):
    """
    Write UTF-8 encoded JSON to the body of a request, set the Content-Type
    header and finish() the request.
    """
    request.setResponseCode(response_code)
    request.setHeader('Content-Type', 'application/json; charset=utf-8')
    request.write(json.dumps(json_data).encode('utf-8'))
    request.finish()

## Instruction:
Increase default test timeout value

## Code After:
import json

import testtools
from testtools.twistedsupport import AsynchronousDeferredRunTest
from uritools import urisplit


class TestCase(testtools.TestCase):
    """ TestCase class for use with Twisted asynchronous tests. """
    run_tests_with = AsynchronousDeferredRunTest.make_factory(timeout=0.01)


def parse_query(uri):
    """
    Parse the query dict from the given URI. When Twisted parses "args" from
    the URI, it leaves out query parameters that have no value. In those cases
    we rather use uritools to parse the query parameters.
    """
    return urisplit(uri).getquerydict()


def read_json_response(request):
    """ Read JSON from the UTF-8 encoded body of the given request. """
    return json.loads(request.content.read().decode('utf-8'))


def write_json_response(request, json_data, response_code=200):
    """
    Write UTF-8 encoded JSON to the body of a request, set the Content-Type
    header and finish() the request.
    """
    request.setResponseCode(response_code)
    request.setHeader('Content-Type', 'application/json; charset=utf-8')
    request.write(json.dumps(json_data).encode('utf-8'))
    request.finish()
... """ TestCase class for use with Twisted asynchornous tests. """ run_tests_with = AsynchronousDeferredRunTest.make_factory(timeout=0.01) ...
59c698e5db5c7fb2d537398cdad93215714b21f0
SimpleLoop.py
SimpleLoop.py
class EventLoop: def __init__ (self): self._queue = [] self._defaultinvocation = None def queueInvocation(self, function, args): self._queue.append((function, args)) def defaultInvocation(self, function, args): self._defaultinvocation = (function, args) def run(self): while True: if len(self._queue) > 0: (function, args) = self._queue.pop() function(self, args) elif self._defaultinvocation: (function, args) = self._defaultinvocation function(self, args) else: break
class EventLoop: def __init__ (self): self._queue = [] self._running = False def queueInvocation(self, function, args): self._queue.append((function, args)) def defaultInvocation(self, function, args): self._defaultinvocation = (function, args) def quit(self): self._running = False def run(self): self._running = True while True: if not self._running: break if len(self._queue) > 0: (function, args) = self._queue.pop() function(self, args) elif self._defaultinvocation: (function, args) = self._defaultinvocation function(self, args) else: break
Add a way to have the loop quit after current invocation has been processed.
Add a way to have the loop quit after current invocation has been processed.
Python
apache-2.0
nanonyme/SimpleLoop,nanonyme/SimpleLoop
class EventLoop: def __init__ (self): self._queue = [] - self._defaultinvocation = None + self._running = False def queueInvocation(self, function, args): self._queue.append((function, args)) def defaultInvocation(self, function, args): self._defaultinvocation = (function, args) + def quit(self): + self._running = False + def run(self): + self._running = True while True: + if not self._running: + break if len(self._queue) > 0: (function, args) = self._queue.pop() function(self, args) elif self._defaultinvocation: (function, args) = self._defaultinvocation function(self, args) else: break
Add a way to have the loop quit after current invocation has been processed.
## Code Before: class EventLoop: def __init__ (self): self._queue = [] self._defaultinvocation = None def queueInvocation(self, function, args): self._queue.append((function, args)) def defaultInvocation(self, function, args): self._defaultinvocation = (function, args) def run(self): while True: if len(self._queue) > 0: (function, args) = self._queue.pop() function(self, args) elif self._defaultinvocation: (function, args) = self._defaultinvocation function(self, args) else: break ## Instruction: Add a way to have the loop quit after current invocation has been processed. ## Code After: class EventLoop: def __init__ (self): self._queue = [] self._running = False def queueInvocation(self, function, args): self._queue.append((function, args)) def defaultInvocation(self, function, args): self._defaultinvocation = (function, args) def quit(self): self._running = False def run(self): self._running = True while True: if not self._running: break if len(self._queue) > 0: (function, args) = self._queue.pop() function(self, args) elif self._defaultinvocation: (function, args) = self._defaultinvocation function(self, args) else: break
... self._queue = [] self._running = False ... def quit(self): self._running = False def run(self): self._running = True while True: if not self._running: break if len(self._queue) > 0: ...
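A short usage sketch of the `quit()` mechanism added above (assuming the corrected `def quit(self)` signature): an invocation flips `_running`, and `run()` checks the flag before dequeuing the next item, so the loop exits once the current invocation has been processed.

```python
# Sketch: drive the EventLoop above to completion via quit().
loop = EventLoop()

def say_and_stop(loop, args):
    print("handled:", args)
    loop.quit()          # next loop iteration sees _running == False and breaks

loop.queueInvocation(say_and_stop, "only invocation")
loop.run()               # prints once, then returns
```

This matters mostly when a `defaultInvocation` is registered, since without the flag the loop would otherwise never terminate.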
f61b81e968384859eb51a2ff14ca7709e8322ae8
yunity/walls/models.py
yunity/walls/models.py
from django.db.models import ForeignKey, TextField from config import settings from yunity.base.models import BaseModel class Wall(BaseModel): pass class WallPost(BaseModel): wall = ForeignKey(Wall) author = ForeignKey(settings.AUTH_USER_MODEL) class WallPostContent(BaseModel): post = ForeignKey(WallPost) author = ForeignKey(settings.AUTH_USER_MODEL) body = TextField()
from django.db.models import ForeignKey, TextField from config import settings from yunity.base.models import BaseModel class Wall(BaseModel): def resolve_permissions(self, collector): h = self.hub if h.target_content_type.model == 'group': g = h.target """:type : Group""" collector.add_hub(h, 'read') if g.is_content_included_in_parent: g = g.parent while g: collector.add_hub(g.hub, 'read') g = g.parent class WallPost(BaseModel): wall = ForeignKey(Wall) author = ForeignKey(settings.AUTH_USER_MODEL) class WallPostContent(BaseModel): post = ForeignKey(WallPost) author = ForeignKey(settings.AUTH_USER_MODEL) body = TextField()
Implement basic permissions resolver for walls
Implement basic permissions resolver for walls To be seen as a poc, collect all hub permissions for a basic permission and settings/inheritance model for reading a wall. with @nicksellen
Python
agpl-3.0
yunity/foodsaving-backend,yunity/yunity-core,yunity/foodsaving-backend,yunity/yunity-core,yunity/foodsaving-backend
from django.db.models import ForeignKey, TextField from config import settings from yunity.base.models import BaseModel class Wall(BaseModel): - pass + def resolve_permissions(self, collector): + h = self.hub + if h.target_content_type.model == 'group': + g = h.target + """:type : Group""" + collector.add_hub(h, 'read') + if g.is_content_included_in_parent: + g = g.parent + while g: + collector.add_hub(g.hub, 'read') + g = g.parent class WallPost(BaseModel): wall = ForeignKey(Wall) author = ForeignKey(settings.AUTH_USER_MODEL) class WallPostContent(BaseModel): post = ForeignKey(WallPost) author = ForeignKey(settings.AUTH_USER_MODEL) body = TextField()
Implement basic permissions resolver for walls
## Code Before: from django.db.models import ForeignKey, TextField from config import settings from yunity.base.models import BaseModel class Wall(BaseModel): pass class WallPost(BaseModel): wall = ForeignKey(Wall) author = ForeignKey(settings.AUTH_USER_MODEL) class WallPostContent(BaseModel): post = ForeignKey(WallPost) author = ForeignKey(settings.AUTH_USER_MODEL) body = TextField() ## Instruction: Implement basic permissions resolver for walls ## Code After: from django.db.models import ForeignKey, TextField from config import settings from yunity.base.models import BaseModel class Wall(BaseModel): def resolve_permissions(self, collector): h = self.hub if h.target_content_type.model == 'group': g = h.target """:type : Group""" collector.add_hub(h, 'read') if g.is_content_included_in_parent: g = g.parent while g: collector.add_hub(g.hub, 'read') g = g.parent class WallPost(BaseModel): wall = ForeignKey(Wall) author = ForeignKey(settings.AUTH_USER_MODEL) class WallPostContent(BaseModel): post = ForeignKey(WallPost) author = ForeignKey(settings.AUTH_USER_MODEL) body = TextField()
... class Wall(BaseModel): def resolve_permissions(self, collector): h = self.hub if h.target_content_type.model == 'group': g = h.target """:type : Group""" collector.add_hub(h, 'read') if g.is_content_included_in_parent: g = g.parent while g: collector.add_hub(g.hub, 'read') g = g.parent ...
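The resolver above only assumes its argument exposes an `add_hub(hub, permission)` method; a throwaway collector like the following (illustrative, not the project's real collector) is enough to exercise it and inspect which hubs would grant read access up the group parent chain.

```python
# Hypothetical stand-in for the real permission collector.
class ListCollector:
    def __init__(self):
        self.grants = []

    def add_hub(self, hub, permission):
        self.grants.append((hub, permission))

# Usage sketch: wall.resolve_permissions(collector) walks wall.hub and then,
# when content is included in the parent, each ancestor group's hub,
# recording (hub, 'read') pairs.
# collector = ListCollector()
# wall.resolve_permissions(collector)
# print(collector.grants)
```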
e1569a514345a8c78d415011387d06aed5e6daa4
webshack/cli.py
webshack/cli.py
import sys from docopt import docopt from termcolor import colored from webshack.install_package import install_package_hierarchy import webshack.package_db as pdb from pathlib import Path VERSION="0.0.1" class CLIOutput: def __init__(self): self.shift_width = 0 def log(self, package): if package is None: self.end_package() else: self.begin_package(package) def begin_package(self, package): self.shift_width = 50 - len(package) sys.stdout.write("Installing {pkg}...".format(pkg=colored(package, 'blue'))) sys.stdout.flush() def end_package(self): sys.stdout.write(' '*self.shift_width) sys.stdout.write('[{}]\n'.format(colored('DONE', 'green', attrs=['bold']))) sys.stdout.flush() def main(): options = docopt(__doc__, version=VERSION) db = pdb.standard_package_db() components = Path('components') if options['get']: output = CLIOutput() for package in options['<package>']: install_package_hierarchy(package, db, components, log_output=output.log)
import sys from docopt import docopt from termcolor import colored from webshack.install_package import install_package_hierarchy import webshack.package_db as pdb from pathlib import Path VERSION="0.0.1" class CLIOutput: def __init__(self): self.shift_width = 0 def log(self, package): if package is None: self.end_package() else: self.begin_package(package) def begin_package(self, package): self.shift_width = 50 - len(package) sys.stdout.write("Installing {pkg}...".format(pkg=colored(package, 'blue'))) sys.stdout.flush() def end_package(self): sys.stdout.write(' '*self.shift_width) sys.stdout.write('[{}]\n'.format(colored('DONE', 'green', attrs=['bold']))) sys.stdout.flush() def main(): options = docopt(__doc__, version=VERSION) db = pdb.standard_package_db() components = Path('components') if options['get']: output = CLIOutput() for package in options['<package>']: install_package_hierarchy(package, db, components, log_output=output.log) elif options['list']: for package in sorted(db): print(package)
Add a subcommand for listing packages
Add a subcommand for listing packages
Python
mit
prophile/webshack
import sys from docopt import docopt from termcolor import colored from webshack.install_package import install_package_hierarchy import webshack.package_db as pdb from pathlib import Path VERSION="0.0.1" class CLIOutput: def __init__(self): self.shift_width = 0 def log(self, package): if package is None: self.end_package() else: self.begin_package(package) def begin_package(self, package): self.shift_width = 50 - len(package) sys.stdout.write("Installing {pkg}...".format(pkg=colored(package, 'blue'))) sys.stdout.flush() def end_package(self): sys.stdout.write(' '*self.shift_width) sys.stdout.write('[{}]\n'.format(colored('DONE', 'green', attrs=['bold']))) sys.stdout.flush() def main(): options = docopt(__doc__, version=VERSION) db = pdb.standard_package_db() components = Path('components') if options['get']: output = CLIOutput() for package in options['<package>']: install_package_hierarchy(package, db, components, log_output=output.log) + elif options['list']: + for package in sorted(db): + print(package)
Add a subcommand for listing packages
## Code Before: import sys from docopt import docopt from termcolor import colored from webshack.install_package import install_package_hierarchy import webshack.package_db as pdb from pathlib import Path VERSION="0.0.1" class CLIOutput: def __init__(self): self.shift_width = 0 def log(self, package): if package is None: self.end_package() else: self.begin_package(package) def begin_package(self, package): self.shift_width = 50 - len(package) sys.stdout.write("Installing {pkg}...".format(pkg=colored(package, 'blue'))) sys.stdout.flush() def end_package(self): sys.stdout.write(' '*self.shift_width) sys.stdout.write('[{}]\n'.format(colored('DONE', 'green', attrs=['bold']))) sys.stdout.flush() def main(): options = docopt(__doc__, version=VERSION) db = pdb.standard_package_db() components = Path('components') if options['get']: output = CLIOutput() for package in options['<package>']: install_package_hierarchy(package, db, components, log_output=output.log) ## Instruction: Add a subcommand for listing packages ## Code After: import sys from docopt import docopt from termcolor import colored from webshack.install_package import install_package_hierarchy import webshack.package_db as pdb from pathlib import Path VERSION="0.0.1" class CLIOutput: def __init__(self): self.shift_width = 0 def log(self, package): if package is None: self.end_package() else: self.begin_package(package) def begin_package(self, package): self.shift_width = 50 - len(package) sys.stdout.write("Installing {pkg}...".format(pkg=colored(package, 'blue'))) sys.stdout.flush() def end_package(self): sys.stdout.write(' '*self.shift_width) sys.stdout.write('[{}]\n'.format(colored('DONE', 'green', attrs=['bold']))) sys.stdout.flush() def main(): options = docopt(__doc__, version=VERSION) db = pdb.standard_package_db() components = Path('components') if options['get']: output = CLIOutput() for package in options['<package>']: install_package_hierarchy(package, db, components, log_output=output.log) elif options['list']: for package in sorted(db): print(package)
# ... existing code ... log_output=output.log) elif options['list']: for package in sorted(db): print(package) # ... rest of the code ...
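The new `list` branch relies only on the package db being iterable over package names, so that `sorted(db)` yields them alphabetically. A dict-backed stand-in shows the contract; the real `pdb.standard_package_db()` presumably satisfies it, though its internals are not shown in this record.

```python
# Dict stand-in for the package db: iterating a dict yields its keys.
fake_db = {"polymer": {}, "jquery": {}, "normalize-css": {}}

for package in sorted(fake_db):
    print(package)
# jquery
# normalize-css
# polymer
```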
e0b82cf9ed24870cb313328e5539acc5fe7f6508
stock_awesome/levels/chock_a_block.py
stock_awesome/levels/chock_a_block.py
import time from stock_awesome.obj import market def main(): """ Algorithm: Wait for an ask, then send a fill or kill for the quantity of the ask at the ask price. """ m = market.StockAPI('WEB29978261', 'NOWUEX', 'BBCM') #collection of orders placed orders = {} filled = 0 upper_limit = 3300 #try to buy 100000 to_send = 1000 while to_send > 0: quote = m.quote() ask = quote.get('ask') if ask and ask < upper_limit: r = m.buy(quote['askSize'], quote['ask'], order_type='fill-or-kill') to_send -= 1 orders[r['id']] = r orders = update_orders(m, orders) filled += update_filled(orders) else: time.sleep(1) def update_orders(m, orders): """ update order status """ return {o: m.order_status(o) for o in orders} def update_filled(orders): """ Remove filled orders and update our count. """ closed = [o for o in orders if not orders[o]['open']] #remove and sum filled orders filled = sum(orders.pop(o)['totalFilled'] for o in closed) return filled if __name__ == '__main__': main()
import time from stock_awesome.obj import market def main(): """ Algorithm: Wait for an ask, then send a fill or kill for the quantity of the ask at the ask price. """ m = market.StockAPI('RAJ40214463', 'SSMCEX', 'IPSO') #collection of orders placed orders = {} filled = 0 upper_limit = 2450 #try to buy 100000 to_buy = 100000 while to_buy > 0: quote = m.quote() ask = quote.get('ask', 0) bid = quote.get('bid') if ask < upper_limit: r = m.buy(quote['askSize'], ask, order_type='fill-or-kill') to_buy -= r['totalFilled'] print("Bought {}, {} remaining".format(r['totalFilled'], to_buy)) else: time.sleep(1) print('done') def update_orders(m, orders): """ update order status """ return {o: m.order_status(o) for o in orders} def update_filled(orders): """ Remove filled orders and update our count. """ closed = [o for o in orders if not orders[o]['open']] #remove and sum filled orders filled = sum(orders.pop(o)['totalFilled'] for o in closed) return filled if __name__ == '__main__': main()
Add some (ineffective) score maximizing attempts
Add some (ineffective) score maximizing attempts
Python
mit
ForeverWintr/stock_awesome
import time from stock_awesome.obj import market def main(): """ Algorithm: Wait for an ask, then send a fill or kill for the quantity of the ask at the ask price. """ - m = market.StockAPI('WEB29978261', 'NOWUEX', 'BBCM') + m = market.StockAPI('RAJ40214463', 'SSMCEX', 'IPSO') #collection of orders placed orders = {} filled = 0 - upper_limit = 3300 + upper_limit = 2450 #try to buy 100000 - to_send = 1000 + to_buy = 100000 - while to_send > 0: + while to_buy > 0: quote = m.quote() - ask = quote.get('ask') + ask = quote.get('ask', 0) + bid = quote.get('bid') - if ask and ask < upper_limit: + if ask < upper_limit: - r = m.buy(quote['askSize'], quote['ask'], order_type='fill-or-kill') + r = m.buy(quote['askSize'], ask, order_type='fill-or-kill') - to_send -= 1 + to_buy -= r['totalFilled'] + print("Bought {}, {} remaining".format(r['totalFilled'], to_buy)) - orders[r['id']] = r - - orders = update_orders(m, orders) - filled += update_filled(orders) else: time.sleep(1) - + print('done') def update_orders(m, orders): """ update order status """ return {o: m.order_status(o) for o in orders} def update_filled(orders): """ Remove filled orders and update our count. """ closed = [o for o in orders if not orders[o]['open']] #remove and sum filled orders filled = sum(orders.pop(o)['totalFilled'] for o in closed) return filled if __name__ == '__main__': main()
Add some (ineffective) score maximizing attempts
## Code Before: import time from stock_awesome.obj import market def main(): """ Algorithm: Wait for an ask, then send a fill or kill for the quantity of the ask at the ask price. """ m = market.StockAPI('WEB29978261', 'NOWUEX', 'BBCM') #collection of orders placed orders = {} filled = 0 upper_limit = 3300 #try to buy 100000 to_send = 1000 while to_send > 0: quote = m.quote() ask = quote.get('ask') if ask and ask < upper_limit: r = m.buy(quote['askSize'], quote['ask'], order_type='fill-or-kill') to_send -= 1 orders[r['id']] = r orders = update_orders(m, orders) filled += update_filled(orders) else: time.sleep(1) def update_orders(m, orders): """ update order status """ return {o: m.order_status(o) for o in orders} def update_filled(orders): """ Remove filled orders and update our count. """ closed = [o for o in orders if not orders[o]['open']] #remove and sum filled orders filled = sum(orders.pop(o)['totalFilled'] for o in closed) return filled if __name__ == '__main__': main() ## Instruction: Add some (ineffective) score maximizing attempts ## Code After: import time from stock_awesome.obj import market def main(): """ Algorithm: Wait for an ask, then send a fill or kill for the quantity of the ask at the ask price. """ m = market.StockAPI('RAJ40214463', 'SSMCEX', 'IPSO') #collection of orders placed orders = {} filled = 0 upper_limit = 2450 #try to buy 100000 to_buy = 100000 while to_buy > 0: quote = m.quote() ask = quote.get('ask', 0) bid = quote.get('bid') if ask < upper_limit: r = m.buy(quote['askSize'], ask, order_type='fill-or-kill') to_buy -= r['totalFilled'] print("Bought {}, {} remaining".format(r['totalFilled'], to_buy)) else: time.sleep(1) print('done') def update_orders(m, orders): """ update order status """ return {o: m.order_status(o) for o in orders} def update_filled(orders): """ Remove filled orders and update our count. """ closed = [o for o in orders if not orders[o]['open']] #remove and sum filled orders filled = sum(orders.pop(o)['totalFilled'] for o in closed) return filled if __name__ == '__main__': main()
// ... existing code ... """ m = market.StockAPI('RAJ40214463', 'SSMCEX', 'IPSO') // ... modified code ... upper_limit = 2450 ... #try to buy 100000 to_buy = 100000 while to_buy > 0: quote = m.quote() ask = quote.get('ask', 0) bid = quote.get('bid') if ask < upper_limit: r = m.buy(quote['askSize'], ask, order_type='fill-or-kill') to_buy -= r['totalFilled'] print("Bought {}, {} remaining".format(r['totalFilled'], to_buy)) else: ... time.sleep(1) print('done') // ... rest of the code ...
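The accounting in the new loop is just a countdown on `totalFilled`; stubbing out the market API makes the (self-described ineffective) strategy's bookkeeping easy to check offline. The fill sizes below are invented.

```python
# Offline sketch of the buy-loop bookkeeping with the API stubbed out.
to_buy = 100000
fake_fills = [{'totalFilled': 60000}, {'totalFilled': 30000}, {'totalFilled': 10000}]

for r in fake_fills:
    to_buy -= r['totalFilled']
    print("Bought {}, {} remaining".format(r['totalFilled'], to_buy))
print('done' if to_buy <= 0 else 'still buying')
```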
98ca748996fe462cedf284ad91a74bdd30eb81f3
mopidy/__init__.py
mopidy/__init__.py
from __future__ import absolute_import, unicode_literals import platform import sys import textwrap import warnings if not (2, 7) <= sys.version_info < (3,): sys.exit( 'ERROR: Mopidy requires Python 2.7, but found %s.' % platform.python_version()) try: import gobject # noqa except ImportError: print(textwrap.dedent(""" ERROR: The gobject Python package was not found. Mopidy requires GStreamer (and GObject) to work. These are C libraries with a number of dependencies themselves, and cannot be installed with the regular Python tools like pip. Please see http://docs.mopidy.com/en/latest/installation/ for instructions on how to install the required dependencies. """)) raise warnings.filterwarnings('ignore', 'could not open display') __version__ = '0.19.4'
from __future__ import absolute_import, print_function, unicode_literals import platform import sys import textwrap import warnings if not (2, 7) <= sys.version_info < (3,): sys.exit( 'ERROR: Mopidy requires Python 2.7, but found %s.' % platform.python_version()) try: import gobject # noqa except ImportError: print(textwrap.dedent(""" ERROR: The gobject Python package was not found. Mopidy requires GStreamer (and GObject) to work. These are C libraries with a number of dependencies themselves, and cannot be installed with the regular Python tools like pip. Please see http://docs.mopidy.com/en/latest/installation/ for instructions on how to install the required dependencies. """)) raise warnings.filterwarnings('ignore', 'could not open display') __version__ = '0.19.4'
Use print function instead of print statement
py3: Use print function instead of print statement
Python
apache-2.0
jcass77/mopidy,ZenithDK/mopidy,SuperStarPL/mopidy,vrs01/mopidy,jcass77/mopidy,diandiankan/mopidy,dbrgn/mopidy,SuperStarPL/mopidy,mokieyue/mopidy,rawdlite/mopidy,jcass77/mopidy,jmarsik/mopidy,mopidy/mopidy,bencevans/mopidy,mopidy/mopidy,diandiankan/mopidy,jmarsik/mopidy,vrs01/mopidy,mokieyue/mopidy,kingosticks/mopidy,SuperStarPL/mopidy,ali/mopidy,ali/mopidy,tkem/mopidy,hkariti/mopidy,glogiotatidis/mopidy,quartz55/mopidy,kingosticks/mopidy,rawdlite/mopidy,bencevans/mopidy,quartz55/mopidy,bencevans/mopidy,swak/mopidy,rawdlite/mopidy,dbrgn/mopidy,bacontext/mopidy,jodal/mopidy,ZenithDK/mopidy,diandiankan/mopidy,priestd09/mopidy,hkariti/mopidy,kingosticks/mopidy,adamcik/mopidy,jodal/mopidy,pacificIT/mopidy,quartz55/mopidy,mopidy/mopidy,swak/mopidy,priestd09/mopidy,ali/mopidy,pacificIT/mopidy,adamcik/mopidy,glogiotatidis/mopidy,mokieyue/mopidy,bacontext/mopidy,pacificIT/mopidy,pacificIT/mopidy,bacontext/mopidy,tkem/mopidy,hkariti/mopidy,swak/mopidy,mokieyue/mopidy,ZenithDK/mopidy,bacontext/mopidy,rawdlite/mopidy,ZenithDK/mopidy,glogiotatidis/mopidy,dbrgn/mopidy,jmarsik/mopidy,swak/mopidy,diandiankan/mopidy,priestd09/mopidy,SuperStarPL/mopidy,vrs01/mopidy,quartz55/mopidy,adamcik/mopidy,glogiotatidis/mopidy,jodal/mopidy,tkem/mopidy,jmarsik/mopidy,dbrgn/mopidy,hkariti/mopidy,vrs01/mopidy,bencevans/mopidy,tkem/mopidy,ali/mopidy
- from __future__ import absolute_import, unicode_literals + from __future__ import absolute_import, print_function, unicode_literals import platform import sys import textwrap import warnings if not (2, 7) <= sys.version_info < (3,): sys.exit( 'ERROR: Mopidy requires Python 2.7, but found %s.' % platform.python_version()) try: import gobject # noqa except ImportError: print(textwrap.dedent(""" ERROR: The gobject Python package was not found. Mopidy requires GStreamer (and GObject) to work. These are C libraries with a number of dependencies themselves, and cannot be installed with the regular Python tools like pip. Please see http://docs.mopidy.com/en/latest/installation/ for instructions on how to install the required dependencies. """)) raise warnings.filterwarnings('ignore', 'could not open display') __version__ = '0.19.4'
Use print function instead of print statement
## Code Before: from __future__ import absolute_import, unicode_literals import platform import sys import textwrap import warnings if not (2, 7) <= sys.version_info < (3,): sys.exit( 'ERROR: Mopidy requires Python 2.7, but found %s.' % platform.python_version()) try: import gobject # noqa except ImportError: print(textwrap.dedent(""" ERROR: The gobject Python package was not found. Mopidy requires GStreamer (and GObject) to work. These are C libraries with a number of dependencies themselves, and cannot be installed with the regular Python tools like pip. Please see http://docs.mopidy.com/en/latest/installation/ for instructions on how to install the required dependencies. """)) raise warnings.filterwarnings('ignore', 'could not open display') __version__ = '0.19.4' ## Instruction: Use print function instead of print statement ## Code After: from __future__ import absolute_import, print_function, unicode_literals import platform import sys import textwrap import warnings if not (2, 7) <= sys.version_info < (3,): sys.exit( 'ERROR: Mopidy requires Python 2.7, but found %s.' % platform.python_version()) try: import gobject # noqa except ImportError: print(textwrap.dedent(""" ERROR: The gobject Python package was not found. Mopidy requires GStreamer (and GObject) to work. These are C libraries with a number of dependencies themselves, and cannot be installed with the regular Python tools like pip. Please see http://docs.mopidy.com/en/latest/installation/ for instructions on how to install the required dependencies. """)) raise warnings.filterwarnings('ignore', 'could not open display') __version__ = '0.19.4'
# ... existing code ... from __future__ import absolute_import, print_function, unicode_literals # ... rest of the code ...
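What the added `__future__` import changes is easiest to see with a multi-argument call on Python 2, which is the interpreter this file still targets:

```python
from __future__ import print_function

# With the import, this is a real function call on Python 2 and prints:
#   hello world
print("hello", "world")

# Without it, Python 2 would parse the same line as the print *statement*
# applied to a tuple, printing:
#   ('hello', 'world')
```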
37903904cd0b1a8c4a04811b4a10a16606f9d7b0
doc/jsdoc_conf.py
doc/jsdoc_conf.py
from __future__ import unicode_literals from common_conf import * SITEURL = ".." TEMPLATE = "doc/theme/templates/jsdoc.html" TITLE = "NuvolaKit 3.0 JavaScript API Reference"
from __future__ import unicode_literals from common_conf import * SITEURL = ".." TEMPLATE = "doc/theme/templates/jsdoc.html" TITLE = "NuvolaKit 3.0 JavaScript API Reference" INTERLINKS = { "doc": "../", "tiliado": TILIADOWEB, }
Add interlinks urls for doc and tiliado
Add interlinks urls for doc and tiliado Signed-off-by: Jiří Janoušek <[email protected]>
Python
bsd-2-clause
tiliado/nuvolaruntime,tiliado/nuvolaruntime,tiliado/nuvolaruntime,tiliado/nuvolaruntime
from __future__ import unicode_literals from common_conf import * SITEURL = ".." TEMPLATE = "doc/theme/templates/jsdoc.html" TITLE = "NuvolaKit 3.0 JavaScript API Reference" + INTERLINKS = { + "doc": "../", + "tiliado": TILIADOWEB, + } +
Add interlinks urls for doc and tiliado
## Code Before: from __future__ import unicode_literals from common_conf import * SITEURL = ".." TEMPLATE = "doc/theme/templates/jsdoc.html" TITLE = "NuvolaKit 3.0 JavaScript API Reference" ## Instruction: Add interlinks urls for doc and tiliado ## Code After: from __future__ import unicode_literals from common_conf import * SITEURL = ".." TEMPLATE = "doc/theme/templates/jsdoc.html" TITLE = "NuvolaKit 3.0 JavaScript API Reference" INTERLINKS = { "doc": "../", "tiliado": TILIADOWEB, }
# ... existing code ... TITLE = "NuvolaKit 3.0 JavaScript API Reference" INTERLINKS = { "doc": "../", "tiliado": TILIADOWEB, } # ... rest of the code ...
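Assuming the `INTERLINKS` mapping is consumed by Pelican's interlinks plugin, which rewrites `prefix>rest` links (treated here as an assumption, since the plugin is not shown in the record), the resolution it performs amounts to a prefix lookup. `TILIADOWEB` itself comes from `common_conf`, pulled in by the star import above.

```python
# Rough model of how an interlinks-style mapping resolves a prefixed link.
INTERLINKS = {"doc": "../", "tiliado": "https://tiliado.eu/"}  # URL is a placeholder

def resolve(link):
    prefix, sep, rest = link.partition(">")
    return INTERLINKS[prefix] + rest if sep else link

print(resolve("doc>apps/index.html"))   # -> ../apps/index.html
print(resolve("plain/relative.html"))   # unmapped links pass through unchanged
```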
4dca51fe6cd976c0312156ab32f787cecbb765a2
task_router/tests/test_views.py
task_router/tests/test_views.py
from django.test import TestCase, Client class HomePageTest(TestCase): def setUp(self): self.client = Client() def test_home_page(self): # Act response = self.client.get('/') # Assert # This is a class-based view, so we can mostly rely on Django's own # tests to make sure it works. We'll check for a bit of copy, though self.assertIn('Task Router', str(response.content))
from xmlunittest import XmlTestCase from django.test import TestCase, Client from unittest import skip class HomePageTest(TestCase, XmlTestCase): def setUp(self): self.client = Client() def test_home_page(self): # Act response = self.client.get('/') # Assert # This is a class-based view, so we can mostly rely on Django's own # tests to make sure it works. We'll check for a bit of copy, though self.assertIn('Task Router', str(response.content)) @skip("WIP") def test_incoming_call(self): # Act response = self.client.get('/') root = self.assertXmlDocument(response.data) say = root.xpath('./Gather/say()') # <Response> # <Gather action="/call/enqueue" numDigits="1" timeout="5"> # <Say>For ACME Rockets, press one.</Say> # <Say>For ACME TNT, press two.</Say> # </Gather> # </Response> self.assertEquals(2, len(say), response.data) self.assertEquals('For ACME Rockets, press one.', say[0]) self.assertEquals('For ACME TNT, press two.', say[1])
Add test for incoming call
Add test for incoming call
Python
mit
TwilioDevEd/task-router-django,TwilioDevEd/task-router-django,TwilioDevEd/task-router-django
+ from xmlunittest import XmlTestCase from django.test import TestCase, Client + from unittest import skip - - class HomePageTest(TestCase): + class HomePageTest(TestCase, XmlTestCase): def setUp(self): self.client = Client() def test_home_page(self): # Act response = self.client.get('/') # Assert # This is a class-based view, so we can mostly rely on Django's own # tests to make sure it works. We'll check for a bit of copy, though self.assertIn('Task Router', str(response.content)) + @skip("WIP") + def test_incoming_call(self): + # Act + response = self.client.get('/') + + root = self.assertXmlDocument(response.data) + say = root.xpath('./Gather/say()') + # <Response> + # <Gather action="/call/enqueue" numDigits="1" timeout="5"> + # <Say>For ACME Rockets, press one.</Say> + # <Say>For ACME TNT, press two.</Say> + # </Gather> + # </Response> + self.assertEquals(2, len(say), response.data) + self.assertEquals('For ACME Rockets, press one.', say[0]) + self.assertEquals('For ACME TNT, press two.', say[1]) +
Add test for incoming call
## Code Before: from django.test import TestCase, Client class HomePageTest(TestCase): def setUp(self): self.client = Client() def test_home_page(self): # Act response = self.client.get('/') # Assert # This is a class-based view, so we can mostly rely on Django's own # tests to make sure it works. We'll check for a bit of copy, though self.assertIn('Task Router', str(response.content)) ## Instruction: Add test for incoming call ## Code After: from xmlunittest import XmlTestCase from django.test import TestCase, Client from unittest import skip class HomePageTest(TestCase, XmlTestCase): def setUp(self): self.client = Client() def test_home_page(self): # Act response = self.client.get('/') # Assert # This is a class-based view, so we can mostly rely on Django's own # tests to make sure it works. We'll check for a bit of copy, though self.assertIn('Task Router', str(response.content)) @skip("WIP") def test_incoming_call(self): # Act response = self.client.get('/') root = self.assertXmlDocument(response.data) say = root.xpath('./Gather/say()') # <Response> # <Gather action="/call/enqueue" numDigits="1" timeout="5"> # <Say>For ACME Rockets, press one.</Say> # <Say>For ACME TNT, press two.</Say> # </Gather> # </Response> self.assertEquals(2, len(say), response.data) self.assertEquals('For ACME Rockets, press one.', say[0]) self.assertEquals('For ACME TNT, press two.', say[1])
... self.assertIn('Task Router', str(response.content)) @skip("WIP") def test_incoming_call(self): # Act response = self.client.get('/') root = self.assertXmlDocument(response.data) say = root.xpath('./Gather/say()') # <Response> # <Gather action="/call/enqueue" numDigits="1" timeout="5"> # <Say>For ACME Rockets, press one.</Say> # <Say>For ACME TNT, press two.</Say> # </Gather> # </Response> self.assertEquals(2, len(say), response.data) self.assertEquals('For ACME Rockets, press one.', say[0]) self.assertEquals('For ACME TNT, press two.', say[1]) ...
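The skipped test sketches an assertion against TwiML like the XML in its comment. Even with the `say` name fixed it remains WIP: the XPath `say()` is not a valid expression, and Django test responses expose `content`, not `data`. A self-contained version of the check it is reaching for, using plain lxml on a hard-coded body (invented here), looks like this:

```python
# Self-contained sketch of the intended assertion.
from lxml import etree

twiml = b"""<Response>
  <Gather action="/call/enqueue" numDigits="1" timeout="5">
    <Say>For ACME Rockets, press one.</Say>
    <Say>For ACME TNT, press two.</Say>
  </Gather>
</Response>"""

root = etree.fromstring(twiml)
say = root.xpath('./Gather/Say/text()')   # note: element name + text(), not say()
assert len(say) == 2
assert say[0] == 'For ACME Rockets, press one.'
assert say[1] == 'For ACME TNT, press two.'
```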
a72cf5997439533d7ce74d6c4fc50d1189466c1b
peloid/app/shell/service.py
peloid/app/shell/service.py
from twisted.cred import portal from twisted.conch.checkers import SSHPublicKeyDatabase from carapace.util import ssh as util from peloid.app import mud from peloid.app.shell import gameshell, setupshell def getGameShellFactory(**namespace): """ The "namespace" kwargs here contains the passed objects that will be accessible via the shell, namely: * "app" * "services" These two are passed in the call to peloid.app.service.makeService. """ game = mud.Game() sshRealm = gameshell.TerminalRealm(namespace, game) sshPortal = portal.Portal(sshRealm) factory = gameshell.GameShellFactory(sshPortal) factory.privateKeys = {'ssh-rsa': util.getPrivKey()} factory.publicKeys = {'ssh-rsa': util.getPubKey()} factory.portal.registerChecker(SSHPublicKeyDatabase()) return factory def getSetupShellFactory(**namespace): return setupshell.SetupShellServerFactory(namespace)
from twisted.cred import portal from twisted.conch.checkers import SSHPublicKeyDatabase from carapace.util import ssh as util from peloid import const from peloid.app import mud from peloid.app.shell import gameshell, setupshell def getGameShellFactory(**namespace): """ The "namespace" kwargs here contains the passed objects that will be accessible via the shell, namely: * "app" * "services" These two are passed in the call to peloid.app.service.makeService. """ game = mud.Game() game.setMode(const.modes.lobby) sshRealm = gameshell.TerminalRealm(namespace, game) sshPortal = portal.Portal(sshRealm) factory = gameshell.GameShellFactory(sshPortal) factory.privateKeys = {'ssh-rsa': util.getPrivKey()} factory.publicKeys = {'ssh-rsa': util.getPubKey()} factory.portal.registerChecker(SSHPublicKeyDatabase()) return factory def getSetupShellFactory(**namespace): return setupshell.SetupShellServerFactory(namespace)
Set initial mode to lobby.
Set initial mode to lobby.
Python
mit
oubiwann/peloid
from twisted.cred import portal from twisted.conch.checkers import SSHPublicKeyDatabase from carapace.util import ssh as util + from peloid import const from peloid.app import mud from peloid.app.shell import gameshell, setupshell def getGameShellFactory(**namespace): """ The "namespace" kwargs here contains the passed objects that will be accessible via the shell, namely: * "app" * "services" These two are passed in the call to peloid.app.service.makeService. """ game = mud.Game() + game.setMode(const.modes.lobby) sshRealm = gameshell.TerminalRealm(namespace, game) sshPortal = portal.Portal(sshRealm) factory = gameshell.GameShellFactory(sshPortal) factory.privateKeys = {'ssh-rsa': util.getPrivKey()} factory.publicKeys = {'ssh-rsa': util.getPubKey()} factory.portal.registerChecker(SSHPublicKeyDatabase()) return factory def getSetupShellFactory(**namespace): return setupshell.SetupShellServerFactory(namespace) -
Set initial mode to lobby.
## Code Before: from twisted.cred import portal from twisted.conch.checkers import SSHPublicKeyDatabase from carapace.util import ssh as util from peloid.app import mud from peloid.app.shell import gameshell, setupshell def getGameShellFactory(**namespace): """ The "namespace" kwargs here contains the passed objects that will be accessible via the shell, namely: * "app" * "services" These two are passed in the call to peloid.app.service.makeService. """ game = mud.Game() sshRealm = gameshell.TerminalRealm(namespace, game) sshPortal = portal.Portal(sshRealm) factory = gameshell.GameShellFactory(sshPortal) factory.privateKeys = {'ssh-rsa': util.getPrivKey()} factory.publicKeys = {'ssh-rsa': util.getPubKey()} factory.portal.registerChecker(SSHPublicKeyDatabase()) return factory def getSetupShellFactory(**namespace): return setupshell.SetupShellServerFactory(namespace) ## Instruction: Set initial mode to lobby. ## Code After: from twisted.cred import portal from twisted.conch.checkers import SSHPublicKeyDatabase from carapace.util import ssh as util from peloid import const from peloid.app import mud from peloid.app.shell import gameshell, setupshell def getGameShellFactory(**namespace): """ The "namespace" kwargs here contains the passed objects that will be accessible via the shell, namely: * "app" * "services" These two are passed in the call to peloid.app.service.makeService. """ game = mud.Game() game.setMode(const.modes.lobby) sshRealm = gameshell.TerminalRealm(namespace, game) sshPortal = portal.Portal(sshRealm) factory = gameshell.GameShellFactory(sshPortal) factory.privateKeys = {'ssh-rsa': util.getPrivKey()} factory.publicKeys = {'ssh-rsa': util.getPubKey()} factory.portal.registerChecker(SSHPublicKeyDatabase()) return factory def getSetupShellFactory(**namespace): return setupshell.SetupShellServerFactory(namespace)
// ... existing code ... from peloid import const from peloid.app import mud // ... modified code ... game = mud.Game() game.setMode(const.modes.lobby) sshRealm = gameshell.TerminalRealm(namespace, game) // ... rest of the code ...
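Neither `const.modes` nor `Game.setMode` is shown in this record, so the stand-ins below are pure assumptions about their shape; the point is only that the factory now hands out a game in a well-defined lobby state instead of an unset mode.

```python
# Hypothetical stand-ins for peloid's const.modes and mud.Game.setMode.
class modes:
    lobby = "lobby"
    setup = "setup"
    playing = "playing"

class Game(object):
    mode = None
    def setMode(self, mode):
        self.mode = mode

game = Game()
game.setMode(modes.lobby)
assert game.mode == modes.lobby   # every new game starts in the lobby
```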
acd84f19d8d8820aecdba62bf4d0c97a2d4bdf34
src/source_weather/source_weather.py
src/source_weather/source_weather.py
from src.source import Source class SourceMock(Source): """Add a funny key with a funny value in the given dict""" def __init__(self, funny_message="Java.OutOfMemoryError" funny_key="Who's there ?"): self.funny_message = funny_message self.funny_key = funny_key def enrichment(self, data_dict): data_dict[self.funny_key] = self.funny_message return data_dict def keywords(self): return {self.funny_key}
from src.source import Source from . import weather class SourceWeaver(Source): """ Through Open Weather Map generates today weather and expected weather for next days, if possible """ def enrichment(self, data_dict): if default.FIELD_COORDINATES in data_dict: lat, lon = data_dict[default.FIELD_COORDINATES] data_dict[default.FIELD_WEATHER] = weather.actual(lat, lon) if default.FIELD_DATE in data_dict: date = data_dict[default.FIELD_DATE] if weather.is_predictable(date): data_dict[default.FIELD_WEATHER_PREDICTED] = weather.predicted(lat, lon)[str(default.FIELD_DATE)] return data_dict def keywords(self): return {default.FIELD_WEATHER_PREDICTED, default.FIELD_WEATHER}
Access to actual or predicted weather done
Access to actual or predicted weather done
Python
unlicense
Aluriak/24hducode2016,Aluriak/24hducode2016
from src.source import Source + from . import weather - class SourceMock(Source): + class SourceWeaver(Source): - """Add a funny key with a funny value in the given dict""" + """ + Through Open Weather Map generates today weather and + expected weather for next days, if possible + """ - def __init__(self, funny_message="Java.OutOfMemoryError" - funny_key="Who's there ?"): - self.funny_message = funny_message - self.funny_key = funny_key def enrichment(self, data_dict): - data_dict[self.funny_key] = self.funny_message + if default.FIELD_COORDINATES in data_dict: + lat, lon = data_dict[default.FIELD_COORDINATES] + data_dict[default.FIELD_WEATHER] = weather.actual(lat, lon) + if default.FIELD_DATE in data_dict: + date = data_dict[default.FIELD_DATE] + if weather.is_predictable(date): + data_dict[default.FIELD_WEATHER_PREDICTED] = weather.predicted(lat, lon)[str(default.FIELD_DATE)] + return data_dict def keywords(self): - return {self.funny_key} + return {default.FIELD_WEATHER_PREDICTED, + default.FIELD_WEATHER}
Access to actual or predicted weather done
## Code Before: from src.source import Source class SourceMock(Source): """Add a funny key with a funny value in the given dict""" def __init__(self, funny_message="Java.OutOfMemoryError" funny_key="Who's there ?"): self.funny_message = funny_message self.funny_key = funny_key def enrichment(self, data_dict): data_dict[self.funny_key] = self.funny_message return data_dict def keywords(self): return {self.funny_key} ## Instruction: Access to actual or predicted weather done ## Code After: from src.source import Source from . import weather class SourceWeaver(Source): """ Through Open Weather Map generates today weather and expected weather for next days, if possible """ def enrichment(self, data_dict): if default.FIELD_COORDINATES in data_dict: lat, lon = data_dict[default.FIELD_COORDINATES] data_dict[default.FIELD_WEATHER] = weather.actual(lat, lon) if default.FIELD_DATE in data_dict: date = data_dict[default.FIELD_DATE] if weather.is_predictable(date): data_dict[default.FIELD_WEATHER_PREDICTED] = weather.predicted(lat, lon)[str(default.FIELD_DATE)] return data_dict def keywords(self): return {default.FIELD_WEATHER_PREDICTED, default.FIELD_WEATHER}
# ... existing code ... from src.source import Source from . import weather class SourceWeaver(Source): """ Through Open Weather Map generates today weather and expected weather for next days, if possible """ # ... modified code ... def enrichment(self, data_dict): if default.FIELD_COORDINATES in data_dict: lat, lon = data_dict[default.FIELD_COORDINATES] data_dict[default.FIELD_WEATHER] = weather.actual(lat, lon) if default.FIELD_DATE in data_dict: date = data_dict[default.FIELD_DATE] if weather.is_predictable(date): data_dict[default.FIELD_WEATHER_PREDICTED] = weather.predicted(lat, lon)[str(default.FIELD_DATE)] return data_dict ... def keywords(self): return {default.FIELD_WEATHER_PREDICTED, default.FIELD_WEATHER} # ... rest of the code ...
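Note that the new code reads `default.FIELD_*` constants without importing a `default` module in the snippet, so that import presumably lives elsewhere (or is missing). The sketch below therefore treats the field names as plain strings and stubs the `weather` helpers, since neither is defined in this record.

```python
# Offline sketch of the enrichment flow with the weather helpers stubbed out.
FIELD_COORDINATES, FIELD_DATE, FIELD_WEATHER = 'coordinates', 'date', 'weather'

def actual(lat, lon):          # stand-in for weather.actual
    return {'temp_c': 4.0, 'sky': 'overcast'}

data = {FIELD_COORDINATES: (47.21, -1.55), FIELD_DATE: '2016-01-30'}
if FIELD_COORDINATES in data:
    lat, lon = data[FIELD_COORDINATES]
    data[FIELD_WEATHER] = actual(lat, lon)
print(data)
```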
88f0c284b01bf5b4545fe63bdd1fde7cc66ad937
us_ignite/apps/admin.py
us_ignite/apps/admin.py
from django.contrib import admin from us_ignite.apps.models import (Application, ApplicationURL, ApplicationImage, Domain, Feature) class ApplicationURLInline(admin.TabularInline): model = ApplicationURL class ApplicationImageInline(admin.TabularInline): model = ApplicationImage class ApplicationAdmin(admin.ModelAdmin): list_display = ('name', 'slug', 'stage', 'status') search_fields = ['name', 'slug', 'short_description', 'description'] list_filter = ['stage', 'status', 'created'] date_hierarchy = 'created' inlines = [ApplicationURLInline, ApplicationImageInline] class DomainAdmin(admin.ModelAdmin): list_display = ('name', 'slug') class FeatureAdmin(admin. ModelAdmin): list_display = ('name', 'slug') admin.site.register(Application, ApplicationAdmin) admin.site.register(Domain, DomainAdmin) admin.site.register(Feature, FeatureAdmin)
from django.contrib import admin from us_ignite.apps.models import (Application, ApplicationURL, ApplicationImage, Domain, Feature, Page, PageApplication) class ApplicationURLInline(admin.TabularInline): model = ApplicationURL class ApplicationImageInline(admin.TabularInline): model = ApplicationImage class ApplicationAdmin(admin.ModelAdmin): list_display = ('name', 'slug', 'stage', 'status') search_fields = ['name', 'slug', 'short_description', 'description'] list_filter = ['stage', 'status', 'created'] date_hierarchy = 'created' inlines = [ApplicationURLInline, ApplicationImageInline] class DomainAdmin(admin.ModelAdmin): list_display = ('name', 'slug') class FeatureAdmin(admin.ModelAdmin): list_display = ('name', 'slug') class PageApplicationInline(admin.TabularInline): raw_id_fields = ('application', ) model = PageApplication class PageAdmin(admin.ModelAdmin): list_display = ('name', 'slug', 'status', 'created', ) list_filter = ('status', 'created', ) date_hierarchy = 'created' inlines = [PageApplicationInline] admin.site.register(Application, ApplicationAdmin) admin.site.register(Domain, DomainAdmin) admin.site.register(Feature, FeatureAdmin) admin.site.register(Page, PageAdmin)
Add admin to add Applications to the Pages.
Add admin to add Applications to the Pages. https://github.com/madewithbytes/us_ignite/issues/79 The applications can be added to a page and ordered in the admin.
Python
bsd-3-clause
us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite
from django.contrib import admin from us_ignite.apps.models import (Application, ApplicationURL, - ApplicationImage, Domain, Feature) + ApplicationImage, Domain, Feature, + Page, PageApplication) class ApplicationURLInline(admin.TabularInline): model = ApplicationURL class ApplicationImageInline(admin.TabularInline): model = ApplicationImage class ApplicationAdmin(admin.ModelAdmin): list_display = ('name', 'slug', 'stage', 'status') search_fields = ['name', 'slug', 'short_description', 'description'] list_filter = ['stage', 'status', 'created'] date_hierarchy = 'created' inlines = [ApplicationURLInline, ApplicationImageInline] class DomainAdmin(admin.ModelAdmin): list_display = ('name', 'slug') - class FeatureAdmin(admin. ModelAdmin): + class FeatureAdmin(admin.ModelAdmin): list_display = ('name', 'slug') + + + class PageApplicationInline(admin.TabularInline): + raw_id_fields = ('application', ) + model = PageApplication + + + class PageAdmin(admin.ModelAdmin): + list_display = ('name', 'slug', 'status', 'created', ) + list_filter = ('status', 'created', ) + date_hierarchy = 'created' + inlines = [PageApplicationInline] admin.site.register(Application, ApplicationAdmin) admin.site.register(Domain, DomainAdmin) admin.site.register(Feature, FeatureAdmin) + admin.site.register(Page, PageAdmin)
Add admin to add Applications to the Pages.
## Code Before: from django.contrib import admin from us_ignite.apps.models import (Application, ApplicationURL, ApplicationImage, Domain, Feature) class ApplicationURLInline(admin.TabularInline): model = ApplicationURL class ApplicationImageInline(admin.TabularInline): model = ApplicationImage class ApplicationAdmin(admin.ModelAdmin): list_display = ('name', 'slug', 'stage', 'status') search_fields = ['name', 'slug', 'short_description', 'description'] list_filter = ['stage', 'status', 'created'] date_hierarchy = 'created' inlines = [ApplicationURLInline, ApplicationImageInline] class DomainAdmin(admin.ModelAdmin): list_display = ('name', 'slug') class FeatureAdmin(admin. ModelAdmin): list_display = ('name', 'slug') admin.site.register(Application, ApplicationAdmin) admin.site.register(Domain, DomainAdmin) admin.site.register(Feature, FeatureAdmin) ## Instruction: Add admin to add Applications to the Pages. ## Code After: from django.contrib import admin from us_ignite.apps.models import (Application, ApplicationURL, ApplicationImage, Domain, Feature, Page, PageApplication) class ApplicationURLInline(admin.TabularInline): model = ApplicationURL class ApplicationImageInline(admin.TabularInline): model = ApplicationImage class ApplicationAdmin(admin.ModelAdmin): list_display = ('name', 'slug', 'stage', 'status') search_fields = ['name', 'slug', 'short_description', 'description'] list_filter = ['stage', 'status', 'created'] date_hierarchy = 'created' inlines = [ApplicationURLInline, ApplicationImageInline] class DomainAdmin(admin.ModelAdmin): list_display = ('name', 'slug') class FeatureAdmin(admin.ModelAdmin): list_display = ('name', 'slug') class PageApplicationInline(admin.TabularInline): raw_id_fields = ('application', ) model = PageApplication class PageAdmin(admin.ModelAdmin): list_display = ('name', 'slug', 'status', 'created', ) list_filter = ('status', 'created', ) date_hierarchy = 'created' inlines = [PageApplicationInline] admin.site.register(Application, ApplicationAdmin) admin.site.register(Domain, DomainAdmin) admin.site.register(Feature, FeatureAdmin) admin.site.register(Page, PageAdmin)
... from us_ignite.apps.models import (Application, ApplicationURL, ApplicationImage, Domain, Feature, Page, PageApplication) ... class FeatureAdmin(admin.ModelAdmin): list_display = ('name', 'slug') class PageApplicationInline(admin.TabularInline): raw_id_fields = ('application', ) model = PageApplication class PageAdmin(admin.ModelAdmin): list_display = ('name', 'slug', 'status', 'created', ) list_filter = ('status', 'created', ) date_hierarchy = 'created' inlines = [PageApplicationInline] ... admin.site.register(Feature, FeatureAdmin) admin.site.register(Page, PageAdmin) ...
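The inline assumes a `PageApplication` through-model linking pages to applications; its real definition lives in `us_ignite.apps.models` and is not shown here. Since the commit message says applications can be *ordered* in the admin, that model plausibly looks along these hypothetical lines (field names are guesses):

```python
# Hypothetical shape of the through model the inline edits.
from django.db import models

class PageApplication(models.Model):
    page = models.ForeignKey('Page')
    application = models.ForeignKey('Application')
    order = models.PositiveIntegerField(default=0)

    class Meta:
        ordering = ('order',)
```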
ed350a7387c376538f51a8a7a8cfde5469baba8a
tests/testutils.py
tests/testutils.py
import psycopg2 import os import getpass def get_pg_connection(): return psycopg2.connect( "dbname=bedquilt_test user={}".format(getpass.getuser()) )
import psycopg2 import os import getpass # CREATE DATABASE bedquilt_test # WITH OWNER = {{owner}} # ENCODING = 'UTF8' # TABLESPACE = pg_default # LC_COLLATE = 'en_GB.UTF-8' # LC_CTYPE = 'en_GB.UTF-8' # CONNECTION LIMIT = -1; def get_pg_connection(): return psycopg2.connect( "dbname=bedquilt_test user={}".format(getpass.getuser()) )
Add the sql to create the test database
Add the sql to create the test database
Python
mit
BedquiltDB/bedquilt-core
import psycopg2 import os import getpass + + + # CREATE DATABASE bedquilt_test + # WITH OWNER = {{owner}} + # ENCODING = 'UTF8' + # TABLESPACE = pg_default + # LC_COLLATE = 'en_GB.UTF-8' + # LC_CTYPE = 'en_GB.UTF-8' + # CONNECTION LIMIT = -1; def get_pg_connection(): return psycopg2.connect( "dbname=bedquilt_test user={}".format(getpass.getuser()) )
Add the sql to create the test database
## Code Before: import psycopg2 import os import getpass def get_pg_connection(): return psycopg2.connect( "dbname=bedquilt_test user={}".format(getpass.getuser()) ) ## Instruction: Add the sql to create the test database ## Code After: import psycopg2 import os import getpass # CREATE DATABASE bedquilt_test # WITH OWNER = {{owner}} # ENCODING = 'UTF8' # TABLESPACE = pg_default # LC_COLLATE = 'en_GB.UTF-8' # LC_CTYPE = 'en_GB.UTF-8' # CONNECTION LIMIT = -1; def get_pg_connection(): return psycopg2.connect( "dbname=bedquilt_test user={}".format(getpass.getuser()) )
# ... existing code ... import getpass # CREATE DATABASE bedquilt_test # WITH OWNER = {{owner}} # ENCODING = 'UTF8' # TABLESPACE = pg_default # LC_COLLATE = 'en_GB.UTF-8' # LC_CTYPE = 'en_GB.UTF-8' # CONNECTION LIMIT = -1; # ... rest of the code ...
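The commented SQL can also be run from Python when bootstrapping CI; a rough equivalent follows, with the owner and locale clauses dropped for brevity (it requires connecting as a role allowed to create databases).

```python
# One-off helper to create the test database the suite expects.
import getpass
import psycopg2

conn = psycopg2.connect("dbname=postgres user={}".format(getpass.getuser()))
conn.autocommit = True   # CREATE DATABASE refuses to run inside a transaction
cur = conn.cursor()
cur.execute("CREATE DATABASE bedquilt_test ENCODING 'UTF8'")
cur.close()
conn.close()
```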
345056a7a6a801013cdc340f0f9cd8b4f5d48173
convert-bookmarks.py
convert-bookmarks.py
from argparse import ArgumentParser from bs4 import BeautifulSoup from datetime import datetime, timezone from bson import json_util import json parser = ArgumentParser(description='Convert Netscape bookmarks to JSON') parser.add_argument(dest='filenames', metavar='filename', nargs='+') parser.add_argument('-t', '--tag', metavar='tag', dest='tags', action='append', help='add tag to bookmarks, repeat \ for multiple tags') parser.add_argument('-m', '--mongodb', action='store_true', dest='mongo', help='output in mongodb import format') args = parser.parse_args() for filename in args.filenames: soup = BeautifulSoup(open(filename, encoding='utf8'), "html5lib") for link in soup.find_all('a'): bookmark = {} # url and title bookmark['url'] = link.get('href') bookmark['title'] = link.string.strip() if link.string\ else bookmark['url'] # add date secs = link.get('add_date') date = datetime.fromtimestamp(int(secs), tz=timezone.utc) bookmark['add_date'] = date # tags tags = link.get('tags') bookmark['tags'] = tags.split(',') if tags else [] if args.tags: bookmark['tags'] += args.tags # comment sibling = link.parent.next_sibling bookmark['comment'] = \ sibling.string.strip() if sibling and sibling.name == 'dd' \ else '' # make json if args.mongo: print(json_util.dumps(bookmark, sort_keys=False, indent=4)) else: print(json.dumps(bookmark, sort_keys=False, indent=4))
from argparse import ArgumentParser from bs4 import BeautifulSoup import json parser = ArgumentParser(description='Convert Netscape bookmarks to JSON') parser.add_argument(dest='filenames', metavar='filename', nargs='+') parser.add_argument('-t', '--tag', metavar='tag', dest='tags', action='append', help='add tag to bookmarks, repeat \ for multiple tags') args = parser.parse_args() for filename in args.filenames: soup = BeautifulSoup(open(filename, encoding='utf8'), "html5lib") for link in soup.find_all('a'): bookmark = {} # url and title bookmark['url'] = link.get('href') bookmark['title'] = link.string.strip() if link.string\ else bookmark['url'] # tags tags = link.get('tags') bookmark['tags'] = tags.split(',') if tags else [] if args.tags: bookmark['tags'] += args.tags # comment sibling = link.parent.next_sibling bookmark['comment'] = \ sibling.string.strip() if sibling and sibling.name == 'dd' \ else '' print(json.dumps(bookmark, sort_keys=False, indent=4))
Remove bson, datetime, and mongo
Remove bson, datetime, and mongo. Current BSON fails to work; datetime can't be serialized by json_util; mongodb is not needed, just use JSON.
Python
mit
jhh/netscape-bookmark-converter
from argparse import ArgumentParser from bs4 import BeautifulSoup - from datetime import datetime, timezone - from bson import json_util import json - parser = ArgumentParser(description='Convert Netscape bookmarks to JSON') parser.add_argument(dest='filenames', metavar='filename', nargs='+') parser.add_argument('-t', '--tag', metavar='tag', dest='tags', action='append', help='add tag to bookmarks, repeat \ for multiple tags') - parser.add_argument('-m', '--mongodb', action='store_true', dest='mongo', - help='output in mongodb import format') args = parser.parse_args() for filename in args.filenames: soup = BeautifulSoup(open(filename, encoding='utf8'), "html5lib") for link in soup.find_all('a'): bookmark = {} # url and title bookmark['url'] = link.get('href') bookmark['title'] = link.string.strip() if link.string\ else bookmark['url'] - # add date - secs = link.get('add_date') - date = datetime.fromtimestamp(int(secs), tz=timezone.utc) - bookmark['add_date'] = date # tags tags = link.get('tags') bookmark['tags'] = tags.split(',') if tags else [] if args.tags: bookmark['tags'] += args.tags # comment sibling = link.parent.next_sibling bookmark['comment'] = \ sibling.string.strip() if sibling and sibling.name == 'dd' \ - else '' + else '' - # make json - if args.mongo: - print(json_util.dumps(bookmark, sort_keys=False, indent=4)) - else: - print(json.dumps(bookmark, sort_keys=False, indent=4)) + print(json.dumps(bookmark, sort_keys=False, indent=4))
Remove bson, datetime, and mongo
## Code Before: from argparse import ArgumentParser from bs4 import BeautifulSoup from datetime import datetime, timezone from bson import json_util import json parser = ArgumentParser(description='Convert Netscape bookmarks to JSON') parser.add_argument(dest='filenames', metavar='filename', nargs='+') parser.add_argument('-t', '--tag', metavar='tag', dest='tags', action='append', help='add tag to bookmarks, repeat \ for multiple tags') parser.add_argument('-m', '--mongodb', action='store_true', dest='mongo', help='output in mongodb import format') args = parser.parse_args() for filename in args.filenames: soup = BeautifulSoup(open(filename, encoding='utf8'), "html5lib") for link in soup.find_all('a'): bookmark = {} # url and title bookmark['url'] = link.get('href') bookmark['title'] = link.string.strip() if link.string\ else bookmark['url'] # add date secs = link.get('add_date') date = datetime.fromtimestamp(int(secs), tz=timezone.utc) bookmark['add_date'] = date # tags tags = link.get('tags') bookmark['tags'] = tags.split(',') if tags else [] if args.tags: bookmark['tags'] += args.tags # comment sibling = link.parent.next_sibling bookmark['comment'] = \ sibling.string.strip() if sibling and sibling.name == 'dd' \ else '' # make json if args.mongo: print(json_util.dumps(bookmark, sort_keys=False, indent=4)) else: print(json.dumps(bookmark, sort_keys=False, indent=4)) ## Instruction: Remove bson, datetime, and mongo ## Code After: from argparse import ArgumentParser from bs4 import BeautifulSoup import json parser = ArgumentParser(description='Convert Netscape bookmarks to JSON') parser.add_argument(dest='filenames', metavar='filename', nargs='+') parser.add_argument('-t', '--tag', metavar='tag', dest='tags', action='append', help='add tag to bookmarks, repeat \ for multiple tags') args = parser.parse_args() for filename in args.filenames: soup = BeautifulSoup(open(filename, encoding='utf8'), "html5lib") for link in soup.find_all('a'): bookmark = {} # url and title bookmark['url'] = link.get('href') bookmark['title'] = link.string.strip() if link.string\ else bookmark['url'] # tags tags = link.get('tags') bookmark['tags'] = tags.split(',') if tags else [] if args.tags: bookmark['tags'] += args.tags # comment sibling = link.parent.next_sibling bookmark['comment'] = \ sibling.string.strip() if sibling and sibling.name == 'dd' \ else '' print(json.dumps(bookmark, sort_keys=False, indent=4))
# ... existing code ... from bs4 import BeautifulSoup import json # ... modified code ... for multiple tags') args = parser.parse_args() ... else bookmark['url'] # tags ... sibling.string.strip() if sibling and sibling.name == 'dd' \ else '' print(json.dumps(bookmark, sort_keys=False, indent=4)) # ... rest of the code ...
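A tiny end-to-end check of the parsing logic on an inline Netscape-style fragment (the HTML below is invented). It exercises the same href/title/tags/comment extraction as the script, minus argparse and the date code that the commit removed:

```python
# Minimal round trip over one bookmark entry.
import json
from bs4 import BeautifulSoup

html = ('<DL><DT><A HREF="https://example.com/" TAGS="python,tools">Example</A>'
        '<DD>a comment</DD></DL>')
soup = BeautifulSoup(html, "html5lib")
link = soup.find('a')
sibling = link.parent.next_sibling
bookmark = {
    'url': link.get('href'),
    'title': link.string.strip(),
    'tags': link.get('tags').split(','),
    'comment': sibling.string.strip() if sibling and sibling.name == 'dd' else '',
}
print(json.dumps(bookmark, sort_keys=False, indent=4))
```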
13ba81df82f2c43838066ec9cd0fa1222324349f
srsly/util.py
srsly/util.py
from __future__ import unicode_literals from pathlib import Path import sys def force_path(location, require_exists=True): if not isinstance(location, Path): location = Path(location) if require_exists and not location.exists(): raise ValueError("Can't read file: {}".format(location)) return location def force_string(location): if sys.version_info[0] == 2: # Python 2 return str(location).decode("utf8") return str(location)
from __future__ import unicode_literals from pathlib import Path import sys is_python2 = sys.version_info[0] == 2 is_python3 = sys.version_info[0] == 3 if is_python2: basestring_ = basestring # noqa: F821 else: basestring_ = str def force_path(location, require_exists=True): if not isinstance(location, Path): location = Path(location) if require_exists and not location.exists(): raise ValueError("Can't read file: {}".format(location)) return location def force_string(location): if isinstance(location, basestring_): return location if sys.version_info[0] == 2: # Python 2 return str(location).decode("utf8") return str(location)
Improve compat handling in force_string
Improve compat handling in force_string If we know we already have a string, no need to force it into a strinbg
Python
mit
explosion/srsly,explosion/srsly,explosion/srsly,explosion/srsly
from __future__ import unicode_literals from pathlib import Path import sys + + + is_python2 = sys.version_info[0] == 2 + is_python3 = sys.version_info[0] == 3 + + if is_python2: + basestring_ = basestring # noqa: F821 + else: + basestring_ = str def force_path(location, require_exists=True): if not isinstance(location, Path): location = Path(location) if require_exists and not location.exists(): raise ValueError("Can't read file: {}".format(location)) return location def force_string(location): + if isinstance(location, basestring_): + return location if sys.version_info[0] == 2: # Python 2 return str(location).decode("utf8") return str(location)
Improve compat handling in force_string
## Code Before: from __future__ import unicode_literals from pathlib import Path import sys def force_path(location, require_exists=True): if not isinstance(location, Path): location = Path(location) if require_exists and not location.exists(): raise ValueError("Can't read file: {}".format(location)) return location def force_string(location): if sys.version_info[0] == 2: # Python 2 return str(location).decode("utf8") return str(location) ## Instruction: Improve compat handling in force_string ## Code After: from __future__ import unicode_literals from pathlib import Path import sys is_python2 = sys.version_info[0] == 2 is_python3 = sys.version_info[0] == 3 if is_python2: basestring_ = basestring # noqa: F821 else: basestring_ = str def force_path(location, require_exists=True): if not isinstance(location, Path): location = Path(location) if require_exists and not location.exists(): raise ValueError("Can't read file: {}".format(location)) return location def force_string(location): if isinstance(location, basestring_): return location if sys.version_info[0] == 2: # Python 2 return str(location).decode("utf8") return str(location)
# ... existing code ... import sys is_python2 = sys.version_info[0] == 2 is_python3 = sys.version_info[0] == 3 if is_python2: basestring_ = basestring # noqa: F821 else: basestring_ = str # ... modified code ... def force_string(location): if isinstance(location, basestring_): return location if sys.version_info[0] == 2: # Python 2 # ... rest of the code ...
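The `basestring_` alias is the standard compatibility trick for a single `isinstance` check that accepts `str` on Python 3 and both `str` and `unicode` on Python 2. A self-contained sketch of the short-circuit it enables in `force_string`:

```python
import sys
from pathlib import Path

basestring_ = basestring if sys.version_info[0] == 2 else str  # noqa: F821

def force_string(location):
    if isinstance(location, basestring_):
        return location        # already a string: skip the conversion round-trip
    return str(location)       # e.g. a Path object

print(force_string(Path('/tmp/data.json')))   # /tmp/data.json
print(force_string('already-a-string'))       # returned unchanged
```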
071f42389aef9c57cb4f0a0434d8297ccba05ab2
openquake/hazardlib/general.py
openquake/hazardlib/general.py
import os import subprocess def git_suffix(fname): """ :returns: `<short git hash>` if Git repository found """ try: po = subprocess.Popen( ['git', 'rev-parse', '--short', 'HEAD'], stdout=subprocess.PIPE, stderr=open(os.devnull, 'w'), cwd=os.path.dirname(fname)) return "-git" + po.stdout.read().strip() except: # trapping everything on purpose; git may not be installed or it # may not work properly return ''
import os import subprocess def git_suffix(fname): """ :returns: `<short git hash>` if Git repository found """ try: po = subprocess.check_output( ['git', 'rev-parse', '--short', 'HEAD'], cwd=os.path.dirname(fname)) return "-git" + po.stdout.read().strip() except: # trapping everything on purpose; git may not be installed or it # may not work properly return ''
Replace subprocess.Popen with check_output to avoid git zombies
Replace subprocess.Popen with check_output to avoid git zombies
Python
agpl-3.0
larsbutler/oq-hazardlib,larsbutler/oq-hazardlib,gem/oq-engine,gem/oq-hazardlib,mmpagani/oq-hazardlib,gem/oq-engine,gem/oq-engine,mmpagani/oq-hazardlib,silviacanessa/oq-hazardlib,gem/oq-engine,larsbutler/oq-hazardlib,silviacanessa/oq-hazardlib,silviacanessa/oq-hazardlib,gem/oq-hazardlib,gem/oq-engine,vup1120/oq-hazardlib,g-weatherill/oq-hazardlib,vup1120/oq-hazardlib,g-weatherill/oq-hazardlib,g-weatherill/oq-hazardlib,mmpagani/oq-hazardlib,rcgee/oq-hazardlib,silviacanessa/oq-hazardlib,gem/oq-hazardlib,g-weatherill/oq-hazardlib,rcgee/oq-hazardlib,vup1120/oq-hazardlib
import os import subprocess def git_suffix(fname): """ :returns: `<short git hash>` if Git repository found """ try: - po = subprocess.Popen( + po = subprocess.check_output( - ['git', 'rev-parse', '--short', 'HEAD'], stdout=subprocess.PIPE, + ['git', 'rev-parse', '--short', 'HEAD'], - stderr=open(os.devnull, 'w'), cwd=os.path.dirname(fname)) + cwd=os.path.dirname(fname)) return "-git" + po.stdout.read().strip() except: # trapping everything on purpose; git may not be installed or it # may not work properly return ''
Replace subprocess.Popen with check_output to avoid git zombies
## Code Before: import os import subprocess def git_suffix(fname): """ :returns: `<short git hash>` if Git repository found """ try: po = subprocess.Popen( ['git', 'rev-parse', '--short', 'HEAD'], stdout=subprocess.PIPE, stderr=open(os.devnull, 'w'), cwd=os.path.dirname(fname)) return "-git" + po.stdout.read().strip() except: # trapping everything on purpose; git may not be installed or it # may not work properly return '' ## Instruction: Replace subprocess.Popen with check_output to avoid git zombies ## Code After: import os import subprocess def git_suffix(fname): """ :returns: `<short git hash>` if Git repository found """ try: po = subprocess.check_output( ['git', 'rev-parse', '--short', 'HEAD'], cwd=os.path.dirname(fname)) return "-git" + po.stdout.read().strip() except: # trapping everything on purpose; git may not be installed or it # may not work properly return ''
// ... existing code ... try: po = subprocess.check_output( ['git', 'rev-parse', '--short', 'HEAD'], cwd=os.path.dirname(fname)) return "-git" + po.stdout.read().strip() // ... rest of the code ...
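One caveat with the patched version as committed: `subprocess.check_output` returns the captured output itself (bytes on Python 3), not a `Popen` object, so `po.stdout.read()` raises `AttributeError` once a repository is found, and the bare `except` then silently returns `''`. A sketch of the same idea without that trap (an editor-written variant, not the code the project shipped):

```python
import os
import subprocess

def git_suffix(fname):
    """Return '-git<short hash>' if fname sits inside a Git checkout, else ''."""
    try:
        out = subprocess.check_output(
            ['git', 'rev-parse', '--short', 'HEAD'],
            cwd=os.path.dirname(fname) or '.')
        return '-git' + out.strip().decode('ascii')
    except Exception:
        # git may be missing or this may not be a repository; check_output
        # also raises CalledProcessError on a nonzero exit status
        return ''
```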
f8aae767944cb6fe6163eb3eb99d08b12458060f
GoogleCalendarV3/setup.py
GoogleCalendarV3/setup.py
from distutils.core import setup setup( name='GoogleCalendarV3', version='0.1.1', author='Ashutosh Priyadarshy', author_email='[email protected]', packages=['google_calendar_v3', 'google_calendar_v3.test'], scripts=['bin/example.py'], url='http://www.github.com/priyadarshy/google-calendar-v3/', license='LICENSE.txt', description='Python Client for Google Calendar API V3.', long_description=open('README.txt').read(), install_requires=[ "requests-oauthlib >= 0.4.0", ], )
from distutils.core import setup setup( name='GoogleCalendarV3', version='0.1.2', author='Ashutosh Priyadarshy', author_email='[email protected]', packages=['google_calendar_v3', 'google_calendar_v3.test'], scripts=['bin/example.py'], url='http://www.github.com/priyadarshy/google-calendar-v3/', license='LICENSE.txt', description='Python Client for Google Calendar API V3.', long_description=open('README.txt').read(), install_requires=[ "requests >= 2.3.0", "requests-oauthlib >= 0.4.0" ], )
Update dependencies and update version.
Update dependencies and update version.
Python
apache-2.0
priyadarshy/google-calendar-v3,mbrondani/google-calendar-v3
from distutils.core import setup setup( name='GoogleCalendarV3', - version='0.1.1', + version='0.1.2', author='Ashutosh Priyadarshy', author_email='[email protected]', packages=['google_calendar_v3', 'google_calendar_v3.test'], scripts=['bin/example.py'], url='http://www.github.com/priyadarshy/google-calendar-v3/', license='LICENSE.txt', description='Python Client for Google Calendar API V3.', long_description=open('README.txt').read(), install_requires=[ + "requests >= 2.3.0", - "requests-oauthlib >= 0.4.0", + "requests-oauthlib >= 0.4.0" ], )
Update dependencies and update version.
## Code Before: from distutils.core import setup setup( name='GoogleCalendarV3', version='0.1.1', author='Ashutosh Priyadarshy', author_email='[email protected]', packages=['google_calendar_v3', 'google_calendar_v3.test'], scripts=['bin/example.py'], url='http://www.github.com/priyadarshy/google-calendar-v3/', license='LICENSE.txt', description='Python Client for Google Calendar API V3.', long_description=open('README.txt').read(), install_requires=[ "requests-oauthlib >= 0.4.0", ], ) ## Instruction: Update dependencies and update version. ## Code After: from distutils.core import setup setup( name='GoogleCalendarV3', version='0.1.2', author='Ashutosh Priyadarshy', author_email='[email protected]', packages=['google_calendar_v3', 'google_calendar_v3.test'], scripts=['bin/example.py'], url='http://www.github.com/priyadarshy/google-calendar-v3/', license='LICENSE.txt', description='Python Client for Google Calendar API V3.', long_description=open('README.txt').read(), install_requires=[ "requests >= 2.3.0", "requests-oauthlib >= 0.4.0" ], )
# ... existing code ... name='GoogleCalendarV3', version='0.1.2', author='Ashutosh Priyadarshy', # ... modified code ... install_requires=[ "requests >= 2.3.0", "requests-oauthlib >= 0.4.0" ], # ... rest of the code ...
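The two `>=` specifiers only constrain what pip resolves at install time. If the same minimums need asserting at runtime, `pkg_resources` accepts the identical requirement strings (an illustrative aside, not part of this package):

```python
import pkg_resources

for req in ('requests >= 2.3.0', 'requests-oauthlib >= 0.4.0'):
    try:
        pkg_resources.require(req)
        print('satisfied:', req)
    except pkg_resources.DistributionNotFound:
        print('missing:', req)
    except pkg_resources.VersionConflict as exc:
        print('conflict:', exc)
```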
db6222adea234921f82a843846778f5327566aaf
homebrew/logger.py
homebrew/logger.py
import logging import sys logger = logging.getLogger() logFormatter = logging.Formatter("%(message)s") consoleHandler = logging.StreamHandler(sys.stdout) consoleHandler.setFormatter(logFormatter) logger.addHandler(consoleHandler) logger.setLevel(logging.INFO) UNDERLINE_SYMBOL = "-" def log_title(logline): logger.info(logline) logger.info(len(logline) * UNDERLINE_SYMBOL) def log_blank_line(): logger.info("") def log( installed, packages_not_needed_by_other, packages_needed_by_other, package_dependencies, ): log_title("Installed packages:") logger.info(", ".join(sorted(installed))) log_blank_line() log_title("No package depends on these packages:") logger.info(", ".join(sorted(packages_not_needed_by_other))) log_blank_line() log_title("These packages are needed by other packages:") for package, needed_by in sorted(packages_needed_by_other.items()): logger.info("Package {} is needed by: {}".format(package, ", ".join(needed_by))) log_blank_line() log_title("These packages depend on other packages:") for package, package_dependencies in sorted(package_dependencies.items()): logger.info( "Package {} depends on: {}".format(package, ", ".join(package_dependencies)) ) log_blank_line()
import logging import sys logger = logging.getLogger() formatter = logging.Formatter("%(message)s") handler = logging.StreamHandler(sys.stdout) handler.setFormatter(formatter) logger.addHandler(handler) logger.setLevel(logging.INFO) UNDERLINE_SYMBOL = "-" def log_title(logline): logger.info(logline) logger.info(len(logline) * UNDERLINE_SYMBOL) def log_blank_line(): logger.info("") def log( installed, packages_not_needed_by_other, packages_needed_by_other, package_dependencies, ): log_title("Installed packages:") logger.info(", ".join(sorted(installed))) log_blank_line() log_title("No package depends on these packages:") logger.info(", ".join(sorted(packages_not_needed_by_other))) log_blank_line() log_title("These packages are needed by other packages:") for package, needed_by in sorted(packages_needed_by_other.items()): logger.info("Package {} is needed by: {}".format(package, ", ".join(needed_by))) log_blank_line() log_title("These packages depend on other packages:") for package, package_dependencies in sorted(package_dependencies.items()): logger.info( "Package {} depends on: {}".format(package, ", ".join(package_dependencies)) ) log_blank_line()
Rename variables used for setting up logging
Rename variables used for setting up logging
Python
isc
igroen/homebrew
import logging import sys logger = logging.getLogger() - logFormatter = logging.Formatter("%(message)s") + formatter = logging.Formatter("%(message)s") - consoleHandler = logging.StreamHandler(sys.stdout) + handler = logging.StreamHandler(sys.stdout) - consoleHandler.setFormatter(logFormatter) + handler.setFormatter(formatter) - logger.addHandler(consoleHandler) + logger.addHandler(handler) logger.setLevel(logging.INFO) UNDERLINE_SYMBOL = "-" def log_title(logline): logger.info(logline) logger.info(len(logline) * UNDERLINE_SYMBOL) def log_blank_line(): logger.info("") def log( installed, packages_not_needed_by_other, packages_needed_by_other, package_dependencies, ): log_title("Installed packages:") logger.info(", ".join(sorted(installed))) log_blank_line() log_title("No package depends on these packages:") logger.info(", ".join(sorted(packages_not_needed_by_other))) log_blank_line() log_title("These packages are needed by other packages:") for package, needed_by in sorted(packages_needed_by_other.items()): logger.info("Package {} is needed by: {}".format(package, ", ".join(needed_by))) log_blank_line() log_title("These packages depend on other packages:") for package, package_dependencies in sorted(package_dependencies.items()): logger.info( "Package {} depends on: {}".format(package, ", ".join(package_dependencies)) ) log_blank_line()
Rename variables used for setting up logging
## Code Before: import logging import sys logger = logging.getLogger() logFormatter = logging.Formatter("%(message)s") consoleHandler = logging.StreamHandler(sys.stdout) consoleHandler.setFormatter(logFormatter) logger.addHandler(consoleHandler) logger.setLevel(logging.INFO) UNDERLINE_SYMBOL = "-" def log_title(logline): logger.info(logline) logger.info(len(logline) * UNDERLINE_SYMBOL) def log_blank_line(): logger.info("") def log( installed, packages_not_needed_by_other, packages_needed_by_other, package_dependencies, ): log_title("Installed packages:") logger.info(", ".join(sorted(installed))) log_blank_line() log_title("No package depends on these packages:") logger.info(", ".join(sorted(packages_not_needed_by_other))) log_blank_line() log_title("These packages are needed by other packages:") for package, needed_by in sorted(packages_needed_by_other.items()): logger.info("Package {} is needed by: {}".format(package, ", ".join(needed_by))) log_blank_line() log_title("These packages depend on other packages:") for package, package_dependencies in sorted(package_dependencies.items()): logger.info( "Package {} depends on: {}".format(package, ", ".join(package_dependencies)) ) log_blank_line() ## Instruction: Rename variables used for setting up logging ## Code After: import logging import sys logger = logging.getLogger() formatter = logging.Formatter("%(message)s") handler = logging.StreamHandler(sys.stdout) handler.setFormatter(formatter) logger.addHandler(handler) logger.setLevel(logging.INFO) UNDERLINE_SYMBOL = "-" def log_title(logline): logger.info(logline) logger.info(len(logline) * UNDERLINE_SYMBOL) def log_blank_line(): logger.info("") def log( installed, packages_not_needed_by_other, packages_needed_by_other, package_dependencies, ): log_title("Installed packages:") logger.info(", ".join(sorted(installed))) log_blank_line() log_title("No package depends on these packages:") logger.info(", ".join(sorted(packages_not_needed_by_other))) log_blank_line() log_title("These packages are needed by other packages:") for package, needed_by in sorted(packages_needed_by_other.items()): logger.info("Package {} is needed by: {}".format(package, ", ".join(needed_by))) log_blank_line() log_title("These packages depend on other packages:") for package, package_dependencies in sorted(package_dependencies.items()): logger.info( "Package {} depends on: {}".format(package, ", ".join(package_dependencies)) ) log_blank_line()
# ... existing code ... logger = logging.getLogger() formatter = logging.Formatter("%(message)s") handler = logging.StreamHandler(sys.stdout) handler.setFormatter(formatter) logger.addHandler(handler) logger.setLevel(logging.INFO) # ... rest of the code ...
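The rename is purely cosmetic; the handler wiring and output are unchanged. For reference, the configured pipeline behaves like this minimal standalone version, including the underlining trick `log_title` relies on:

```python
import logging
import sys

logger = logging.getLogger('demo')
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(logging.Formatter('%(message)s'))
logger.addHandler(handler)
logger.setLevel(logging.INFO)

title = 'Installed packages:'
logger.info(title)
logger.info('-' * len(title))   # prints a dashed underline of the same width
```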
7577c51486169e8026a74cd680e2f4b58e4ea60a
models/phase3_eval/process_sparser.py
models/phase3_eval/process_sparser.py
from __future__ import absolute_import, print_function, unicode_literals from builtins import dict, str import os import glob from indra import sparser base_folder = 'sources/sparser-20170330' sentences_folder = 'sources/sparser-20170210' def get_file_names(base_dir): fnames = glob.glob(os.path.join(base_dir, '*.xml')) return fnames def get_file_stmts(fname): with open(fname, 'rb') as fh: print(fname) xml_bytes = fh.read() sp = sparser.process_xml(xml_bytes) if sp is None: print('ERROR: Could not process %s' % fname.split('/')[-1]) print('----') return [] return sp.statements def read_stmts(folder): fnames = get_file_names(folder) all_stmts = [] for fname in fnames: st = get_file_stmts(fname) all_stmts += st return all_stmts if __name__ == '__main__': stmts = read_stmts(base_folder)
from __future__ import absolute_import, print_function, unicode_literals from builtins import dict, str import os import glob import json from indra import sparser from indra.statements import stmts_from_json, get_valid_location, \ get_valid_residue base_folder = os.environ['HOME'] + \ '/data/darpa/phase3_eval/sources/sparser-20170530' def get_file_names(base_dir): fnames = glob.glob(os.path.join(base_dir, '*.json')) return fnames def get_file_stmts(fname): with open(fname, 'rb') as fh: print(fname) try: jd = json.load(fh) except ValueError as e: print(e) return [] for st in jd: if st.get('type') == 'Translocation': for loc in ['from_location', 'to_location']: val = st.get(loc) try: loc_valid = get_valid_location(val) st[loc] = loc_valid except: st[loc] = None try: res = st['residue'] if res is False: st['residue'] = None except: pass try: res = st.get('residue') if res: get_valid_residue(res) except: st['residue'] = None try: res = st['position'] if res is False: st['position'] = None except: pass stmts = stmts_from_json(jd) return stmts def read_stmts(folder): fnames = get_file_names(folder) all_stmts = [] for fname in fnames: st = get_file_stmts(fname) all_stmts += st return all_stmts if __name__ == '__main__': stmts = read_stmts(base_folder)
Read and fix Sparser jsons
Read and fix Sparser jsons
Python
bsd-2-clause
pvtodorov/indra,sorgerlab/belpy,johnbachman/belpy,johnbachman/belpy,bgyori/indra,sorgerlab/indra,sorgerlab/belpy,pvtodorov/indra,sorgerlab/indra,bgyori/indra,johnbachman/belpy,sorgerlab/indra,bgyori/indra,johnbachman/indra,sorgerlab/belpy,pvtodorov/indra,johnbachman/indra,pvtodorov/indra,johnbachman/indra
from __future__ import absolute_import, print_function, unicode_literals from builtins import dict, str import os import glob + import json from indra import sparser + from indra.statements import stmts_from_json, get_valid_location, \ + get_valid_residue - base_folder = 'sources/sparser-20170330' - sentences_folder = 'sources/sparser-20170210' + base_folder = os.environ['HOME'] + \ + '/data/darpa/phase3_eval/sources/sparser-20170530' def get_file_names(base_dir): - fnames = glob.glob(os.path.join(base_dir, '*.xml')) + fnames = glob.glob(os.path.join(base_dir, '*.json')) return fnames def get_file_stmts(fname): with open(fname, 'rb') as fh: print(fname) + try: + jd = json.load(fh) + except ValueError as e: - xml_bytes = fh.read() - sp = sparser.process_xml(xml_bytes) - if sp is None: - print('ERROR: Could not process %s' % fname.split('/')[-1]) - print('----') + print(e) return [] - return sp.statements + for st in jd: + if st.get('type') == 'Translocation': + for loc in ['from_location', 'to_location']: + val = st.get(loc) + try: + loc_valid = get_valid_location(val) + st[loc] = loc_valid + except: + st[loc] = None + try: + res = st['residue'] + if res is False: + st['residue'] = None + except: + pass + + try: + res = st.get('residue') + if res: + get_valid_residue(res) + except: + st['residue'] = None + + try: + res = st['position'] + if res is False: + st['position'] = None + except: + pass + + stmts = stmts_from_json(jd) + return stmts def read_stmts(folder): fnames = get_file_names(folder) all_stmts = [] for fname in fnames: st = get_file_stmts(fname) all_stmts += st return all_stmts + if __name__ == '__main__': stmts = read_stmts(base_folder)
Read and fix Sparser jsons
## Code Before: from __future__ import absolute_import, print_function, unicode_literals from builtins import dict, str import os import glob from indra import sparser base_folder = 'sources/sparser-20170330' sentences_folder = 'sources/sparser-20170210' def get_file_names(base_dir): fnames = glob.glob(os.path.join(base_dir, '*.xml')) return fnames def get_file_stmts(fname): with open(fname, 'rb') as fh: print(fname) xml_bytes = fh.read() sp = sparser.process_xml(xml_bytes) if sp is None: print('ERROR: Could not process %s' % fname.split('/')[-1]) print('----') return [] return sp.statements def read_stmts(folder): fnames = get_file_names(folder) all_stmts = [] for fname in fnames: st = get_file_stmts(fname) all_stmts += st return all_stmts if __name__ == '__main__': stmts = read_stmts(base_folder) ## Instruction: Read and fix Sparser jsons ## Code After: from __future__ import absolute_import, print_function, unicode_literals from builtins import dict, str import os import glob import json from indra import sparser from indra.statements import stmts_from_json, get_valid_location, \ get_valid_residue base_folder = os.environ['HOME'] + \ '/data/darpa/phase3_eval/sources/sparser-20170530' def get_file_names(base_dir): fnames = glob.glob(os.path.join(base_dir, '*.json')) return fnames def get_file_stmts(fname): with open(fname, 'rb') as fh: print(fname) try: jd = json.load(fh) except ValueError as e: print(e) return [] for st in jd: if st.get('type') == 'Translocation': for loc in ['from_location', 'to_location']: val = st.get(loc) try: loc_valid = get_valid_location(val) st[loc] = loc_valid except: st[loc] = None try: res = st['residue'] if res is False: st['residue'] = None except: pass try: res = st.get('residue') if res: get_valid_residue(res) except: st['residue'] = None try: res = st['position'] if res is False: st['position'] = None except: pass stmts = stmts_from_json(jd) return stmts def read_stmts(folder): fnames = get_file_names(folder) all_stmts = [] for fname in fnames: st = get_file_stmts(fname) all_stmts += st return all_stmts if __name__ == '__main__': stmts = read_stmts(base_folder)
// ... existing code ... import glob import json from indra import sparser from indra.statements import stmts_from_json, get_valid_location, \ get_valid_residue base_folder = os.environ['HOME'] + \ '/data/darpa/phase3_eval/sources/sparser-20170530' // ... modified code ... def get_file_names(base_dir): fnames = glob.glob(os.path.join(base_dir, '*.json')) return fnames ... print(fname) try: jd = json.load(fh) except ValueError as e: print(e) return [] for st in jd: if st.get('type') == 'Translocation': for loc in ['from_location', 'to_location']: val = st.get(loc) try: loc_valid = get_valid_location(val) st[loc] = loc_valid except: st[loc] = None try: res = st['residue'] if res is False: st['residue'] = None except: pass try: res = st.get('residue') if res: get_valid_residue(res) except: st['residue'] = None try: res = st['position'] if res is False: st['position'] = None except: pass stmts = stmts_from_json(jd) return stmts ... if __name__ == '__main__': // ... rest of the code ...
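The repeated try/except blocks all serve one purpose: coercing Sparser's occasional JSON `false` (and invalid residue or location strings) to `None` before `stmts_from_json` validates the statements. The core of that normalisation, shown on toy data rather than real Sparser output:

```python
import json

raw = '[{"type": "Phosphorylation", "residue": false, "position": false}]'
statements = json.loads(raw)
for st in statements:
    for key in ('residue', 'position'):
        if st.get(key) is False:   # JSON false stands in for a missing value
            st[key] = None
print(statements)
# [{'type': 'Phosphorylation', 'residue': None, 'position': None}]
```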
394f5832c6d3ff3efefbc5c21163adcdedd9a9bb
sale_stock_availability/__openerp__.py
sale_stock_availability/__openerp__.py
{ 'name': 'Stock availability in sales order line', 'version': '0.1', 'category': 'Tools', 'description': """ Stock availability in sales order line ====================================== * Add two groups. One for seeing stock on sale orders and other to see only if or not available * Add an option in warehouse to disable stock warning IMPORTANT: ---------- * This module could break some warnings as the ones implemented by "warning" module * If you dont disable warning and give a user availbility to see only "true/false" on sale order stock, he can see stock if the warning is raised """, 'author': 'Moldeo Interactive & Ingenieria Adhoc', 'website': 'http://business.moldeo.coop http://ingadhoc.com/', 'images': [], 'depends': [ 'sale_stock' ], 'demo': [], 'data': [ 'sale_view.xml', 'stock_view.xml', 'security/sale_order.xml', ], 'test': [], 'installable': True, } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
{ 'name': 'Stock availability in sales order line', 'version': '0.1', 'category': 'Tools', 'description': """ Stock availability in sales order line ====================================== * Add two groups. One for seeing stock on sale orders and other to see only if or not available * Add an option in warehouse to disable stock warning IMPORTANT: ---------- * This module could break some warnings as the ones implemented by "warning" module * If you dont disable warning and give a user availbility to see only "true/false" on sale order stock, he can see stock if the warning is raised """, 'author': 'Moldeo Interactive & Ingenieria Adhoc', 'website': 'http://business.moldeo.coop http://ingadhoc.com/', 'images': [], 'depends': [ 'sale_stock' ], 'demo': [], 'data': [ 'sale_view.xml', 'stock_view.xml', 'security/sale_order.xml', ], 'test': [], 'installable': True, } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
FIX sale stock availa.. description
FIX sale stock availa.. description
Python
agpl-3.0
syci/ingadhoc-odoo-addons,HBEE/odoo-addons,jorsea/odoo-addons,adhoc-dev/odoo-addons,bmya/odoo-addons,bmya/odoo-addons,levkar/odoo-addons,bmya/odoo-addons,dvitme/odoo-addons,ClearCorp/account-financial-tools,jorsea/odoo-addons,sysadminmatmoz/ingadhoc,ingadhoc/account-payment,HBEE/odoo-addons,ingadhoc/stock,adhoc-dev/account-financial-tools,maljac/odoo-addons,ingadhoc/odoo-addons,ingadhoc/product,adhoc-dev/account-financial-tools,maljac/odoo-addons,ingadhoc/odoo-addons,levkar/odoo-addons,ingadhoc/account-invoicing,ingadhoc/account-financial-tools,ingadhoc/sale,levkar/odoo-addons,ingadhoc/odoo-addons,sysadminmatmoz/ingadhoc,jorsea/odoo-addons,dvitme/odoo-addons,ingadhoc/sale,dvitme/odoo-addons,syci/ingadhoc-odoo-addons,HBEE/odoo-addons,ingadhoc/account-analytic,ingadhoc/partner,adhoc-dev/odoo-addons,ingadhoc/sale,ingadhoc/product,ingadhoc/sale,sysadminmatmoz/ingadhoc,adhoc-dev/odoo-addons,ClearCorp/account-financial-tools,levkar/odoo-addons,maljac/odoo-addons,syci/ingadhoc-odoo-addons
{ 'name': 'Stock availability in sales order line', 'version': '0.1', 'category': 'Tools', 'description': """ - Stock availability in sales order line + Stock availability in sales order line - ====================================== + ====================================== - * Add two groups. One for seeing stock on sale orders and other to see only if or not available + * Add two groups. One for seeing stock on sale orders and other to see only if or not available - * Add an option in warehouse to disable stock warning + * Add an option in warehouse to disable stock warning + - IMPORTANT: + IMPORTANT: - ---------- + ---------- - * This module could break some warnings as the ones implemented by "warning" module + * This module could break some warnings as the ones implemented by "warning" module - * If you dont disable warning and give a user availbility to see only "true/false" on sale order stock, he can see stock if the warning is raised + * If you dont disable warning and give a user availbility to see only "true/false" on sale order stock, he can see stock if the warning is raised """, 'author': 'Moldeo Interactive & Ingenieria Adhoc', 'website': 'http://business.moldeo.coop http://ingadhoc.com/', 'images': [], 'depends': [ 'sale_stock' ], 'demo': [], 'data': [ 'sale_view.xml', 'stock_view.xml', 'security/sale_order.xml', ], 'test': [], 'installable': True, } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
FIX sale stock availa.. description
## Code Before: { 'name': 'Stock availability in sales order line', 'version': '0.1', 'category': 'Tools', 'description': """ Stock availability in sales order line ====================================== * Add two groups. One for seeing stock on sale orders and other to see only if or not available * Add an option in warehouse to disable stock warning IMPORTANT: ---------- * This module could break some warnings as the ones implemented by "warning" module * If you dont disable warning and give a user availbility to see only "true/false" on sale order stock, he can see stock if the warning is raised """, 'author': 'Moldeo Interactive & Ingenieria Adhoc', 'website': 'http://business.moldeo.coop http://ingadhoc.com/', 'images': [], 'depends': [ 'sale_stock' ], 'demo': [], 'data': [ 'sale_view.xml', 'stock_view.xml', 'security/sale_order.xml', ], 'test': [], 'installable': True, } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: ## Instruction: FIX sale stock availa.. description ## Code After: { 'name': 'Stock availability in sales order line', 'version': '0.1', 'category': 'Tools', 'description': """ Stock availability in sales order line ====================================== * Add two groups. One for seeing stock on sale orders and other to see only if or not available * Add an option in warehouse to disable stock warning IMPORTANT: ---------- * This module could break some warnings as the ones implemented by "warning" module * If you dont disable warning and give a user availbility to see only "true/false" on sale order stock, he can see stock if the warning is raised """, 'author': 'Moldeo Interactive & Ingenieria Adhoc', 'website': 'http://business.moldeo.coop http://ingadhoc.com/', 'images': [], 'depends': [ 'sale_stock' ], 'demo': [], 'data': [ 'sale_view.xml', 'stock_view.xml', 'security/sale_order.xml', ], 'test': [], 'installable': True, } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
# ... existing code ... 'description': """ Stock availability in sales order line ====================================== * Add two groups. One for seeing stock on sale orders and other to see only if or not available * Add an option in warehouse to disable stock warning IMPORTANT: ---------- * This module could break some warnings as the ones implemented by "warning" module * If you dont disable warning and give a user availbility to see only "true/false" on sale order stock, he can see stock if the warning is raised """, # ... rest of the code ...
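The fix only touches leading whitespace inside the triple-quoted `description`, which matters because Odoo renders that string as reStructuredText, where indentation is structural: a uniformly indented block reads as a quote, and misaligned underlines break titles. As a general Python aside (not something the manifest loader does), `textwrap.dedent` lets the source stay indented while the shipped string is flush-left:

```python
import textwrap

description = textwrap.dedent("""\
    Stock availability in sales order line
    ======================================
    * Add an option in warehouse to disable stock warning
""")
print(description)   # flush-left RST regardless of source indentation
```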
da05390fa11a12d0491caff18d38e71a1e134b82
spicedham/sqlalchemywrapper/models.py
spicedham/sqlalchemywrapper/models.py
from sqlalchemy import Column, Integer, String from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.schema import UniqueConstraint Base = declarative_base() class Store(Base): __tablename__ = 'store' id = Column(Integer, primary_key=True, autoincrement=True) key = Column(String) tag = Column(String) value = Column(String) __table_args__ = (UniqueConstraint('key', 'tag'),) def __unicode__(self): return unicode(key)
from sqlalchemy import Column, Integer, String from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.schema import PrimaryKeyConstraint Base = declarative_base() class Store(Base): __tablename__ = 'store' key = Column(String) tag = Column(String) value = Column(String) __table_args__ = (PrimaryKeyConstraint('key', 'tag'),) def __unicode__(self): return unicode(key)
Make tag and key be a composite primary key
Make tag and key be a composite primary key
Python
mpl-2.0
mozilla/spicedham,mozilla/spicedham
from sqlalchemy import Column, Integer, String from sqlalchemy.ext.declarative import declarative_base - from sqlalchemy.schema import UniqueConstraint + from sqlalchemy.schema import PrimaryKeyConstraint Base = declarative_base() class Store(Base): __tablename__ = 'store' - id = Column(Integer, primary_key=True, autoincrement=True) key = Column(String) tag = Column(String) value = Column(String) - __table_args__ = (UniqueConstraint('key', 'tag'),) + __table_args__ = (PrimaryKeyConstraint('key', 'tag'),) def __unicode__(self): return unicode(key)
Make tag and key be a composite primary key
## Code Before: from sqlalchemy import Column, Integer, String from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.schema import UniqueConstraint Base = declarative_base() class Store(Base): __tablename__ = 'store' id = Column(Integer, primary_key=True, autoincrement=True) key = Column(String) tag = Column(String) value = Column(String) __table_args__ = (UniqueConstraint('key', 'tag'),) def __unicode__(self): return unicode(key) ## Instruction: Make tag and key be a composite primary key ## Code After: from sqlalchemy import Column, Integer, String from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.schema import PrimaryKeyConstraint Base = declarative_base() class Store(Base): __tablename__ = 'store' key = Column(String) tag = Column(String) value = Column(String) __table_args__ = (PrimaryKeyConstraint('key', 'tag'),) def __unicode__(self): return unicode(key)
# ... existing code ... from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.schema import PrimaryKeyConstraint # ... modified code ... __tablename__ = 'store' key = Column(String) ... value = Column(String) __table_args__ = (PrimaryKeyConstraint('key', 'tag'),) # ... rest of the code ...
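Dropping the surrogate `id` and promoting `(key, tag)` to a composite primary key means uniqueness and lookups now go through the pair. A self-contained sketch against in-memory SQLite, using the same classic declarative API as the model above:

```python
from sqlalchemy import Column, String, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy.schema import PrimaryKeyConstraint

Base = declarative_base()

class Store(Base):
    __tablename__ = 'store'
    key = Column(String)
    tag = Column(String)
    value = Column(String)
    __table_args__ = (PrimaryKeyConstraint('key', 'tag'),)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
session.add(Store(key='spam', tag='ham', value='1'))
session.commit()
print(session.query(Store).get(('spam', 'ham')).value)   # lookup by the full pair
```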
324beaae091b2bc4699d4840ccd313aa0645b07e
nets.py
nets.py
class FeedForwardNet: pass
from layers import InputLayer, Layer, OutputLayer import math import random class FeedForwardNet(object): def __init__(self, inlayersize, layersize, outlayersize): self._inlayer = InputLayer(inlayersize) self._middlelayer = Layer(layersize) self._outlayer = OutputLayer(outlayersize) self._inlayer.connect_layer(self._middlelayer) self._middlelayer.connect_layer(self._outlayer) @property def neurons(self): return [self._inlayer.neurons, self._middlelayer.neurons, self._outlayer.neurons] def train(self, inputs, targets, verbose=False): ''' inputs: a sequence of floats that map to the input neurons targetlabels: a sequence of floats that are the desired output neuron values. ''' self._inlayer.inputs = inputs self._middlelayer.propagate() self._outlayer.propagate() self._outlayer.backpropagate1(targets) self._middlelayer.backpropagate1() self._outlayer.backpropagate2() self._middlelayer.backpropagate2() if verbose: print("Training results") print("\tInput: {0}".format(inputs)) print("\tTarget output: {0}".format(targets)) print("\tActual output: {0}".format(self._outlayer.outputs)) self.display_signals() print("") raw_input() def predict(self, inputs): ''' inputs: a sequence of floats that map to the input neurons return: a sequence of floats mapped from the output neurons ''' self._inlayer.inputs = inputs self._middlelayer.propagate() self._outlayer.propagate() return self._outlayer.outputs def display_signals(self): col1 = self._inlayer.inputs col2 = [x.signal for x in self._middlelayer.neurons] col3 = self._outlayer.outputs numrows = max(len(col1), len(col2), len(col3)) roundto = 3 #round to print("Signals") print("\tInput\tHidden\tOutput") for row in range(numrows): line = [] for col in col1, col2, col3: if len(col)-1 < row: line.append("") else: element = round(col[row], roundto) element = str(element) line.append(element) print('\t' + '\t'.join(line)) if __name__ == '__main__': f = FeedForwardNet(1, 2, 1) for i in range(50000): f.train((1, 1), (0,)) f.train((1, 0), (1,)) f.train((0, 1), (1,)) f.train((0, 0), (0,)) while True: x = input("Input: ") y = f.predict(x) print("Output: {0}".format(y))
Add main code and feed forward net class
Add main code and feed forward net class It can XOR, but sin function still fails
Python
mit
tmerr/trevornet
- class FeedForwardNet: - pass + from layers import InputLayer, Layer, OutputLayer + import math + import random + class FeedForwardNet(object): + def __init__(self, inlayersize, layersize, outlayersize): + self._inlayer = InputLayer(inlayersize) + self._middlelayer = Layer(layersize) + self._outlayer = OutputLayer(outlayersize) + + self._inlayer.connect_layer(self._middlelayer) + self._middlelayer.connect_layer(self._outlayer) + + @property + def neurons(self): + return [self._inlayer.neurons, self._middlelayer.neurons, self._outlayer.neurons] + + def train(self, inputs, targets, verbose=False): + ''' + inputs: a sequence of floats that map to the input neurons + targetlabels: a sequence of floats that are the desired output neuron + values. + ''' + + self._inlayer.inputs = inputs + self._middlelayer.propagate() + self._outlayer.propagate() + + self._outlayer.backpropagate1(targets) + self._middlelayer.backpropagate1() + + self._outlayer.backpropagate2() + self._middlelayer.backpropagate2() + + if verbose: + print("Training results") + print("\tInput: {0}".format(inputs)) + print("\tTarget output: {0}".format(targets)) + print("\tActual output: {0}".format(self._outlayer.outputs)) + self.display_signals() + print("") + raw_input() + + def predict(self, inputs): + ''' + inputs: a sequence of floats that map to the input neurons + return: a sequence of floats mapped from the output neurons + ''' + self._inlayer.inputs = inputs + self._middlelayer.propagate() + self._outlayer.propagate() + return self._outlayer.outputs + + def display_signals(self): + col1 = self._inlayer.inputs + col2 = [x.signal for x in self._middlelayer.neurons] + col3 = self._outlayer.outputs + numrows = max(len(col1), len(col2), len(col3)) + + roundto = 3 #round to + print("Signals") + print("\tInput\tHidden\tOutput") + for row in range(numrows): + line = [] + for col in col1, col2, col3: + if len(col)-1 < row: + line.append("") + else: + element = round(col[row], roundto) + element = str(element) + line.append(element) + print('\t' + '\t'.join(line)) + + if __name__ == '__main__': + f = FeedForwardNet(1, 2, 1) + + for i in range(50000): + f.train((1, 1), (0,)) + f.train((1, 0), (1,)) + f.train((0, 1), (1,)) + f.train((0, 0), (0,)) + + while True: + x = input("Input: ") + y = f.predict(x) + print("Output: {0}".format(y)) +
Add main code and feed forward net class
## Code Before: class FeedForwardNet: pass ## Instruction: Add main code and feed forward net class ## Code After: from layers import InputLayer, Layer, OutputLayer import math import random class FeedForwardNet(object): def __init__(self, inlayersize, layersize, outlayersize): self._inlayer = InputLayer(inlayersize) self._middlelayer = Layer(layersize) self._outlayer = OutputLayer(outlayersize) self._inlayer.connect_layer(self._middlelayer) self._middlelayer.connect_layer(self._outlayer) @property def neurons(self): return [self._inlayer.neurons, self._middlelayer.neurons, self._outlayer.neurons] def train(self, inputs, targets, verbose=False): ''' inputs: a sequence of floats that map to the input neurons targetlabels: a sequence of floats that are the desired output neuron values. ''' self._inlayer.inputs = inputs self._middlelayer.propagate() self._outlayer.propagate() self._outlayer.backpropagate1(targets) self._middlelayer.backpropagate1() self._outlayer.backpropagate2() self._middlelayer.backpropagate2() if verbose: print("Training results") print("\tInput: {0}".format(inputs)) print("\tTarget output: {0}".format(targets)) print("\tActual output: {0}".format(self._outlayer.outputs)) self.display_signals() print("") raw_input() def predict(self, inputs): ''' inputs: a sequence of floats that map to the input neurons return: a sequence of floats mapped from the output neurons ''' self._inlayer.inputs = inputs self._middlelayer.propagate() self._outlayer.propagate() return self._outlayer.outputs def display_signals(self): col1 = self._inlayer.inputs col2 = [x.signal for x in self._middlelayer.neurons] col3 = self._outlayer.outputs numrows = max(len(col1), len(col2), len(col3)) roundto = 3 #round to print("Signals") print("\tInput\tHidden\tOutput") for row in range(numrows): line = [] for col in col1, col2, col3: if len(col)-1 < row: line.append("") else: element = round(col[row], roundto) element = str(element) line.append(element) print('\t' + '\t'.join(line)) if __name__ == '__main__': f = FeedForwardNet(1, 2, 1) for i in range(50000): f.train((1, 1), (0,)) f.train((1, 0), (1,)) f.train((0, 1), (1,)) f.train((0, 0), (0,)) while True: x = input("Input: ") y = f.predict(x) print("Output: {0}".format(y))
... from layers import InputLayer, Layer, OutputLayer import math import random class FeedForwardNet(object): def __init__(self, inlayersize, layersize, outlayersize): self._inlayer = InputLayer(inlayersize) self._middlelayer = Layer(layersize) self._outlayer = OutputLayer(outlayersize) self._inlayer.connect_layer(self._middlelayer) self._middlelayer.connect_layer(self._outlayer) @property def neurons(self): return [self._inlayer.neurons, self._middlelayer.neurons, self._outlayer.neurons] def train(self, inputs, targets, verbose=False): ''' inputs: a sequence of floats that map to the input neurons targetlabels: a sequence of floats that are the desired output neuron values. ''' self._inlayer.inputs = inputs self._middlelayer.propagate() self._outlayer.propagate() self._outlayer.backpropagate1(targets) self._middlelayer.backpropagate1() self._outlayer.backpropagate2() self._middlelayer.backpropagate2() if verbose: print("Training results") print("\tInput: {0}".format(inputs)) print("\tTarget output: {0}".format(targets)) print("\tActual output: {0}".format(self._outlayer.outputs)) self.display_signals() print("") raw_input() def predict(self, inputs): ''' inputs: a sequence of floats that map to the input neurons return: a sequence of floats mapped from the output neurons ''' self._inlayer.inputs = inputs self._middlelayer.propagate() self._outlayer.propagate() return self._outlayer.outputs def display_signals(self): col1 = self._inlayer.inputs col2 = [x.signal for x in self._middlelayer.neurons] col3 = self._outlayer.outputs numrows = max(len(col1), len(col2), len(col3)) roundto = 3 #round to print("Signals") print("\tInput\tHidden\tOutput") for row in range(numrows): line = [] for col in col1, col2, col3: if len(col)-1 < row: line.append("") else: element = round(col[row], roundto) element = str(element) line.append(element) print('\t' + '\t'.join(line)) if __name__ == '__main__': f = FeedForwardNet(1, 2, 1) for i in range(50000): f.train((1, 1), (0,)) f.train((1, 0), (1,)) f.train((0, 1), (1,)) f.train((0, 0), (0,)) while True: x = input("Input: ") y = f.predict(x) print("Output: {0}".format(y)) ...
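The hidden layer is what makes the XOR training above work at all: a single sigmoid unit can only draw one linear boundary, and XOR needs two. A quick standalone check of that intuition with hand-picked weights instead of backpropagation (plain Python, independent of the `layers` module the script imports):

```python
import math

def sigmoid(x):
    return 1.0 / (1.0 + math.exp(-x))

def xor_net(a, b):
    h1 = sigmoid(20 * a + 20 * b - 10)   # fires if a OR b
    h2 = sigmoid(20 * a + 20 * b - 30)   # fires only if a AND b
    return sigmoid(20 * h1 - 20 * h2 - 10)

for a, b in [(0, 0), (0, 1), (1, 0), (1, 1)]:
    print(a, b, '->', round(xor_net(a, b)))   # 0, 1, 1, 0
```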
175c72d97d073a64714cebef05bd37f0221f94fa
test_octave_kernel.py
test_octave_kernel.py
"""Example use of jupyter_kernel_test, with tests for IPython.""" import sys import unittest import jupyter_kernel_test as jkt class OctaveKernelTests(jkt.KernelTests): kernel_name = "octave" language_name = "octave" code_hello_world = "disp('hello, world')" code_display_data = [ {'code': '%plot -f png\nplot([1,2,3])', 'mime': 'image/png'}, {'code': '%plot -f svg\nplot([1,2,3])', 'mime': 'image/svg+xml'} ] if sys.platform == 'darwin' else [] completion_samples = [ { 'text': 'one', 'matches': {'ones', 'onenormest'}, }, ] code_page_something = "ones?" if __name__ == '__main__': unittest.main()
"""Example use of jupyter_kernel_test, with tests for IPython.""" import sys import unittest import jupyter_kernel_test as jkt class OctaveKernelTests(jkt.KernelTests): kernel_name = "octave" language_name = "octave" code_hello_world = "disp('hello, world')" code_display_data = [ {'code': '%plot -f png\nplot([1,2,3])', 'mime': 'image/png'}, {'code': '%plot -f svg\nplot([1,2,3])', 'mime': 'image/svg+xml'} ] if sys.platform == 'darwin' else [] completion_samples = [ { 'text': 'acos', 'matches': {'acos', 'acosd', 'acosh'}, }, ] code_page_something = "ones?" if __name__ == '__main__': unittest.main()
Fix tests with Octave 5.
Fix tests with Octave 5.
Python
bsd-3-clause
Calysto/octave_kernel,Calysto/octave_kernel
"""Example use of jupyter_kernel_test, with tests for IPython.""" import sys import unittest import jupyter_kernel_test as jkt class OctaveKernelTests(jkt.KernelTests): kernel_name = "octave" language_name = "octave" code_hello_world = "disp('hello, world')" code_display_data = [ {'code': '%plot -f png\nplot([1,2,3])', 'mime': 'image/png'}, {'code': '%plot -f svg\nplot([1,2,3])', 'mime': 'image/svg+xml'} ] if sys.platform == 'darwin' else [] completion_samples = [ { - 'text': 'one', + 'text': 'acos', - 'matches': {'ones', 'onenormest'}, + 'matches': {'acos', 'acosd', 'acosh'}, }, ] code_page_something = "ones?" if __name__ == '__main__': unittest.main()
Fix tests with Octave 5.
## Code Before: """Example use of jupyter_kernel_test, with tests for IPython.""" import sys import unittest import jupyter_kernel_test as jkt class OctaveKernelTests(jkt.KernelTests): kernel_name = "octave" language_name = "octave" code_hello_world = "disp('hello, world')" code_display_data = [ {'code': '%plot -f png\nplot([1,2,3])', 'mime': 'image/png'}, {'code': '%plot -f svg\nplot([1,2,3])', 'mime': 'image/svg+xml'} ] if sys.platform == 'darwin' else [] completion_samples = [ { 'text': 'one', 'matches': {'ones', 'onenormest'}, }, ] code_page_something = "ones?" if __name__ == '__main__': unittest.main() ## Instruction: Fix tests with Octave 5. ## Code After: """Example use of jupyter_kernel_test, with tests for IPython.""" import sys import unittest import jupyter_kernel_test as jkt class OctaveKernelTests(jkt.KernelTests): kernel_name = "octave" language_name = "octave" code_hello_world = "disp('hello, world')" code_display_data = [ {'code': '%plot -f png\nplot([1,2,3])', 'mime': 'image/png'}, {'code': '%plot -f svg\nplot([1,2,3])', 'mime': 'image/svg+xml'} ] if sys.platform == 'darwin' else [] completion_samples = [ { 'text': 'acos', 'matches': {'acos', 'acosd', 'acosh'}, }, ] code_page_something = "ones?" if __name__ == '__main__': unittest.main()
... { 'text': 'acos', 'matches': {'acos', 'acosd', 'acosh'}, }, ...
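The switch from the `one*` samples to the `acos` family presumably tracks a change in Octave 5's completion output for those prefixes. Stripped of the test harness, what `completion_samples` encodes is that completing the prefix should yield at least the listed names; in set terms, roughly:

```python
expected = {'acos', 'acosd', 'acosh'}
kernel_reply = {'acos', 'acosd', 'acosh'}   # hypothetical reply from the kernel
assert expected <= kernel_reply
```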
b03dff0d6964d886f122936d097c3d4acc0582db
proper_parens.py
proper_parens.py
from __future__ import print_function from __future__ import unicode_literals def safe_input(prompt): """Return user input after catching KeyboardInterrupt and EOFError""" try: reply = raw_input(prompt) except (EOFError, KeyboardInterrupt): quit() else: return reply.decode('utf-8') # Convert input to unicode prompt = "Input a Lisp style statement '(test)': " reply = safe_input(prompt) def check_statement(value): where_open = value.find("(") where_close = value.find(")") if ((where_open == -1) and where_close != -1) or (where_open > where_close): return -1
from __future__ import print_function from __future__ import unicode_literals def safe_input(prompt): """Return user input after catching KeyboardInterrupt and EOFError""" try: reply = raw_input(prompt) except (EOFError, KeyboardInterrupt): quit() else: return reply.decode('utf-8') # Convert input to unicode prompt = "Input a Lisp style statement '(test)': " reply = safe_input(prompt) def check_statement(value): open_index = [i for i, val in enumerate(reply) if val == "("] close_index = [i for i, val in enumerate(reply) if val == ")"] paren_total_broken = [a < b for a, b in zip(open_index, close_index)] if paren_total_broken.find(False): return -1 else: return 0
Add function for broken and groundwork for other objectives
Add function for broken and groundwork for other objectives
Python
mit
constanthatz/data-structures
from __future__ import print_function from __future__ import unicode_literals def safe_input(prompt): """Return user input after catching KeyboardInterrupt and EOFError""" try: reply = raw_input(prompt) except (EOFError, KeyboardInterrupt): quit() else: return reply.decode('utf-8') # Convert input to unicode prompt = "Input a Lisp style statement '(test)': " reply = safe_input(prompt) def check_statement(value): - where_open = value.find("(") - where_close = value.find(")") + open_index = [i for i, val in enumerate(reply) if val == "("] + close_index = [i for i, val in enumerate(reply) if val == ")"] - if ((where_open == -1) and where_close != -1) or (where_open > where_close): + paren_total_broken = [a < b for a, b in zip(open_index, close_index)] + + if paren_total_broken.find(False): return -1 - + else: + return 0
Add function for broken and groundwork for other objectives
## Code Before: from __future__ import print_function from __future__ import unicode_literals def safe_input(prompt): """Return user input after catching KeyboardInterrupt and EOFError""" try: reply = raw_input(prompt) except (EOFError, KeyboardInterrupt): quit() else: return reply.decode('utf-8') # Convert input to unicode prompt = "Input a Lisp style statement '(test)': " reply = safe_input(prompt) def check_statement(value): where_open = value.find("(") where_close = value.find(")") if ((where_open == -1) and where_close != -1) or (where_open > where_close): return -1 ## Instruction: Add function for broken and groundwork for other objectives ## Code After: from __future__ import print_function from __future__ import unicode_literals def safe_input(prompt): """Return user input after catching KeyboardInterrupt and EOFError""" try: reply = raw_input(prompt) except (EOFError, KeyboardInterrupt): quit() else: return reply.decode('utf-8') # Convert input to unicode prompt = "Input a Lisp style statement '(test)': " reply = safe_input(prompt) def check_statement(value): open_index = [i for i, val in enumerate(reply) if val == "("] close_index = [i for i, val in enumerate(reply) if val == ")"] paren_total_broken = [a < b for a, b in zip(open_index, close_index)] if paren_total_broken.find(False): return -1 else: return 0
... def check_statement(value): open_index = [i for i, val in enumerate(reply) if val == "("] close_index = [i for i, val in enumerate(reply) if val == ")"] paren_total_broken = [a < b for a, b in zip(open_index, close_index)] if paren_total_broken.find(False): return -1 else: return 0 ...
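A caveat on the patched version: Python lists have no `.find` method, so `paren_total_broken.find(False)` raises `AttributeError` the first time it runs, and the index-pairing approach gets unwieldy once the open and balanced cases are added. The usual counter-based check gives the full three-way answer this exercise builds toward (a sketch, not the repository's code):

```python
def check_statement(value):
    """Return 1 if the statement is open, 0 if balanced, -1 if broken."""
    depth = 0
    for ch in value:
        if ch == '(':
            depth += 1
        elif ch == ')':
            depth -= 1
            if depth < 0:
                return -1            # a ')' with nothing left to close
    return 1 if depth else 0

assert check_statement('(test)') == 0
assert check_statement('(test') == 1
assert check_statement('test)') == -1
```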
57b4d39749021305a2d5850e642537224d30611f
requests/hooks.py
requests/hooks.py
import traceback HOOKS = ('args', 'pre_request', 'pre_send', 'post_request', 'response') def dispatch_hook(key, hooks, hook_data): """Dispatches a hook dictionary on a given piece of data.""" hooks = hooks or dict() if key in hooks: hooks = hooks.get(key) if hasattr(hooks, '__call__'): hooks = [hooks] for hook in hooks: try: _hook_data = hook(hook_data) if _hook_data is not None: hook_data = _hook_data except Exception: traceback.print_exc() return hook_data
HOOKS = ('args', 'pre_request', 'pre_send', 'post_request', 'response') def dispatch_hook(key, hooks, hook_data): """Dispatches a hook dictionary on a given piece of data.""" hooks = hooks or dict() if key in hooks: hooks = hooks.get(key) if hasattr(hooks, '__call__'): hooks = [hooks] for hook in hooks: _hook_data = hook(hook_data) if _hook_data is not None: hook_data = _hook_data return hook_data
Remove exception eating from dispatch_hook.
Remove exception eating from dispatch_hook.
Python
isc
Bluehorn/requests,revolunet/requests,revolunet/requests,psf/requests
- - import traceback HOOKS = ('args', 'pre_request', 'pre_send', 'post_request', 'response') def dispatch_hook(key, hooks, hook_data): """Dispatches a hook dictionary on a given piece of data.""" hooks = hooks or dict() if key in hooks: hooks = hooks.get(key) if hasattr(hooks, '__call__'): hooks = [hooks] for hook in hooks: - try: - _hook_data = hook(hook_data) + _hook_data = hook(hook_data) - if _hook_data is not None: + if _hook_data is not None: - hook_data = _hook_data + hook_data = _hook_data - except Exception: - traceback.print_exc() return hook_data
Remove exception eating from dispatch_hook.
## Code Before: import traceback HOOKS = ('args', 'pre_request', 'pre_send', 'post_request', 'response') def dispatch_hook(key, hooks, hook_data): """Dispatches a hook dictionary on a given piece of data.""" hooks = hooks or dict() if key in hooks: hooks = hooks.get(key) if hasattr(hooks, '__call__'): hooks = [hooks] for hook in hooks: try: _hook_data = hook(hook_data) if _hook_data is not None: hook_data = _hook_data except Exception: traceback.print_exc() return hook_data ## Instruction: Remove exception eating from dispatch_hook. ## Code After: HOOKS = ('args', 'pre_request', 'pre_send', 'post_request', 'response') def dispatch_hook(key, hooks, hook_data): """Dispatches a hook dictionary on a given piece of data.""" hooks = hooks or dict() if key in hooks: hooks = hooks.get(key) if hasattr(hooks, '__call__'): hooks = [hooks] for hook in hooks: _hook_data = hook(hook_data) if _hook_data is not None: hook_data = _hook_data return hook_data
# ... existing code ... # ... modified code ... for hook in hooks: _hook_data = hook(hook_data) if _hook_data is not None: hook_data = _hook_data # ... rest of the code ...
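Behaviourally, the deletion means an exception raised inside a hook now propagates to the caller instead of being printed and swallowed, so failures become visible rather than yielding a silently unmodified value. A self-contained sketch of the dispatcher pattern with one hook attached (the hook and data here are illustrative):

```python
def dispatch_hook(key, hooks, hook_data):
    hooks = hooks or {}
    entries = hooks.get(key, [])
    if callable(entries):
        entries = [entries]
    for hook in entries:
        result = hook(hook_data)
        if result is not None:       # None means "leave the data unchanged"
            hook_data = result
    return hook_data

def add_header(data):
    data.setdefault('headers', {})['X-Demo'] = '1'
    return data

print(dispatch_hook('pre_request', {'pre_request': add_header},
                    {'url': 'http://example.com'}))
```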
43978f8c709d5f195229deb6ec7817a1815a4db6
sass_processor/storage.py
sass_processor/storage.py
from django.conf import settings from django.contrib.staticfiles.finders import get_finders from django.core.files.storage import FileSystemStorage class SassFileStorage(FileSystemStorage): def __init__(self, location=None, base_url=None, *args, **kwargs): if location is None: location = getattr(settings, 'SASS_PROCESSOR_ROOT', settings.STATIC_ROOT) if base_url is None: base_url = settings.STATIC_URL super(SassFileStorage, self).__init__(location, base_url, *args, **kwargs) try: from storages.backends.s3boto3 import S3Boto3Storage class SassS3Boto3Storage(S3Boto3Storage): base_url = '{}.s3.amazonaws.com'.format(settings.AWS_STORAGE_BUCKET_NAME) except ImportError: pass def find_file(path): for finder in get_finders(): result = finder.find(path) if result: return result
from django.conf import settings from django.contrib.staticfiles.finders import get_finders from django.core.files.storage import FileSystemStorage class SassFileStorage(FileSystemStorage): def __init__(self, location=None, base_url=None, *args, **kwargs): if location is None: location = getattr(settings, 'SASS_PROCESSOR_ROOT', settings.STATIC_ROOT) if base_url is None: base_url = settings.STATIC_URL super(SassFileStorage, self).__init__(location, base_url, *args, **kwargs) try: from storages.backends.s3boto3 import S3Boto3Storage class SassS3Boto3Storage(S3Boto3Storage): base_url = '{}.s3.amazonaws.com'.format(settings.AWS_STORAGE_BUCKET_NAME) except (AttributeError, ImportError): pass def find_file(path): for finder in get_finders(): result = finder.find(path) if result: return result
Fix in case s3boto is not installed
Fix in case s3boto is not installed
Python
mit
jrief/django-sass-processor,jrief/django-sass-processor
from django.conf import settings from django.contrib.staticfiles.finders import get_finders from django.core.files.storage import FileSystemStorage class SassFileStorage(FileSystemStorage): def __init__(self, location=None, base_url=None, *args, **kwargs): if location is None: location = getattr(settings, 'SASS_PROCESSOR_ROOT', settings.STATIC_ROOT) if base_url is None: base_url = settings.STATIC_URL super(SassFileStorage, self).__init__(location, base_url, *args, **kwargs) try: from storages.backends.s3boto3 import S3Boto3Storage class SassS3Boto3Storage(S3Boto3Storage): base_url = '{}.s3.amazonaws.com'.format(settings.AWS_STORAGE_BUCKET_NAME) - except ImportError: + except (AttributeError, ImportError): pass def find_file(path): for finder in get_finders(): result = finder.find(path) if result: return result
Fix in case s3boto is not installed
## Code Before: from django.conf import settings from django.contrib.staticfiles.finders import get_finders from django.core.files.storage import FileSystemStorage class SassFileStorage(FileSystemStorage): def __init__(self, location=None, base_url=None, *args, **kwargs): if location is None: location = getattr(settings, 'SASS_PROCESSOR_ROOT', settings.STATIC_ROOT) if base_url is None: base_url = settings.STATIC_URL super(SassFileStorage, self).__init__(location, base_url, *args, **kwargs) try: from storages.backends.s3boto3 import S3Boto3Storage class SassS3Boto3Storage(S3Boto3Storage): base_url = '{}.s3.amazonaws.com'.format(settings.AWS_STORAGE_BUCKET_NAME) except ImportError: pass def find_file(path): for finder in get_finders(): result = finder.find(path) if result: return result ## Instruction: Fix in case s3boto is not installed ## Code After: from django.conf import settings from django.contrib.staticfiles.finders import get_finders from django.core.files.storage import FileSystemStorage class SassFileStorage(FileSystemStorage): def __init__(self, location=None, base_url=None, *args, **kwargs): if location is None: location = getattr(settings, 'SASS_PROCESSOR_ROOT', settings.STATIC_ROOT) if base_url is None: base_url = settings.STATIC_URL super(SassFileStorage, self).__init__(location, base_url, *args, **kwargs) try: from storages.backends.s3boto3 import S3Boto3Storage class SassS3Boto3Storage(S3Boto3Storage): base_url = '{}.s3.amazonaws.com'.format(settings.AWS_STORAGE_BUCKET_NAME) except (AttributeError, ImportError): pass def find_file(path): for finder in get_finders(): result = finder.find(path) if result: return result
// ... existing code ... except (AttributeError, ImportError): pass // ... rest of the code ...
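The added `AttributeError` covers the case where django-storages is importable but `AWS_STORAGE_BUCKET_NAME` was never configured: reading an undefined Django setting raises `AttributeError`, which the old `except ImportError` let escape at import time. A small demonstration of that failure mode:

```python
from django.conf import settings

settings.configure()                 # stands in for a project with no S3 config
try:
    settings.AWS_STORAGE_BUCKET_NAME
except AttributeError as exc:
    print(exc)                       # "... object has no attribute 'AWS_STORAGE_BUCKET_NAME'"
```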
ff6ee622204500101ad5721dccea69a1c62de65f
smbackend/urls.py
smbackend/urls.py
from django.conf.urls import patterns, include, url from services.api import all_views as services_views from services.api import AccessibilityRuleView from observations.api import views as observations_views from rest_framework import routers from observations.views import obtain_auth_token from munigeo.api import all_views as munigeo_views # from django.contrib import admin # admin.autodiscover() router = routers.DefaultRouter() registered_api_views = set() for view in services_views + munigeo_views + observations_views: kwargs = {} if view['name'] in registered_api_views: continue else: registered_api_views.add(view['name']) if 'base_name' in view: kwargs['base_name'] = view['base_name'] router.register(view['name'], view['class'], **kwargs) urlpatterns = patterns('', # Examples: # url(r'^$', 'smbackend.views.home', name='home'), # url(r'^blog/', include('blog.urls')), # url(r'^', include(v1_api.urls)), # url(r'^admin/', include(admin.site.urls)), url(r'^open311/', 'services.views.post_service_request', name='services'), url(r'^v1/', include(router.urls)), url(r'^v1/api-token-auth/', obtain_auth_token) )
from django.conf.urls import patterns, include, url from services.api import all_views as services_views from services.api import AccessibilityRuleView from observations.api import views as observations_views from rest_framework import routers from observations.views import obtain_auth_token from munigeo.api import all_views as munigeo_views # from django.contrib import admin # admin.autodiscover() router = routers.DefaultRouter() registered_api_views = set() for view in services_views + munigeo_views + observations_views: kwargs = {} if view['name'] in registered_api_views: continue else: registered_api_views.add(view['name']) if 'base_name' in view: kwargs['base_name'] = view['base_name'] router.register(view['name'], view['class'], **kwargs) urlpatterns = patterns('', # Examples: # url(r'^$', 'smbackend.views.home', name='home'), # url(r'^blog/', include('blog.urls')), # url(r'^', include(v1_api.urls)), # url(r'^admin/', include(admin.site.urls)), url(r'^open311/', 'services.views.post_service_request', name='services'), url(r'^v1/', include(router.urls)), url(r'^v1/api-token-auth/', obtain_auth_token, name='api-auth-token') )
Add name to api-token-auth url endpoint.
Add name to api-token-auth url endpoint.
Python
agpl-3.0
City-of-Helsinki/smbackend,City-of-Helsinki/smbackend
from django.conf.urls import patterns, include, url from services.api import all_views as services_views from services.api import AccessibilityRuleView from observations.api import views as observations_views from rest_framework import routers from observations.views import obtain_auth_token from munigeo.api import all_views as munigeo_views # from django.contrib import admin # admin.autodiscover() router = routers.DefaultRouter() registered_api_views = set() for view in services_views + munigeo_views + observations_views: kwargs = {} if view['name'] in registered_api_views: continue else: registered_api_views.add(view['name']) if 'base_name' in view: kwargs['base_name'] = view['base_name'] router.register(view['name'], view['class'], **kwargs) urlpatterns = patterns('', # Examples: # url(r'^$', 'smbackend.views.home', name='home'), # url(r'^blog/', include('blog.urls')), # url(r'^', include(v1_api.urls)), # url(r'^admin/', include(admin.site.urls)), url(r'^open311/', 'services.views.post_service_request', name='services'), url(r'^v1/', include(router.urls)), - url(r'^v1/api-token-auth/', obtain_auth_token) + url(r'^v1/api-token-auth/', obtain_auth_token, name='api-auth-token') )
Add name to api-token-auth url endpoint.
## Code Before: from django.conf.urls import patterns, include, url from services.api import all_views as services_views from services.api import AccessibilityRuleView from observations.api import views as observations_views from rest_framework import routers from observations.views import obtain_auth_token from munigeo.api import all_views as munigeo_views # from django.contrib import admin # admin.autodiscover() router = routers.DefaultRouter() registered_api_views = set() for view in services_views + munigeo_views + observations_views: kwargs = {} if view['name'] in registered_api_views: continue else: registered_api_views.add(view['name']) if 'base_name' in view: kwargs['base_name'] = view['base_name'] router.register(view['name'], view['class'], **kwargs) urlpatterns = patterns('', # Examples: # url(r'^$', 'smbackend.views.home', name='home'), # url(r'^blog/', include('blog.urls')), # url(r'^', include(v1_api.urls)), # url(r'^admin/', include(admin.site.urls)), url(r'^open311/', 'services.views.post_service_request', name='services'), url(r'^v1/', include(router.urls)), url(r'^v1/api-token-auth/', obtain_auth_token) ) ## Instruction: Add name to api-token-auth url endpoint. ## Code After: from django.conf.urls import patterns, include, url from services.api import all_views as services_views from services.api import AccessibilityRuleView from observations.api import views as observations_views from rest_framework import routers from observations.views import obtain_auth_token from munigeo.api import all_views as munigeo_views # from django.contrib import admin # admin.autodiscover() router = routers.DefaultRouter() registered_api_views = set() for view in services_views + munigeo_views + observations_views: kwargs = {} if view['name'] in registered_api_views: continue else: registered_api_views.add(view['name']) if 'base_name' in view: kwargs['base_name'] = view['base_name'] router.register(view['name'], view['class'], **kwargs) urlpatterns = patterns('', # Examples: # url(r'^$', 'smbackend.views.home', name='home'), # url(r'^blog/', include('blog.urls')), # url(r'^', include(v1_api.urls)), # url(r'^admin/', include(admin.site.urls)), url(r'^open311/', 'services.views.post_service_request', name='services'), url(r'^v1/', include(router.urls)), url(r'^v1/api-token-auth/', obtain_auth_token, name='api-auth-token') )
# ... existing code ... url(r'^v1/', include(router.urls)), url(r'^v1/api-token-auth/', obtain_auth_token, name='api-auth-token') ) # ... rest of the code ...
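Naming the route is what lets the rest of the project resolve it without hard-coding the path. A short sketch of the payoff, assuming this URLconf is loaded; on the Django versions that still support patterns(), reverse lives in django.core.urlresolvers (modern Django moved it to django.urls).

from django.core.urlresolvers import reverse

token_url = reverse('api-auth-token')  # -> '/v1/api-token-auth/' given the urlpatterns above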
e981369f61cec6582b3b9b583639f519ab5f0106
deployments/prob140/image/ipython_config.py
deployments/prob140/image/ipython_config.py
c.Historymanager.enabled = False # Use memory for notebook notary file to workaround corrupted files on nfs # https://www.sqlite.org/inmemorydb.html # https://github.com/jupyter/jupyter/issues/174 # https://github.com/ipython/ipython/issues/9163 c.NotebookNotary.db_file = ":memory:"
c.HistoryManager.enabled = False
Fix typo in ipython config
Fix typo in ipython config: s/Historymanager/HistoryManager/
Python
bsd-3-clause
berkeley-dsep-infra/datahub,ryanlovett/datahub,berkeley-dsep-infra/datahub,berkeley-dsep-infra/datahub,ryanlovett/datahub,ryanlovett/datahub
- c.Historymanager.enabled = False + c.HistoryManager.enabled = False - - # Use memory for notebook notary file to workaround corrupted files on nfs - # https://www.sqlite.org/inmemorydb.html - # https://github.com/jupyter/jupyter/issues/174 - # https://github.com/ipython/ipython/issues/9163 - c.NotebookNotary.db_file = ":memory:" -
Fix typo in ipython config
## Code Before: c.Historymanager.enabled = False # Use memory for notebook notary file to workaround corrupted files on nfs # https://www.sqlite.org/inmemorydb.html # https://github.com/jupyter/jupyter/issues/174 # https://github.com/ipython/ipython/issues/9163 c.NotebookNotary.db_file = ":memory:" ## Instruction: Fix typo in ipython config ## Code After: c.HistoryManager.enabled = False
// ... existing code ... c.HistoryManager.enabled = False // ... rest of the code ...
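Why a typo like this goes unnoticed: traitlets configuration objects accept arbitrary capitalized attribute names, so the misspelled section is created silently instead of being rejected. A small sketch, assuming the standard traitlets package that backs IPython's config files.

from traitlets.config import Config

c = Config()
c.Historymanager.enabled = False  # typo: silently creates an unused 'Historymanager' section
c.HistoryManager.enabled = False  # the section name IPython actually reads
print(sorted(c))  # ['HistoryManager', 'Historymanager'] -- both exist, no error raised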
4257381997e8ac6968713f1bad96019f977bafc9
server.py
server.py
import tweepy, time, sys, os from ConfigParser import SafeConfigParser parser = SafeConfigParser() parser.read('secrets.cfg') #enter the corresponding information from your Twitter application: CONSUMER_KEY = parser.get('bug_tracker', 'CONSUMER_KEY') CONSUMER_SECRET = parser.get('bug_tracker', 'CONSUMER_SECRET') ACCESS_KEY = parser.get('bug_tracker', 'ACCESS_KEY') ACCESS_SECRET = parser.get('bug_tracker', 'ACCESS_SECRET') auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET) auth.set_access_token(ACCESS_KEY, ACCESS_SECRET) api = tweepy.API(auth) line = "Test tweet!" api.update_status(line)
import tweepy, time, sys, os from ConfigParser import SafeConfigParser parser = SafeConfigParser() parser.read('secrets.cfg') #enter the corresponding information from your Twitter application: CONSUMER_KEY = parser.get('Twitter', 'CONSUMER_KEY') CONSUMER_SECRET = parser.get('Twitter', 'CONSUMER_SECRET') ACCESS_KEY = parser.get('Twitter', 'ACCESS_KEY') ACCESS_SECRET = parser.get('Twitter', 'ACCESS_SECRET') auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET) auth.set_access_token(ACCESS_KEY, ACCESS_SECRET) api = tweepy.API(auth) line = "Test tweet!" api.update_status(line)
Fix config parsing. Tweeting works
Fix config parsing. Tweeting works
Python
mit
premgane/agolo-twitterbot,premgane/agolo-twitterbot
import tweepy, time, sys, os from ConfigParser import SafeConfigParser parser = SafeConfigParser() parser.read('secrets.cfg') #enter the corresponding information from your Twitter application: - CONSUMER_KEY = parser.get('bug_tracker', 'CONSUMER_KEY') + CONSUMER_KEY = parser.get('Twitter', 'CONSUMER_KEY') - CONSUMER_SECRET = parser.get('bug_tracker', 'CONSUMER_SECRET') + CONSUMER_SECRET = parser.get('Twitter', 'CONSUMER_SECRET') - ACCESS_KEY = parser.get('bug_tracker', 'ACCESS_KEY') + ACCESS_KEY = parser.get('Twitter', 'ACCESS_KEY') - ACCESS_SECRET = parser.get('bug_tracker', 'ACCESS_SECRET') + ACCESS_SECRET = parser.get('Twitter', 'ACCESS_SECRET') auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET) auth.set_access_token(ACCESS_KEY, ACCESS_SECRET) api = tweepy.API(auth) line = "Test tweet!" api.update_status(line)
Fix config parsing. Tweeting works
## Code Before: import tweepy, time, sys, os from ConfigParser import SafeConfigParser parser = SafeConfigParser() parser.read('secrets.cfg') #enter the corresponding information from your Twitter application: CONSUMER_KEY = parser.get('bug_tracker', 'CONSUMER_KEY') CONSUMER_SECRET = parser.get('bug_tracker', 'CONSUMER_SECRET') ACCESS_KEY = parser.get('bug_tracker', 'ACCESS_KEY') ACCESS_SECRET = parser.get('bug_tracker', 'ACCESS_SECRET') auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET) auth.set_access_token(ACCESS_KEY, ACCESS_SECRET) api = tweepy.API(auth) line = "Test tweet!" api.update_status(line) ## Instruction: Fix config parsing. Tweeting works ## Code After: import tweepy, time, sys, os from ConfigParser import SafeConfigParser parser = SafeConfigParser() parser.read('secrets.cfg') #enter the corresponding information from your Twitter application: CONSUMER_KEY = parser.get('Twitter', 'CONSUMER_KEY') CONSUMER_SECRET = parser.get('Twitter', 'CONSUMER_SECRET') ACCESS_KEY = parser.get('Twitter', 'ACCESS_KEY') ACCESS_SECRET = parser.get('Twitter', 'ACCESS_SECRET') auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET) auth.set_access_token(ACCESS_KEY, ACCESS_SECRET) api = tweepy.API(auth) line = "Test tweet!" api.update_status(line)
... #enter the corresponding information from your Twitter application: CONSUMER_KEY = parser.get('Twitter', 'CONSUMER_KEY') CONSUMER_SECRET = parser.get('Twitter', 'CONSUMER_SECRET') ACCESS_KEY = parser.get('Twitter', 'ACCESS_KEY') ACCESS_SECRET = parser.get('Twitter', 'ACCESS_SECRET') ...
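The bug is purely a section-name mismatch: ConfigParser.get() raises NoSectionError unless its first argument matches a [section] header in the file. A self-contained sketch using Python 3's configparser (the record itself is on Python 2's SafeConfigParser; the file contents below are illustrative).

from configparser import ConfigParser, NoSectionError

parser = ConfigParser()
parser.read_string("[Twitter]\nCONSUMER_KEY = abc123\n")  # mimics secrets.cfg
print(parser.get('Twitter', 'CONSUMER_KEY'))  # 'abc123'
try:
    parser.get('bug_tracker', 'CONSUMER_KEY')  # the old, mismatched section name
except NoSectionError:
    print("no [bug_tracker] section in the file")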
bbeb9b780908cf1322722669f1c68259345fe261
readthedocs/v3/routers.py
readthedocs/v3/routers.py
from rest_framework.routers import DefaultRouter from rest_framework_extensions.routers import NestedRouterMixin class DefaultRouterWithNesting(NestedRouterMixin, DefaultRouter): pass
from rest_framework.routers import DefaultRouter, APIRootView from rest_framework_extensions.routers import NestedRouterMixin class DocsAPIRootView(APIRootView): # Overridden only to add documentation for BrowsableAPIRenderer. """ Read the Docs APIv3 root endpoint. Full documentation at [https://docs.readthedocs.io/en/latest/api/v3.html](https://docs.readthedocs.io/en/latest/api/v3.html). """ def get_view_name(self): return 'Read the Docs APIv3' class DefaultRouterWithNesting(NestedRouterMixin, DefaultRouter): APIRootView = DocsAPIRootView root_view_name = 'api-v3-root'
Add documentation to the root view of BrowsableAPI
Add documentation to the root view of BrowsableAPI
Python
mit
rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org
- from rest_framework.routers import DefaultRouter + from rest_framework.routers import DefaultRouter, APIRootView from rest_framework_extensions.routers import NestedRouterMixin + class DocsAPIRootView(APIRootView): + + # Overridden only to add documentation for BrowsableAPIRenderer. + + """ + Read the Docs APIv3 root endpoint. + + Full documentation at [https://docs.readthedocs.io/en/latest/api/v3.html](https://docs.readthedocs.io/en/latest/api/v3.html). + """ + + def get_view_name(self): + return 'Read the Docs APIv3' + + class DefaultRouterWithNesting(NestedRouterMixin, DefaultRouter): - pass + APIRootView = DocsAPIRootView + root_view_name = 'api-v3-root'
Add documentation to the root view of BrowsableAPI
## Code Before: from rest_framework.routers import DefaultRouter from rest_framework_extensions.routers import NestedRouterMixin class DefaultRouterWithNesting(NestedRouterMixin, DefaultRouter): pass ## Instruction: Add documentation to the root view of BrowsableAPI ## Code After: from rest_framework.routers import DefaultRouter, APIRootView from rest_framework_extensions.routers import NestedRouterMixin class DocsAPIRootView(APIRootView): # Overridden only to add documentation for BrowsableAPIRenderer. """ Read the Docs APIv3 root endpoint. Full documentation at [https://docs.readthedocs.io/en/latest/api/v3.html](https://docs.readthedocs.io/en/latest/api/v3.html). """ def get_view_name(self): return 'Read the Docs APIv3' class DefaultRouterWithNesting(NestedRouterMixin, DefaultRouter): APIRootView = DocsAPIRootView root_view_name = 'api-v3-root'
... from rest_framework.routers import DefaultRouter, APIRootView from rest_framework_extensions.routers import NestedRouterMixin ... class DocsAPIRootView(APIRootView): # Overridden only to add documentation for BrowsableAPIRenderer. """ Read the Docs APIv3 root endpoint. Full documentation at [https://docs.readthedocs.io/en/latest/api/v3.html](https://docs.readthedocs.io/en/latest/api/v3.html). """ def get_view_name(self): return 'Read the Docs APIv3' class DefaultRouterWithNesting(NestedRouterMixin, DefaultRouter): APIRootView = DocsAPIRootView root_view_name = 'api-v3-root' ...
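The same hooks generalize to any DRF project: the router's root view class and its name are both overridable, and BrowsableAPIRenderer renders the view's docstring (Markdown allowed) together with get_view_name(). A hedged sketch with placeholder names, not the record's own:

from rest_framework.routers import APIRootView, DefaultRouter

class MyAPIRootView(APIRootView):
    """My service's API root. This text is rendered by BrowsableAPIRenderer."""

    def get_view_name(self):
        return 'My Service API'

class MyRouter(DefaultRouter):
    APIRootView = MyAPIRootView
    root_view_name = 'my-api-root'

router = MyRouter()
urlpatterns = router.urls  # the root endpoint now shows the custom name and description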
ee070606be405b86bfcc6e6796bbe322a78511ed
ui/assetmanager.py
ui/assetmanager.py
"""Loads and manages art assets""" import pyglet _ASSET_PATHS = ["res"] _ASSET_FILE_NAMES = [ "black_key_down.png", "black_key_up.png", "white_key_down.png", "white_key_up.png", "staff_line.png", ] class Assets(object): _loadedAssets = None @staticmethod def loadAssets(): Assets._loadedAssets = dict() Assets._updateResourcePath() for f in _ASSET_FILE_NAMES: Assets.loadAsset(f) @staticmethod def loadAsset(filename): Assets._loadedAssets[filename] = pyglet.resource.image(filename) @staticmethod def _updateResourcePath(): for p in _ASSET_PATHS: pyglet.resource.path.append(p) pyglet.resource.reindex() @staticmethod def get(filename): if Assets._loadedAssets is None: raise RuntimeError("You must initialize the asset manager before " "retrieving assets") return Assets._loadedAssets[filename]
"""Loads and manages art assets""" import pyglet import os _ASSET_PATHS = ["res"] _ASSET_FILE_NAMES = [ "black_key_down.png", "black_key_up.png", "white_key_down.png", "white_key_up.png", "staff_line.png", ] class Assets(object): _loadedAssets = None @staticmethod def loadAssets(): Assets._loadedAssets = dict() Assets._updateResourcePath() for f in _ASSET_FILE_NAMES: Assets.loadAsset(f) @staticmethod def loadAsset(filename): Assets._loadedAssets[filename] = pyglet.resource.image(filename) @staticmethod def _updateResourcePath(): for p in _ASSET_PATHS: pyglet.resource.path.append(os.path.join(os.getcwd(), p)) pyglet.resource.reindex() @staticmethod def get(filename): if Assets._loadedAssets is None: raise RuntimeError("You must initialize the asset manager before " "retrieving assets") return Assets._loadedAssets[filename]
Use absolute resource path in Pyglet
Use absolute resource path in Pyglet It appears that a recent change in Pyglet causes relative paths to fail here.
Python
bsd-2-clause
aschmied/keyzer
"""Loads and manages art assets""" import pyglet + import os _ASSET_PATHS = ["res"] _ASSET_FILE_NAMES = [ "black_key_down.png", "black_key_up.png", "white_key_down.png", "white_key_up.png", "staff_line.png", ] class Assets(object): _loadedAssets = None @staticmethod def loadAssets(): Assets._loadedAssets = dict() Assets._updateResourcePath() for f in _ASSET_FILE_NAMES: Assets.loadAsset(f) @staticmethod def loadAsset(filename): Assets._loadedAssets[filename] = pyglet.resource.image(filename) @staticmethod def _updateResourcePath(): for p in _ASSET_PATHS: - pyglet.resource.path.append(p) + pyglet.resource.path.append(os.path.join(os.getcwd(), p)) pyglet.resource.reindex() @staticmethod def get(filename): if Assets._loadedAssets is None: raise RuntimeError("You must initialize the asset manager before " "retrieving assets") return Assets._loadedAssets[filename]
Use absolute resource path in Pyglet
## Code Before: """Loads and manages art assets""" import pyglet _ASSET_PATHS = ["res"] _ASSET_FILE_NAMES = [ "black_key_down.png", "black_key_up.png", "white_key_down.png", "white_key_up.png", "staff_line.png", ] class Assets(object): _loadedAssets = None @staticmethod def loadAssets(): Assets._loadedAssets = dict() Assets._updateResourcePath() for f in _ASSET_FILE_NAMES: Assets.loadAsset(f) @staticmethod def loadAsset(filename): Assets._loadedAssets[filename] = pyglet.resource.image(filename) @staticmethod def _updateResourcePath(): for p in _ASSET_PATHS: pyglet.resource.path.append(p) pyglet.resource.reindex() @staticmethod def get(filename): if Assets._loadedAssets is None: raise RuntimeError("You must initialize the asset manager before " "retrieving assets") return Assets._loadedAssets[filename] ## Instruction: Use absolute resource path in Pyglet ## Code After: """Loads and manages art assets""" import pyglet import os _ASSET_PATHS = ["res"] _ASSET_FILE_NAMES = [ "black_key_down.png", "black_key_up.png", "white_key_down.png", "white_key_up.png", "staff_line.png", ] class Assets(object): _loadedAssets = None @staticmethod def loadAssets(): Assets._loadedAssets = dict() Assets._updateResourcePath() for f in _ASSET_FILE_NAMES: Assets.loadAsset(f) @staticmethod def loadAsset(filename): Assets._loadedAssets[filename] = pyglet.resource.image(filename) @staticmethod def _updateResourcePath(): for p in _ASSET_PATHS: pyglet.resource.path.append(os.path.join(os.getcwd(), p)) pyglet.resource.reindex() @staticmethod def get(filename): if Assets._loadedAssets is None: raise RuntimeError("You must initialize the asset manager before " "retrieving assets") return Assets._loadedAssets[filename]
// ... existing code ... import pyglet import os // ... modified code ... for p in _ASSET_PATHS: pyglet.resource.path.append(os.path.join(os.getcwd(), p)) pyglet.resource.reindex() // ... rest of the code ...
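The fix boils down to never handing pyglet a bare relative directory, since pyglet resolves resource paths itself (the commit message attributes the breakage to a recent pyglet change). A minimal sketch; anchoring on the module's own location, rather than os.getcwd(), also works when the game is launched from outside the project root.

import os
import pyglet

# Anchor the resource directory explicitly instead of relying on pyglet's
# interpretation of a bare relative path like "res".
res_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "res")
pyglet.resource.path.append(res_dir)
pyglet.resource.reindex()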
4efa9c87264eabb6712f4fb787ab0de42be18de6
places/urls.py
places/urls.py
from django.conf.urls import url from . import views app_name = 'places' urlpatterns = [ url(r'^$', views.IndexView.as_view(), name='index'), url(r'^(?P<slug>[-\w]+)/$', views.PlaceView.as_view(), name='place'), ]
from django.urls import include, path from . import views app_name = 'places' urlpatterns = [ path('', views.IndexView.as_view(), name='index'), path('<slug:slug>/', views.PlaceView.as_view(), name='place'), ]
Move places urlpatterns to Django 2.0 preferred method
Move places urlpatterns to Django 2.0 preferred method
Python
mit
evanepio/dotmanca,evanepio/dotmanca,evanepio/dotmanca
- from django.conf.urls import url + from django.urls import include, path from . import views app_name = 'places' urlpatterns = [ - url(r'^$', views.IndexView.as_view(), name='index'), + path('', views.IndexView.as_view(), name='index'), - url(r'^(?P<slug>[-\w]+)/$', views.PlaceView.as_view(), name='place'), + path('<slug:slug>/', views.PlaceView.as_view(), name='place'), ]
Move places urlpatterns to Django 2.0 preferred method
## Code Before: from django.conf.urls import url from . import views app_name = 'places' urlpatterns = [ url(r'^$', views.IndexView.as_view(), name='index'), url(r'^(?P<slug>[-\w]+)/$', views.PlaceView.as_view(), name='place'), ] ## Instruction: Move places urlpatterns to Django 2.0 preferred method ## Code After: from django.urls import include, path from . import views app_name = 'places' urlpatterns = [ path('', views.IndexView.as_view(), name='index'), path('<slug:slug>/', views.PlaceView.as_view(), name='place'), ]
... from django.urls import include, path ... urlpatterns = [ path('', views.IndexView.as_view(), name='index'), path('<slug:slug>/', views.PlaceView.as_view(), name='place'), ] ...
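The regex-to-converter translation is mechanical, with one caveat: Django's slug converter matches [-a-zA-Z0-9_]+, slightly narrower than the old [-\w]+ under Python 3's Unicode-aware \w. An illustrative sketch of the correspondence with placeholder views, not the record's own:

# r'^$'                   ->  path('', ...)
# r'^(?P<slug>[-\w]+)/$'  ->  path('<slug:slug>/', ...)
from django.urls import path
from django.views.generic import TemplateView

urlpatterns = [
    path('', TemplateView.as_view(template_name='index.html'), name='index'),
    path('<slug:slug>/', TemplateView.as_view(template_name='place.html'), name='place'),
]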
25ebc324c0af6e1ce74535cc75227071637a7a18
areaScraper.py
areaScraper.py
from bs4 import BeautifulSoup import re fh = open("sites.htm", "r") soup = BeautifulSoup(fh, "html.parser") for columnDiv in soup.h1.next_sibling.next_sibling: for state in columnDiv: for city in state: print(city) #print(soup.text) print("\n----Done----\n\n")
from bs4 import BeautifulSoup import re def getCities(): fh = open("sites.htm", "r") soup = BeautifulSoup(fh, "html.parser") placesDict = {} for columnDiv in soup.h1.next_sibling.next_sibling: for state in columnDiv: for city in state: m = (re.search('<li><a href="(.+)">(.+)</a>', str(city))) if m: placesDict[m.group(2)] = m.group(1) return(placesDict) getCities()
Complete site scraper for all American cities
Complete site scraper for all American cities areaScraper.py contains the getCities() function, which will return a dictionary of 'city name string' : 'url string' for each Craigslist "site", corresponding to American cities, regions, etc.
Python
mit
MuSystemsAnalysis/craigslist_area_search,MuSystemsAnalysis/craigslist_area_search
from bs4 import BeautifulSoup import re + def getCities(): - fh = open("sites.htm", "r") - soup = BeautifulSoup(fh, "html.parser") + fh = open("sites.htm", "r") + soup = BeautifulSoup(fh, "html.parser") + placesDict = {} - for columnDiv in soup.h1.next_sibling.next_sibling: - for state in columnDiv: - for city in state: - print(city) - #print(soup.text) - print("\n----Done----\n\n") + for columnDiv in soup.h1.next_sibling.next_sibling: + for state in columnDiv: + for city in state: + m = (re.search('<li><a href="(.+)">(.+)</a>', str(city))) + if m: + placesDict[m.group(2)] = m.group(1) + return(placesDict) + + getCities() +
Complete site scraper for all American cities
## Code Before: from bs4 import BeautifulSoup import re fh = open("sites.htm", "r") soup = BeautifulSoup(fh, "html.parser") for columnDiv in soup.h1.next_sibling.next_sibling: for state in columnDiv: for city in state: print(city) #print(soup.text) print("\n----Done----\n\n") ## Instruction: Complete site scraper for all American cities ## Code After: from bs4 import BeautifulSoup import re def getCities(): fh = open("sites.htm", "r") soup = BeautifulSoup(fh, "html.parser") placesDict = {} for columnDiv in soup.h1.next_sibling.next_sibling: for state in columnDiv: for city in state: m = (re.search('<li><a href="(.+)">(.+)</a>', str(city))) if m: placesDict[m.group(2)] = m.group(1) return(placesDict) getCities()
# ... existing code ... def getCities(): fh = open("sites.htm", "r") soup = BeautifulSoup(fh, "html.parser") placesDict = {} for columnDiv in soup.h1.next_sibling.next_sibling: for state in columnDiv: for city in state: m = (re.search('<li><a href="(.+)">(.+)</a>', str(city))) if m: placesDict[m.group(2)] = m.group(1) return(placesDict) getCities() # ... rest of the code ...
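A self-contained sketch of the extraction getCities() performs, run on a tiny inline sample rather than the saved Craigslist page (the URL and city name are made up). As a design note, matching serialized tags with a regex works, but reading city.a['href'] and city.a.string would be the more idiomatic BeautifulSoup route.

import re
from bs4 import BeautifulSoup

html = '<ul><li><a href="https://example.craigslist.org/">example city</a></li></ul>'
soup = BeautifulSoup(html, "html.parser")
places = {}
for city in soup.find_all("li"):
    m = re.search('<li><a href="(.+)">(.+)</a>', str(city))
    if m:
        places[m.group(2)] = m.group(1)
print(places)  # {'example city': 'https://example.craigslist.org/'}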
a2b4b53635ab1188e95efd68f64104a469e7ff66
scheduler/executor.py
scheduler/executor.py
import threading import subprocess class TestExecutor(threading.Thread): """ The general thread to perform the tests executions """ def __init__(self, run_id, test_name, queue): super().__init__() self.run_id = run_id self.test_name = test_name self.queue = queue # __init __() def run(self): """ Execute the command to perform the test execution. The return code is enqueued so the scheduler can determine if the run has completed """ with open( "runs/static/runs/autotests/runs/{}.txt".format( self.run_id), "w") as f: CMD = "python3 -m unittest -v autotests/tests/{}.py 2>&1".format( self.test_name) return_code = subprocess.call(CMD, stdout=f, shell=True) self.queue.put((self.run_id, return_code)) # run()
import threading import subprocess import os class TestExecutor(threading.Thread): """ The general thread to perform the tests executions """ def __init__(self, run_id, test_name, queue): super().__init__() self.run_id = run_id self.test_name = test_name self.queue = queue # __init __() def run(self): """ Execute the command to perform the test execution. The return code is enqueued so the scheduler can determine if the run has completed """ filename = "runs/static/runs/logs/{}.txt".format(self.run_id) os.makedirs(os.path.dirname(filename), exist_ok=True) with open(filename, "w") as f: CMD = "python3 -m unittest -v autotests/tests/{}.py 2>&1".format( self.test_name) return_code = subprocess.call(CMD, stdout=f, shell=True) self.queue.put((self.run_id, return_code)) # run()
Fix bug related to creating the log directory
Fix bug related to creating the log directory
Python
mit
jfelipefilho/test-manager,jfelipefilho/test-manager,jfelipefilho/test-manager
import threading import subprocess + import os class TestExecutor(threading.Thread): """ The general thread to perform the tests executions """ def __init__(self, run_id, test_name, queue): super().__init__() self.run_id = run_id self.test_name = test_name self.queue = queue # __init __() def run(self): """ Execute the command to perform the test execution. The return code is enqueued so the scheduler can determine if the run has completed """ + filename = "runs/static/runs/logs/{}.txt".format(self.run_id) + os.makedirs(os.path.dirname(filename), exist_ok=True) + with open(filename, "w") as f: - with open( - "runs/static/runs/autotests/runs/{}.txt".format( - self.run_id), "w") as f: - CMD = "python3 -m unittest -v autotests/tests/{}.py 2>&1".format( self.test_name) return_code = subprocess.call(CMD, stdout=f, shell=True) self.queue.put((self.run_id, return_code)) # run()
Fix bug related to creating the log directory
## Code Before: import threading import subprocess class TestExecutor(threading.Thread): """ The general thread to perform the tests executions """ def __init__(self, run_id, test_name, queue): super().__init__() self.run_id = run_id self.test_name = test_name self.queue = queue # __init __() def run(self): """ Execute the command to perform the test execution. The return code is enqueued so the scheduler can determine if the run has completed """ with open( "runs/static/runs/autotests/runs/{}.txt".format( self.run_id), "w") as f: CMD = "python3 -m unittest -v autotests/tests/{}.py 2>&1".format( self.test_name) return_code = subprocess.call(CMD, stdout=f, shell=True) self.queue.put((self.run_id, return_code)) # run() ## Instruction: Fix bug related to creating the log directory ## Code After: import threading import subprocess import os class TestExecutor(threading.Thread): """ The general thread to perform the tests executions """ def __init__(self, run_id, test_name, queue): super().__init__() self.run_id = run_id self.test_name = test_name self.queue = queue # __init __() def run(self): """ Execute the command to perform the test execution. The return code is enqueued so the scheduler can determine if the run has completed """ filename = "runs/static/runs/logs/{}.txt".format(self.run_id) os.makedirs(os.path.dirname(filename), exist_ok=True) with open(filename, "w") as f: CMD = "python3 -m unittest -v autotests/tests/{}.py 2>&1".format( self.test_name) return_code = subprocess.call(CMD, stdout=f, shell=True) self.queue.put((self.run_id, return_code)) # run()
... import subprocess import os ... """ filename = "runs/static/runs/logs/{}.txt".format(self.run_id) os.makedirs(os.path.dirname(filename), exist_ok=True) with open(filename, "w") as f: CMD = "python3 -m unittest -v autotests/tests/{}.py 2>&1".format( ...
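The rule the fix encodes: open(path, 'w') creates the file but never its parent directories, so they must exist first, and exist_ok=True makes the call idempotent across repeated runs. A minimal sketch with an illustrative run id:

import os

filename = "runs/static/runs/logs/42.txt"
os.makedirs(os.path.dirname(filename), exist_ok=True)  # no FileExistsError on reruns
with open(filename, "w") as f:
    f.write("test run output\n")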
854fe79574782f812313508bd8b207f6c033352a
event/models.py
event/models.py
from django.db import models class Artist(models.Model): name = models.CharField(max_length=100) image_url = models.URLField(blank=True) thumb_url = models.URLField(blank=True) events = models.ManyToManyField( 'event.Event', related_name='artists', blank=True, ) class Meta: ordering = ['name'] def __str__(self): return self.name class Event(models.Model): title = models.CharField(max_length=200) datetime = models.DateTimeField() venue = models.ForeignKey( 'event.Venue', related_name='events', on_delete=models.CASCADE, ) def __str__(self): return self.title class Venue(models.Model): name = models.CharField(max_length=100) city = models.CharField(max_length=100) country = models.CharField(max_length=100) def __str__(self): return self.name
from django.db import models class Artist(models.Model): name = models.CharField(max_length=100) image_url = models.URLField(blank=True) thumb_url = models.URLField(blank=True) events = models.ManyToManyField( 'event.Event', related_name='artists', blank=True, ) class Meta: ordering = ['name'] def __str__(self): return self.name class Event(models.Model): title = models.CharField(max_length=200) datetime = models.DateTimeField() venue = models.ForeignKey( 'event.Venue', related_name='events', on_delete=models.CASCADE, ) class Meta: ordering = ['datetime'] def __str__(self): return self.title class Venue(models.Model): name = models.CharField(max_length=100) city = models.CharField(max_length=100) country = models.CharField(max_length=100) def __str__(self): return self.name
Add Event ordering by datetime
Add Event ordering by datetime
Python
mit
FedorSelitsky/eventrack,FedorSelitsky/eventrack,FedorSelitsky/eventrack,FedorSelitsky/eventrack
from django.db import models class Artist(models.Model): name = models.CharField(max_length=100) image_url = models.URLField(blank=True) thumb_url = models.URLField(blank=True) events = models.ManyToManyField( 'event.Event', related_name='artists', blank=True, ) class Meta: ordering = ['name'] def __str__(self): return self.name class Event(models.Model): title = models.CharField(max_length=200) datetime = models.DateTimeField() venue = models.ForeignKey( 'event.Venue', related_name='events', on_delete=models.CASCADE, ) + class Meta: + ordering = ['datetime'] + def __str__(self): return self.title class Venue(models.Model): name = models.CharField(max_length=100) city = models.CharField(max_length=100) country = models.CharField(max_length=100) def __str__(self): return self.name
Add Event ordering by datetime
## Code Before: from django.db import models class Artist(models.Model): name = models.CharField(max_length=100) image_url = models.URLField(blank=True) thumb_url = models.URLField(blank=True) events = models.ManyToManyField( 'event.Event', related_name='artists', blank=True, ) class Meta: ordering = ['name'] def __str__(self): return self.name class Event(models.Model): title = models.CharField(max_length=200) datetime = models.DateTimeField() venue = models.ForeignKey( 'event.Venue', related_name='events', on_delete=models.CASCADE, ) def __str__(self): return self.title class Venue(models.Model): name = models.CharField(max_length=100) city = models.CharField(max_length=100) country = models.CharField(max_length=100) def __str__(self): return self.name ## Instruction: Add Event ordering by datetime ## Code After: from django.db import models class Artist(models.Model): name = models.CharField(max_length=100) image_url = models.URLField(blank=True) thumb_url = models.URLField(blank=True) events = models.ManyToManyField( 'event.Event', related_name='artists', blank=True, ) class Meta: ordering = ['name'] def __str__(self): return self.name class Event(models.Model): title = models.CharField(max_length=200) datetime = models.DateTimeField() venue = models.ForeignKey( 'event.Venue', related_name='events', on_delete=models.CASCADE, ) class Meta: ordering = ['datetime'] def __str__(self): return self.title class Venue(models.Model): name = models.CharField(max_length=100) city = models.CharField(max_length=100) country = models.CharField(max_length=100) def __str__(self): return self.name
... class Meta: ordering = ['datetime'] def __str__(self): ...
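What Meta.ordering buys in practice: every unqualified queryset over Event now comes back in ascending datetime order, so call sites no longer need an explicit order_by. An illustrative ORM sketch assuming the Event model above, inside a configured Django project:

upcoming = Event.objects.all()                         # implicitly ordered by datetime
explicit = Event.objects.order_by('datetime')          # the same query spelled out
latest = Event.objects.order_by('-datetime').first()   # per-query reversal still opts out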
0dc217bd0cec8a0321dfc38b88696514179bf833
editorconfig/__init__.py
editorconfig/__init__.py
"""EditorConfig Python Core""" from editorconfig.versiontools import join_version VERSION = (0, 11, 3, "development") __all__ = ['get_properties', 'EditorConfigError', 'exceptions'] __version__ = join_version(VERSION) def get_properties(filename): """Locate and parse EditorConfig files for the given filename""" handler = EditorConfigHandler(filename) return handler.get_configurations() from editorconfig.handler import EditorConfigHandler from editorconfig.exceptions import *
"""EditorConfig Python Core""" from editorconfig.versiontools import join_version VERSION = (0, 11, 3, "final") __all__ = ['get_properties', 'EditorConfigError', 'exceptions'] __version__ = join_version(VERSION) def get_properties(filename): """Locate and parse EditorConfig files for the given filename""" handler = EditorConfigHandler(filename) return handler.get_configurations() from editorconfig.handler import EditorConfigHandler from editorconfig.exceptions import *
Upgrade version to 0.11.3 final
Upgrade version to 0.11.3 final
Python
bsd-2-clause
VictorBjelkholm/editorconfig-vim,johnfraney/editorconfig-vim,pocke/editorconfig-vim,pocke/editorconfig-vim,johnfraney/editorconfig-vim,benjifisher/editorconfig-vim,pocke/editorconfig-vim,johnfraney/editorconfig-vim,VictorBjelkholm/editorconfig-vim,VictorBjelkholm/editorconfig-vim,benjifisher/editorconfig-vim,benjifisher/editorconfig-vim
"""EditorConfig Python Core""" from editorconfig.versiontools import join_version - VERSION = (0, 11, 3, "development") + VERSION = (0, 11, 3, "final") __all__ = ['get_properties', 'EditorConfigError', 'exceptions'] __version__ = join_version(VERSION) def get_properties(filename): """Locate and parse EditorConfig files for the given filename""" handler = EditorConfigHandler(filename) return handler.get_configurations() from editorconfig.handler import EditorConfigHandler from editorconfig.exceptions import *
Upgrade version to 0.11.3 final
## Code Before: """EditorConfig Python Core""" from editorconfig.versiontools import join_version VERSION = (0, 11, 3, "development") __all__ = ['get_properties', 'EditorConfigError', 'exceptions'] __version__ = join_version(VERSION) def get_properties(filename): """Locate and parse EditorConfig files for the given filename""" handler = EditorConfigHandler(filename) return handler.get_configurations() from editorconfig.handler import EditorConfigHandler from editorconfig.exceptions import * ## Instruction: Upgrade version to 0.11.3 final ## Code After: """EditorConfig Python Core""" from editorconfig.versiontools import join_version VERSION = (0, 11, 3, "final") __all__ = ['get_properties', 'EditorConfigError', 'exceptions'] __version__ = join_version(VERSION) def get_properties(filename): """Locate and parse EditorConfig files for the given filename""" handler = EditorConfigHandler(filename) return handler.get_configurations() from editorconfig.handler import EditorConfigHandler from editorconfig.exceptions import *
# ... existing code ... VERSION = (0, 11, 3, "final") # ... rest of the code ...
ed3c03ac4f213f3882e28f25ae0596a7021928cd
test/ParseableInterface/Inputs/make-unreadable.py
test/ParseableInterface/Inputs/make-unreadable.py
import platform import subprocess import sys if platform.system() == 'Windows': import ctypes AdvAPI32 = ctypes.windll.Advapi32 from ctypes.wintypes import POINTER UNLEN = 256 GetUserNameW = AdvAPI32.GetUserNameW GetUserNameW.argtypes = ( ctypes.c_wchar_p, # _In_Out_ lpBuffer POINTER(ctypes.c_uint) # _In_out_ pcBuffer ) GetUserNameW.restype = ctypes.c_uint buffer = ctypes.create_unicode_buffer(UNLEN + 1) size = ctypes.c_uint(len(buffer)) GetUserNameW(buffer, ctypes.byref(size)) for path in sys.argv[1:]: subprocess.call(['icacls', path, '/deny', '{}:(R)'.format(buffer.value)]) else: for path in sys.argv[1:]: subprocess.call(['chmod', 'a-r', path])
import platform import subprocess import sys if platform.system() == 'Windows': import ctypes AdvAPI32 = ctypes.windll.Advapi32 from ctypes.wintypes import POINTER UNLEN = 256 GetUserNameW = AdvAPI32.GetUserNameW GetUserNameW.argtypes = ( ctypes.c_wchar_p, # _In_Out_ lpBuffer POINTER(ctypes.c_uint) # _In_out_ pcBuffer ) GetUserNameW.restype = ctypes.c_uint buffer = ctypes.create_unicode_buffer(UNLEN + 1) size = ctypes.c_uint(len(buffer)) GetUserNameW(buffer, ctypes.byref(size)) # For NetworkService, Host$ is returned, so we have to turn it back # into something that icacls understands. if not buffer.value.endswith('$'): user_name = buffer.value else: user_name = 'NT AUTHORITY\\NetworkService' for path in sys.argv[1:]: subprocess.call(['icacls', path, '/deny', '{}:(R)'.format(user_name)]) else: for path in sys.argv[1:]: subprocess.call(['chmod', 'a-r', path])
Fix handling of Network Service username.
[windows] Fix handling of Network Service username. In Windows Server 2016 at least, the Network Service user (the one being used by the CI machine) is returned as Host$, which icacls doesn't understand. Turn the name into something that icacls understands if we get a name that ends with a dollar sign.
Python
apache-2.0
atrick/swift,hooman/swift,harlanhaskins/swift,shahmishal/swift,stephentyrone/swift,jmgc/swift,devincoughlin/swift,ahoppen/swift,tkremenek/swift,xedin/swift,shahmishal/swift,xwu/swift,xedin/swift,harlanhaskins/swift,harlanhaskins/swift,sschiau/swift,shajrawi/swift,karwa/swift,gribozavr/swift,apple/swift,CodaFi/swift,ahoppen/swift,lorentey/swift,nathawes/swift,JGiola/swift,allevato/swift,airspeedswift/swift,harlanhaskins/swift,hooman/swift,karwa/swift,rudkx/swift,CodaFi/swift,gregomni/swift,lorentey/swift,sschiau/swift,shajrawi/swift,karwa/swift,parkera/swift,tkremenek/swift,sschiau/swift,devincoughlin/swift,xedin/swift,aschwaighofer/swift,airspeedswift/swift,jmgc/swift,nathawes/swift,lorentey/swift,tkremenek/swift,allevato/swift,jmgc/swift,xwu/swift,JGiola/swift,ahoppen/swift,shahmishal/swift,tkremenek/swift,roambotics/swift,benlangmuir/swift,roambotics/swift,hooman/swift,atrick/swift,gribozavr/swift,gregomni/swift,glessard/swift,xedin/swift,apple/swift,jckarter/swift,gregomni/swift,karwa/swift,benlangmuir/swift,sschiau/swift,xedin/swift,stephentyrone/swift,aschwaighofer/swift,jckarter/swift,CodaFi/swift,lorentey/swift,CodaFi/swift,lorentey/swift,harlanhaskins/swift,tkremenek/swift,karwa/swift,gribozavr/swift,nathawes/swift,gregomni/swift,tkremenek/swift,JGiola/swift,nathawes/swift,JGiola/swift,parkera/swift,gregomni/swift,aschwaighofer/swift,airspeedswift/swift,CodaFi/swift,karwa/swift,apple/swift,shajrawi/swift,atrick/swift,stephentyrone/swift,hooman/swift,apple/swift,xwu/swift,parkera/swift,CodaFi/swift,glessard/swift,devincoughlin/swift,glessard/swift,hooman/swift,rudkx/swift,ahoppen/swift,harlanhaskins/swift,sschiau/swift,hooman/swift,allevato/swift,shajrawi/swift,ahoppen/swift,allevato/swift,devincoughlin/swift,shajrawi/swift,devincoughlin/swift,nathawes/swift,JGiola/swift,rudkx/swift,devincoughlin/swift,benlangmuir/swift,parkera/swift,roambotics/swift,rudkx/swift,jmgc/swift,xwu/swift,xedin/swift,roambotics/swift,aschwaighofer/swift,jmgc/swift,airspeedswift/swift,shahmishal/swift,stephentyrone/swift,gribozavr/swift,karwa/swift,devincoughlin/swift,gribozavr/swift,nathawes/swift,roambotics/swift,benlangmuir/swift,rudkx/swift,shahmishal/swift,xwu/swift,glessard/swift,karwa/swift,aschwaighofer/swift,allevato/swift,parkera/swift,glessard/swift,atrick/swift,tkremenek/swift,gribozavr/swift,sschiau/swift,jmgc/swift,benlangmuir/swift,jckarter/swift,jckarter/swift,shahmishal/swift,allevato/swift,gregomni/swift,airspeedswift/swift,parkera/swift,shahmishal/swift,gribozavr/swift,sschiau/swift,jckarter/swift,JGiola/swift,harlanhaskins/swift,xedin/swift,allevato/swift,jckarter/swift,sschiau/swift,nathawes/swift,airspeedswift/swift,xedin/swift,lorentey/swift,glessard/swift,devincoughlin/swift,atrick/swift,lorentey/swift,aschwaighofer/swift,CodaFi/swift,gribozavr/swift,roambotics/swift,shajrawi/swift,rudkx/swift,airspeedswift/swift,lorentey/swift,stephentyrone/swift,apple/swift,aschwaighofer/swift,xwu/swift,xwu/swift,benlangmuir/swift,ahoppen/swift,atrick/swift,jckarter/swift,parkera/swift,parkera/swift,shajrawi/swift,hooman/swift,shahmishal/swift,stephentyrone/swift,shajrawi/swift,jmgc/swift,stephentyrone/swift,apple/swift
import platform import subprocess import sys if platform.system() == 'Windows': import ctypes AdvAPI32 = ctypes.windll.Advapi32 from ctypes.wintypes import POINTER UNLEN = 256 GetUserNameW = AdvAPI32.GetUserNameW GetUserNameW.argtypes = ( ctypes.c_wchar_p, # _In_Out_ lpBuffer POINTER(ctypes.c_uint) # _In_out_ pcBuffer ) GetUserNameW.restype = ctypes.c_uint buffer = ctypes.create_unicode_buffer(UNLEN + 1) size = ctypes.c_uint(len(buffer)) GetUserNameW(buffer, ctypes.byref(size)) + # For NetworkService, Host$ is returned, so we have to turn it back + # into something that icacls understands. + if not buffer.value.endswith('$'): + user_name = buffer.value + else: + user_name = 'NT AUTHORITY\\NetworkService' for path in sys.argv[1:]: subprocess.call(['icacls', path, '/deny', - '{}:(R)'.format(buffer.value)]) + '{}:(R)'.format(user_name)]) else: for path in sys.argv[1:]: subprocess.call(['chmod', 'a-r', path])
Fix handling of Network Service username.
## Code Before: import platform import subprocess import sys if platform.system() == 'Windows': import ctypes AdvAPI32 = ctypes.windll.Advapi32 from ctypes.wintypes import POINTER UNLEN = 256 GetUserNameW = AdvAPI32.GetUserNameW GetUserNameW.argtypes = ( ctypes.c_wchar_p, # _In_Out_ lpBuffer POINTER(ctypes.c_uint) # _In_out_ pcBuffer ) GetUserNameW.restype = ctypes.c_uint buffer = ctypes.create_unicode_buffer(UNLEN + 1) size = ctypes.c_uint(len(buffer)) GetUserNameW(buffer, ctypes.byref(size)) for path in sys.argv[1:]: subprocess.call(['icacls', path, '/deny', '{}:(R)'.format(buffer.value)]) else: for path in sys.argv[1:]: subprocess.call(['chmod', 'a-r', path]) ## Instruction: Fix handling of Network Service username. ## Code After: import platform import subprocess import sys if platform.system() == 'Windows': import ctypes AdvAPI32 = ctypes.windll.Advapi32 from ctypes.wintypes import POINTER UNLEN = 256 GetUserNameW = AdvAPI32.GetUserNameW GetUserNameW.argtypes = ( ctypes.c_wchar_p, # _In_Out_ lpBuffer POINTER(ctypes.c_uint) # _In_out_ pcBuffer ) GetUserNameW.restype = ctypes.c_uint buffer = ctypes.create_unicode_buffer(UNLEN + 1) size = ctypes.c_uint(len(buffer)) GetUserNameW(buffer, ctypes.byref(size)) # For NetworkService, Host$ is returned, so we have to turn it back # into something that icacls understands. if not buffer.value.endswith('$'): user_name = buffer.value else: user_name = 'NT AUTHORITY\\NetworkService' for path in sys.argv[1:]: subprocess.call(['icacls', path, '/deny', '{}:(R)'.format(user_name)]) else: for path in sys.argv[1:]: subprocess.call(['chmod', 'a-r', path])
// ... existing code ... GetUserNameW(buffer, ctypes.byref(size)) # For NetworkService, Host$ is returned, so we have to turn it back # into something that icacls understands. if not buffer.value.endswith('$'): user_name = buffer.value else: user_name = 'NT AUTHORITY\\NetworkService' // ... modified code ... subprocess.call(['icacls', path, '/deny', '{}:(R)'.format(user_name)]) else: // ... rest of the code ...
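The added branch is easier to see factored into a pure function and exercised without the Win32 call; the sample names below are made up, and the Host$ behavior is the one the commit message describes.

def icacls_user_name(raw_name):
    # GetUserNameW reports the NetworkService account as '<host>$', which
    # icacls does not accept, so map it back to a name icacls understands.
    if raw_name.endswith('$'):
        return 'NT AUTHORITY\\NetworkService'
    return raw_name

assert icacls_user_name('jenkins') == 'jenkins'
assert icacls_user_name('BUILD-HOST$') == 'NT AUTHORITY\\NetworkService'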
ee649468df406877ccc51a1042e5657f11caa57d
oauthenticator/tests/test_openshift.py
oauthenticator/tests/test_openshift.py
from pytest import fixture, mark from ..openshift import OpenShiftOAuthenticator from .mocks import setup_oauth_mock def user_model(username): """Return a user model""" return { 'metadata': { 'name': username, } } @fixture def openshift_client(client): setup_oauth_mock(client, host=['localhost'], access_token_path='/oauth/token', user_path='/oapi/v1/users/~', ) return client async def test_openshift(openshift_client): authenticator = OpenShiftOAuthenticator() handler = openshift_client.handler_for_user(user_model('wash')) user_info = await authenticator.authenticate(handler) assert sorted(user_info) == ['auth_state', 'name'] name = user_info['name'] assert name == 'wash' auth_state = user_info['auth_state'] assert 'access_token' in auth_state assert 'openshift_user' in auth_state
from pytest import fixture, mark from ..openshift import OpenShiftOAuthenticator from .mocks import setup_oauth_mock def user_model(username): """Return a user model""" return { 'metadata': { 'name': username, } } @fixture def openshift_client(client): setup_oauth_mock(client, host=['localhost'], access_token_path='/oauth/token', user_path='/apis/user.openshift.io/v1/users/~', ) return client async def test_openshift(openshift_client): authenticator = OpenShiftOAuthenticator() handler = openshift_client.handler_for_user(user_model('wash')) user_info = await authenticator.authenticate(handler) assert sorted(user_info) == ['auth_state', 'name'] name = user_info['name'] assert name == 'wash' auth_state = user_info['auth_state'] assert 'access_token' in auth_state assert 'openshift_user' in auth_state
Update test harness to use new REST API path for OpenShift.
Update test harness to use new REST API path for OpenShift.
Python
bsd-3-clause
minrk/oauthenticator,NickolausDS/oauthenticator,jupyter/oauthenticator,jupyter/oauthenticator,maltevogl/oauthenticator,jupyterhub/oauthenticator
from pytest import fixture, mark from ..openshift import OpenShiftOAuthenticator from .mocks import setup_oauth_mock def user_model(username): """Return a user model""" return { 'metadata': { 'name': username, } } @fixture def openshift_client(client): setup_oauth_mock(client, host=['localhost'], access_token_path='/oauth/token', - user_path='/oapi/v1/users/~', + user_path='/apis/user.openshift.io/v1/users/~', ) return client async def test_openshift(openshift_client): authenticator = OpenShiftOAuthenticator() handler = openshift_client.handler_for_user(user_model('wash')) user_info = await authenticator.authenticate(handler) assert sorted(user_info) == ['auth_state', 'name'] name = user_info['name'] assert name == 'wash' auth_state = user_info['auth_state'] assert 'access_token' in auth_state assert 'openshift_user' in auth_state
Update test harness to use new REST API path for OpenShift.
## Code Before: from pytest import fixture, mark from ..openshift import OpenShiftOAuthenticator from .mocks import setup_oauth_mock def user_model(username): """Return a user model""" return { 'metadata': { 'name': username, } } @fixture def openshift_client(client): setup_oauth_mock(client, host=['localhost'], access_token_path='/oauth/token', user_path='/oapi/v1/users/~', ) return client async def test_openshift(openshift_client): authenticator = OpenShiftOAuthenticator() handler = openshift_client.handler_for_user(user_model('wash')) user_info = await authenticator.authenticate(handler) assert sorted(user_info) == ['auth_state', 'name'] name = user_info['name'] assert name == 'wash' auth_state = user_info['auth_state'] assert 'access_token' in auth_state assert 'openshift_user' in auth_state ## Instruction: Update test harness to use new REST API path for OpenShift. ## Code After: from pytest import fixture, mark from ..openshift import OpenShiftOAuthenticator from .mocks import setup_oauth_mock def user_model(username): """Return a user model""" return { 'metadata': { 'name': username, } } @fixture def openshift_client(client): setup_oauth_mock(client, host=['localhost'], access_token_path='/oauth/token', user_path='/apis/user.openshift.io/v1/users/~', ) return client async def test_openshift(openshift_client): authenticator = OpenShiftOAuthenticator() handler = openshift_client.handler_for_user(user_model('wash')) user_info = await authenticator.authenticate(handler) assert sorted(user_info) == ['auth_state', 'name'] name = user_info['name'] assert name == 'wash' auth_state = user_info['auth_state'] assert 'access_token' in auth_state assert 'openshift_user' in auth_state
... access_token_path='/oauth/token', user_path='/apis/user.openshift.io/v1/users/~', ) ...
b94edbbb717313cc831fa97d3ccf9ab715ff3ade
testing/test_cffitsio.py
testing/test_cffitsio.py
from cffitsio import FitsFile import os def test_create_file(tmpdir): filename = str(tmpdir.join('test.fits')) f = FitsFile.create(filename) assert os.path.isfile(filename)
import pytest import cffitsio import os @pytest.fixture def test_dir(): return os.path.join( os.path.dirname(__file__), 'data') def test_create_file(tmpdir): filename = str(tmpdir.join('test.fits')) f = cffitsio.FitsFile.create(filename) assert os.path.isfile(filename) def test_open_file(test_dir): filename = os.path.join(test_dir, 'all.fits') with cffitsio.open_fits(filename) as infile: assert isinstance(infile, cffitsio.FitsFile)
Add test for opening file
Add test for opening file
Python
mit
mindriot101/fitsio-cffi
- from cffitsio import FitsFile + import pytest + import cffitsio import os + + + @pytest.fixture + def test_dir(): + return os.path.join( + os.path.dirname(__file__), + 'data') def test_create_file(tmpdir): filename = str(tmpdir.join('test.fits')) - f = FitsFile.create(filename) + f = cffitsio.FitsFile.create(filename) assert os.path.isfile(filename) + + def test_open_file(test_dir): + filename = os.path.join(test_dir, 'all.fits') + with cffitsio.open_fits(filename) as infile: + assert isinstance(infile, cffitsio.FitsFile) +
Add test for opening file
## Code Before: from cffitsio import FitsFile import os def test_create_file(tmpdir): filename = str(tmpdir.join('test.fits')) f = FitsFile.create(filename) assert os.path.isfile(filename) ## Instruction: Add test for opening file ## Code After: import pytest import cffitsio import os @pytest.fixture def test_dir(): return os.path.join( os.path.dirname(__file__), 'data') def test_create_file(tmpdir): filename = str(tmpdir.join('test.fits')) f = cffitsio.FitsFile.create(filename) assert os.path.isfile(filename) def test_open_file(test_dir): filename = os.path.join(test_dir, 'all.fits') with cffitsio.open_fits(filename) as infile: assert isinstance(infile, cffitsio.FitsFile)
// ... existing code ... import pytest import cffitsio import os @pytest.fixture def test_dir(): return os.path.join( os.path.dirname(__file__), 'data') // ... modified code ... filename = str(tmpdir.join('test.fits')) f = cffitsio.FitsFile.create(filename) assert os.path.isfile(filename) def test_open_file(test_dir): filename = os.path.join(test_dir, 'all.fits') with cffitsio.open_fits(filename) as infile: assert isinstance(infile, cffitsio.FitsFile) // ... rest of the code ...
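Two pytest mechanics carry this test: a fixture is injected into any test whose parameter name matches it, and using the file object as a context manager guarantees cleanup even when an assertion fails. A reduced sketch of the same shape:

import os
import pytest

@pytest.fixture
def test_dir():
    return os.path.join(os.path.dirname(__file__), 'data')

def test_fixture_is_injected(test_dir):  # pytest passes the fixture's return value in
    assert test_dir.endswith('data')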
5c3900e12216164712c9e7fe7ea064e70fae8d1b
enumfields/enums.py
enumfields/enums.py
import inspect from django.utils.encoding import force_bytes, python_2_unicode_compatible from enum import Enum as BaseEnum, EnumMeta as BaseEnumMeta import six class EnumMeta(BaseEnumMeta): def __new__(cls, name, bases, attrs): Labels = attrs.get('Labels') if Labels is not None and inspect.isclass(Labels): del attrs['Labels'] obj = BaseEnumMeta.__new__(cls, name, bases, attrs) for m in obj: try: m.label = getattr(Labels, m.name) except AttributeError: m.label = m.name.replace('_', ' ').title() return obj @python_2_unicode_compatible class Enum(six.with_metaclass(EnumMeta, BaseEnum)): @classmethod def choices(cls): """ Returns a list formatted for use as field choices. (See https://docs.djangoproject.com/en/dev/ref/models/fields/#choices) """ return tuple((m.value, m.label) for m in cls) def __str__(self): """ Show our label when Django uses the Enum for displaying in a view """ return force_bytes(self.label)
import inspect from django.utils.encoding import force_bytes, python_2_unicode_compatible from enum import Enum as BaseEnum, EnumMeta as BaseEnumMeta import six class EnumMeta(BaseEnumMeta): def __new__(cls, name, bases, attrs): Labels = attrs.get('Labels') if Labels is not None and inspect.isclass(Labels): del attrs['Labels'] if hasattr(attrs, '_member_names'): attrs._member_names.remove('Labels') obj = BaseEnumMeta.__new__(cls, name, bases, attrs) for m in obj: try: m.label = getattr(Labels, m.name) except AttributeError: m.label = m.name.replace('_', ' ').title() return obj @python_2_unicode_compatible class Enum(six.with_metaclass(EnumMeta, BaseEnum)): @classmethod def choices(cls): """ Returns a list formatted for use as field choices. (See https://docs.djangoproject.com/en/dev/ref/models/fields/#choices) """ return tuple((m.value, m.label) for m in cls) def __str__(self): """ Show our label when Django uses the Enum for displaying in a view """ return force_bytes(self.label)
Fix 'Labels' class in Python 3.
Fix 'Labels' class in Python 3. In Python 3, the attrs dict will already be an _EnumDict, which has a separate list of member names (in Python 2, it is still a plain dict at this point).
Python
mit
suutari-ai/django-enumfields,jackyyf/django-enumfields,bxm156/django-enumfields,jessamynsmith/django-enumfields
import inspect from django.utils.encoding import force_bytes, python_2_unicode_compatible from enum import Enum as BaseEnum, EnumMeta as BaseEnumMeta import six class EnumMeta(BaseEnumMeta): def __new__(cls, name, bases, attrs): Labels = attrs.get('Labels') if Labels is not None and inspect.isclass(Labels): del attrs['Labels'] + if hasattr(attrs, '_member_names'): + attrs._member_names.remove('Labels') obj = BaseEnumMeta.__new__(cls, name, bases, attrs) for m in obj: try: m.label = getattr(Labels, m.name) except AttributeError: m.label = m.name.replace('_', ' ').title() return obj @python_2_unicode_compatible class Enum(six.with_metaclass(EnumMeta, BaseEnum)): @classmethod def choices(cls): """ Returns a list formatted for use as field choices. (See https://docs.djangoproject.com/en/dev/ref/models/fields/#choices) """ return tuple((m.value, m.label) for m in cls) def __str__(self): """ Show our label when Django uses the Enum for displaying in a view """ return force_bytes(self.label)
Fix 'Labels' class in Python 3.
## Code Before: import inspect from django.utils.encoding import force_bytes, python_2_unicode_compatible from enum import Enum as BaseEnum, EnumMeta as BaseEnumMeta import six class EnumMeta(BaseEnumMeta): def __new__(cls, name, bases, attrs): Labels = attrs.get('Labels') if Labels is not None and inspect.isclass(Labels): del attrs['Labels'] obj = BaseEnumMeta.__new__(cls, name, bases, attrs) for m in obj: try: m.label = getattr(Labels, m.name) except AttributeError: m.label = m.name.replace('_', ' ').title() return obj @python_2_unicode_compatible class Enum(six.with_metaclass(EnumMeta, BaseEnum)): @classmethod def choices(cls): """ Returns a list formatted for use as field choices. (See https://docs.djangoproject.com/en/dev/ref/models/fields/#choices) """ return tuple((m.value, m.label) for m in cls) def __str__(self): """ Show our label when Django uses the Enum for displaying in a view """ return force_bytes(self.label) ## Instruction: Fix 'Labels' class in Python 3. ## Code After: import inspect from django.utils.encoding import force_bytes, python_2_unicode_compatible from enum import Enum as BaseEnum, EnumMeta as BaseEnumMeta import six class EnumMeta(BaseEnumMeta): def __new__(cls, name, bases, attrs): Labels = attrs.get('Labels') if Labels is not None and inspect.isclass(Labels): del attrs['Labels'] if hasattr(attrs, '_member_names'): attrs._member_names.remove('Labels') obj = BaseEnumMeta.__new__(cls, name, bases, attrs) for m in obj: try: m.label = getattr(Labels, m.name) except AttributeError: m.label = m.name.replace('_', ' ').title() return obj @python_2_unicode_compatible class Enum(six.with_metaclass(EnumMeta, BaseEnum)): @classmethod def choices(cls): """ Returns a list formatted for use as field choices. (See https://docs.djangoproject.com/en/dev/ref/models/fields/#choices) """ return tuple((m.value, m.label) for m in cls) def __str__(self): """ Show our label when Django uses the Enum for displaying in a view """ return force_bytes(self.label)
# ... existing code ... del attrs['Labels'] if hasattr(attrs, '_member_names'): attrs._member_names.remove('Labels') # ... rest of the code ...
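How the Labels hook looks from the call site, and why the fix matters: on Python 3 the class body is collected into an _EnumDict whose _member_names list would otherwise treat Labels as an enum member. A sketch assuming the Enum base exported by this package; the member values are made up.

from enumfields import Enum

class Color(Enum):
    RED = 'r'
    LIGHT_BLUE = 'lb'

    class Labels:
        RED = 'A nice shade of red'

print(Color.RED.label)         # 'A nice shade of red'  (taken from Labels)
print(Color.LIGHT_BLUE.label)  # 'Light Blue'           (derived from the member name)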
350e8bdcb9c6f3eace7839e5dc7270bfeb51e50f
tests/grafana_dashboards/test_config.py
tests/grafana_dashboards/test_config.py
import os from grafana_dashboards.config import Config __author__ = 'Jakub Plichta <[email protected]>' def test_dict(): config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config.yaml') config = Config(config_file) assert config.get_config('context') == {'component': 'frontend'} assert config.get_config('unknown') == {}
import os from grafana_dashboards.config import Config __author__ = 'Jakub Plichta <[email protected]>' def test_existent_config_file(): config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config.yaml') config = Config(config_file) assert config.get_config('context') == {'component': 'frontend'} assert config.get_config('unknown') == {} def test_nonexistent_config_file(): config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'no_file.yaml') config = Config(config_file) assert config.get_config('context') == {} assert config.get_config('unknown') == {}
Add more tests for Config
Add more tests for Config
Python
apache-2.0
jakubplichta/grafana-dashboard-builder
  import os

  from grafana_dashboards.config import Config

  __author__ = 'Jakub Plichta <[email protected]>'


- def test_dict():
+ def test_existent_config_file():
      config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config.yaml')
      config = Config(config_file)

      assert config.get_config('context') == {'component': 'frontend'}
      assert config.get_config('unknown') == {}
+
+ def test_nonexistent_config_file():
+     config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'no_file.yaml')
+     config = Config(config_file)
+
+     assert config.get_config('context') == {}
+     assert config.get_config('unknown') == {}
+
Add more tests for Config
## Code Before:
import os

from grafana_dashboards.config import Config

__author__ = 'Jakub Plichta <[email protected]>'


def test_dict():
    config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config.yaml')
    config = Config(config_file)

    assert config.get_config('context') == {'component': 'frontend'}
    assert config.get_config('unknown') == {}

## Instruction:
Add more tests for Config
## Code After:
import os

from grafana_dashboards.config import Config

__author__ = 'Jakub Plichta <[email protected]>'


def test_existent_config_file():
    config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config.yaml')
    config = Config(config_file)

    assert config.get_config('context') == {'component': 'frontend'}
    assert config.get_config('unknown') == {}

def test_nonexistent_config_file():
    config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'no_file.yaml')
    config = Config(config_file)

    assert config.get_config('context') == {}
    assert config.get_config('unknown') == {}
// ... existing code ...
def test_existent_config_file():
    config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config.yaml')
// ... modified code ...
    assert config.get_config('unknown') == {}

def test_nonexistent_config_file():
    config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'no_file.yaml')
    config = Config(config_file)

    assert config.get_config('context') == {}
    assert config.get_config('unknown') == {}
// ... rest of the code ...
e6d3d60265db1947b8af2d1c59c575c632ddc20b
linter.py
linter.py
"""This module exports the Stylelint plugin class.""" import os from SublimeLinter.lint import Linter, util class Stylelint(Linter): """Provides an interface to stylelint.""" syntax = ('css', 'css3', 'sass', 'scss', 'postcss') cmd = ('node', os.path.dirname(os.path.realpath(__file__)) + '/stylelint_wrapper.js', '@') error_stream = util.STREAM_BOTH config_file = ('--config', '.stylelintrc', '~') tempfile_suffix = 'css' regex = ( r'^\s*(?P<line>[0-9]+)\:(?P<col>[0-9]+)\s*(?P<message>.+)' )
"""This module exports the Stylelint plugin class.""" import os from SublimeLinter.lint import Linter, util class Stylelint(Linter): """Provides an interface to stylelint.""" syntax = ('css', 'css3', 'sass', 'scss', 'postcss') cmd = ('node', os.path.dirname(os.path.realpath(__file__)) + '/stylelint_wrapper.js', '@') error_stream = util.STREAM_BOTH config_file = ('--config', '.stylelintrc', '~') tempfile_suffix = 'css' regex = ( r'^\s*(?P<line>[0-9]+)\:(?P<col>[0-9]+)\s*(?:(?P<error>✖)|(?P<warning>⚠))\s*(?P<message>.+)' )
Add support for handling errors and warnings
Add support for handling errors and warnings
Python
mit
lzwme/SublimeLinter-contrib-stylelint,lzwme/SublimeLinter-contrib-stylelint,kungfusheep/SublimeLinter-contrib-stylelint
"""This module exports the Stylelint plugin class.""" import os from SublimeLinter.lint import Linter, util class Stylelint(Linter): """Provides an interface to stylelint.""" syntax = ('css', 'css3', 'sass', 'scss', 'postcss') cmd = ('node', os.path.dirname(os.path.realpath(__file__)) + '/stylelint_wrapper.js', '@') error_stream = util.STREAM_BOTH config_file = ('--config', '.stylelintrc', '~') tempfile_suffix = 'css' regex = ( - r'^\s*(?P<line>[0-9]+)\:(?P<col>[0-9]+)\s*(?P<message>.+)' + r'^\s*(?P<line>[0-9]+)\:(?P<col>[0-9]+)\s*(?:(?P<error>✖)|(?P<warning>⚠))\s*(?P<message>.+)' )
Add support for handling errors and warnings
## Code Before:
"""This module exports the Stylelint plugin class."""

import os
from SublimeLinter.lint import Linter, util


class Stylelint(Linter):
    """Provides an interface to stylelint."""

    syntax = ('css', 'css3', 'sass', 'scss', 'postcss')
    cmd = ('node', os.path.dirname(os.path.realpath(__file__)) + '/stylelint_wrapper.js', '@')
    error_stream = util.STREAM_BOTH
    config_file = ('--config', '.stylelintrc', '~')
    tempfile_suffix = 'css'
    regex = (
        r'^\s*(?P<line>[0-9]+)\:(?P<col>[0-9]+)\s*(?P<message>.+)'
    )

## Instruction:
Add support for handling errors and warnings
## Code After:
"""This module exports the Stylelint plugin class."""

import os
from SublimeLinter.lint import Linter, util


class Stylelint(Linter):
    """Provides an interface to stylelint."""

    syntax = ('css', 'css3', 'sass', 'scss', 'postcss')
    cmd = ('node', os.path.dirname(os.path.realpath(__file__)) + '/stylelint_wrapper.js', '@')
    error_stream = util.STREAM_BOTH
    config_file = ('--config', '.stylelintrc', '~')
    tempfile_suffix = 'css'
    regex = (
        r'^\s*(?P<line>[0-9]+)\:(?P<col>[0-9]+)\s*(?:(?P<error>✖)|(?P<warning>⚠))\s*(?P<message>.+)'
    )
# ... existing code ...
    regex = (
        r'^\s*(?P<line>[0-9]+)\:(?P<col>[0-9]+)\s*(?:(?P<error>✖)|(?P<warning>⚠))\s*(?P<message>.+)'
    )
# ... rest of the code ...
c98ab8807440e3cdbb98e11c53c7f246c35614fe
dedupe/convenience.py
dedupe/convenience.py
import collections

import dedupe.core


def dataSample(data, sample_size):
    '''Randomly sample pairs of records from a data dictionary'''
    random_pairs = dedupe.core.randomPairs(len(data), sample_size)

    return tuple((data[k1], data[k2]) for k1, k2 in random_pairs)


def blockData(data_d, blocker):

    blocks = dedupe.core.OrderedDict({})
    record_blocks = dedupe.core.OrderedDict({})
    key_blocks = dedupe.core.OrderedDict({})

    blocker.tfIdfBlocks(data_d.iteritems())

    for (record_id, record) in data_d.iteritems():
        for key in blocker((record_id, record)):
            blocks.setdefault(key, {}).update({record_id : record})

    blocked_records = tuple(block for block in blocks.values())

    return blocked_records
import collections

import dedupe.core


def dataSample(data, sample_size):
    '''Randomly sample pairs of records from a data dictionary'''
    data_list = data.values()
    random_pairs = dedupe.core.randomPairs(len(data_list), sample_size)
    return tuple((data_list[k1], data_list[k2]) for k1, k2 in random_pairs)


def blockData(data_d, blocker):

    blocks = dedupe.core.OrderedDict({})
    record_blocks = dedupe.core.OrderedDict({})
    key_blocks = dedupe.core.OrderedDict({})

    blocker.tfIdfBlocks(data_d.iteritems())

    for (record_id, record) in data_d.iteritems():
        for key in blocker((record_id, record)):
            blocks.setdefault(key, {}).update({record_id : record})

    blocked_records = tuple(block for block in blocks.values())

    return blocked_records
Change dataSample to generate indices of random pair using list of values
Change dataSample to generate indices of random pair using list of values
Python
mit
nmiranda/dedupe,01-/dedupe,neozhangthe1/dedupe,neozhangthe1/dedupe,nmiranda/dedupe,davidkunio/dedupe,dedupeio/dedupe,dedupeio/dedupe-examples,datamade/dedupe,tfmorris/dedupe,tfmorris/dedupe,davidkunio/dedupe,01-/dedupe,datamade/dedupe,pombredanne/dedupe,dedupeio/dedupe,pombredanne/dedupe
  import collections

  import dedupe.core


  def dataSample(data, sample_size):
      '''Randomly sample pairs of records from a data dictionary'''
+     data_list = data.values()
+     random_pairs = dedupe.core.randomPairs(len(data_list), sample_size)
-     random_pairs = dedupe.core.randomPairs(len(data), sample_size)
-
-     return tuple((data[k1], data[k2]) for k1, k2 in random_pairs)
+     return tuple((data_list[k1], data_list[k2]) for k1, k2 in random_pairs)


  def blockData(data_d, blocker):

      blocks = dedupe.core.OrderedDict({})
      record_blocks = dedupe.core.OrderedDict({})
      key_blocks = dedupe.core.OrderedDict({})

      blocker.tfIdfBlocks(data_d.iteritems())

      for (record_id, record) in data_d.iteritems():
          for key in blocker((record_id, record)):
              blocks.setdefault(key, {}).update({record_id : record})

      blocked_records = tuple(block for block in blocks.values())

      return blocked_records
Change dataSample to generate indices of random pair using list of values
## Code Before:
import collections

import dedupe.core


def dataSample(data, sample_size):
    '''Randomly sample pairs of records from a data dictionary'''
    random_pairs = dedupe.core.randomPairs(len(data), sample_size)

    return tuple((data[k1], data[k2]) for k1, k2 in random_pairs)


def blockData(data_d, blocker):

    blocks = dedupe.core.OrderedDict({})
    record_blocks = dedupe.core.OrderedDict({})
    key_blocks = dedupe.core.OrderedDict({})

    blocker.tfIdfBlocks(data_d.iteritems())

    for (record_id, record) in data_d.iteritems():
        for key in blocker((record_id, record)):
            blocks.setdefault(key, {}).update({record_id : record})

    blocked_records = tuple(block for block in blocks.values())

    return blocked_records

## Instruction:
Change dataSample to generate indices of random pair using list of values
## Code After:
import collections

import dedupe.core


def dataSample(data, sample_size):
    '''Randomly sample pairs of records from a data dictionary'''
    data_list = data.values()
    random_pairs = dedupe.core.randomPairs(len(data_list), sample_size)
    return tuple((data_list[k1], data_list[k2]) for k1, k2 in random_pairs)


def blockData(data_d, blocker):

    blocks = dedupe.core.OrderedDict({})
    record_blocks = dedupe.core.OrderedDict({})
    key_blocks = dedupe.core.OrderedDict({})

    blocker.tfIdfBlocks(data_d.iteritems())

    for (record_id, record) in data_d.iteritems():
        for key in blocker((record_id, record)):
            blocks.setdefault(key, {}).update({record_id : record})

    blocked_records = tuple(block for block in blocks.values())

    return blocked_records
# ... existing code ...
    data_list = data.values()
    random_pairs = dedupe.core.randomPairs(len(data_list), sample_size)
    return tuple((data_list[k1], data_list[k2]) for k1, k2 in random_pairs)
# ... rest of the code ...
2cf7f70e352f8427cfb7d1dba309ee7d7e0ce5f4
markitup/urls.py
markitup/urls.py
from __future__ import unicode_literals

from django.conf.urls import patterns, url

from markitup.views import apply_filter

urlpatterns = patterns(
    '',
    url(r'preview/$', apply_filter, name='markitup_preview')
)
from __future__ import unicode_literals

from django.conf.urls import url

from markitup.views import apply_filter

urlpatterns = [
    url(r'preview/$', apply_filter, name='markitup_preview'),
]
Use plain Python list for urlpatterns.
Use plain Python list for urlpatterns.
Python
bsd-3-clause
zsiciarz/django-markitup,zsiciarz/django-markitup,carljm/django-markitup,carljm/django-markitup,carljm/django-markitup,zsiciarz/django-markitup
  from __future__ import unicode_literals

- from django.conf.urls import patterns, url
+ from django.conf.urls import url

  from markitup.views import apply_filter

+ urlpatterns = [
- urlpatterns = patterns(
-     '',
-     url(r'preview/$', apply_filter, name='markitup_preview')
+     url(r'preview/$', apply_filter, name='markitup_preview'),
- )
+ ]
Use plain Python list for urlpatterns.
## Code Before:
from __future__ import unicode_literals

from django.conf.urls import patterns, url

from markitup.views import apply_filter

urlpatterns = patterns(
    '',
    url(r'preview/$', apply_filter, name='markitup_preview')
)

## Instruction:
Use plain Python list for urlpatterns.
## Code After:
from __future__ import unicode_literals

from django.conf.urls import url

from markitup.views import apply_filter

urlpatterns = [
    url(r'preview/$', apply_filter, name='markitup_preview'),
]
// ... existing code ...
from django.conf.urls import url
// ... modified code ...
urlpatterns = [
    url(r'preview/$', apply_filter, name='markitup_preview'),
]
// ... rest of the code ...
2f02960607b75e74a757ded1e2472a5fb8585d4f
tests/pyb/extint.py
tests/pyb/extint.py
import pyb

ext = pyb.ExtInt('X1', pyb.ExtInt.IRQ_RISING, pyb.Pin.PULL_DOWN, lambda l:print('line:', l))
ext.disable()
ext.enable()
print(ext.line())
ext.swint()
ext.disable()
import pyb

# test basic functionality
ext = pyb.ExtInt('X1', pyb.ExtInt.IRQ_RISING, pyb.Pin.PULL_DOWN, lambda l:print('line:', l))
ext.disable()
ext.enable()
print(ext.line())
ext.swint()

# test swint while disabled, then again after re-enabled
ext.disable()
ext.swint()
ext.enable()
ext.swint()

# disable now that the test is finished
ext.disable()
Add test for ExtInt when doing swint while disabled.
tests/pyb: Add test for ExtInt when doing swint while disabled.
Python
mit
infinnovation/micropython,adafruit/circuitpython,turbinenreiter/micropython,dxxb/micropython,pfalcon/micropython,infinnovation/micropython,Timmenem/micropython,oopy/micropython,mhoffma/micropython,pfalcon/micropython,selste/micropython,adafruit/circuitpython,puuu/micropython,pfalcon/micropython,turbinenreiter/micropython,HenrikSolver/micropython,puuu/micropython,mhoffma/micropython,toolmacher/micropython,pozetroninc/micropython,Timmenem/micropython,tobbad/micropython,MrSurly/micropython,cwyark/micropython,blazewicz/micropython,MrSurly/micropython,kerneltask/micropython,alex-march/micropython,alex-robbins/micropython,lowRISC/micropython,blazewicz/micropython,matthewelse/micropython,pramasoul/micropython,adafruit/micropython,Peetz0r/micropython-esp32,bvernoux/micropython,lowRISC/micropython,chrisdearman/micropython,hiway/micropython,MrSurly/micropython-esp32,alex-robbins/micropython,trezor/micropython,jmarcelino/pycom-micropython,SHA2017-badge/micropython-esp32,puuu/micropython,trezor/micropython,turbinenreiter/micropython,HenrikSolver/micropython,tuc-osg/micropython,alex-march/micropython,lowRISC/micropython,deshipu/micropython,TDAbboud/micropython,chrisdearman/micropython,jmarcelino/pycom-micropython,Timmenem/micropython,oopy/micropython,mhoffma/micropython,toolmacher/micropython,pozetroninc/micropython,selste/micropython,tobbad/micropython,MrSurly/micropython,cwyark/micropython,blazewicz/micropython,kerneltask/micropython,ryannathans/micropython,mhoffma/micropython,toolmacher/micropython,bvernoux/micropython,lowRISC/micropython,blazewicz/micropython,matthewelse/micropython,kerneltask/micropython,alex-march/micropython,infinnovation/micropython,matthewelse/micropython,pramasoul/micropython,TDAbboud/micropython,chrisdearman/micropython,hiway/micropython,pfalcon/micropython,puuu/micropython,trezor/micropython,bvernoux/micropython,adafruit/micropython,infinnovation/micropython,Timmenem/micropython,deshipu/micropython,ryannathans/micropython,micropython/micropython-esp32,MrSurly/micropython-esp32,matthewelse/micropython,tralamazza/micropython,swegener/micropython,oopy/micropython,pozetroninc/micropython,PappaPeppar/micropython,AriZuu/micropython,pramasoul/micropython,micropython/micropython-esp32,Timmenem/micropython,tuc-osg/micropython,puuu/micropython,henriknelson/micropython,deshipu/micropython,ryannathans/micropython,Timmenem/micropython,oopy/micropython,cwyark/micropython,dxxb/micropython,dmazzella/micropython,mhoffma/micropython,toolmacher/micropython,TDAbboud/micropython,Peetz0r/micropython-esp32,PappaPeppar/micropython,swegener/micropython,chrisdearman/micropython,tuc-osg/micropython,turbinenreiter/micropython,dmazzella/micropython,hosaka/micropython,bvernoux/micropython,Timmenem/micropython,tobbad/micropython,hiway/micropython,cwyark/micropython,MrSurly/micropython-esp32,tobbad/micropython,hosaka/micropython,SHA2017-badge/micropython-esp32,cwyark/micropython,cwyark/micropython,tralamazza/micropython,alex-robbins/micropython,deshipu/micropython,alex-robbins/micropython,torwag/micropython,turbinenreiter/micropython,SHA2017-badge/micropython-esp32,swegener/micropython,MrSurly/micropython,alex-march/micropython,trezor/micropython,torwag/micropython,pfalcon/micropython,henriknelson/micropython,jmarcelino/pycom-micropython,adafruit/micropython,dxxb/micropython,pramasoul/micropython,trezor/micropython,dmazzella/micropython,adafruit/circuitpython,lowRISC/micropython,dxxb/micropython,hiway/micropython,pfalcon/micropython,swegener/micropython,infinnovation/micropython,adafruit/circuitpython,HenrikSolver/micropython,matthewelse/micropython,adafruit/circuitpython,henriknelson/micropython,SHA2017-badge/micropython-esp32,oopy/micropython,jmarcelino/pycom-micropython,adafruit/micropython,dxxb/micropython,chrisdearman/micropython,trezor/micropython,lowRISC/micropython,jmarcelino/pycom-micropython,hosaka/micropython,toolmacher/micropython,adafruit/circuitpython,tuc-osg/micropython,pozetroninc/micropython,matthewelse/micropython,torwag/micropython,selste/micropython,selste/micropython,tuc-osg/micropython,kerneltask/micropython,micropython/micropython-esp32,tralamazza/micropython,mhoffma/micropython,TDAbboud/micropython,ryannathans/micropython
  import pyb

+ # test basic functionality
  ext = pyb.ExtInt('X1', pyb.ExtInt.IRQ_RISING, pyb.Pin.PULL_DOWN, lambda l:print('line:', l))
  ext.disable()
  ext.enable()
  print(ext.line())
  ext.swint()
+
+ # test swint while disabled, then again after re-enabled
+ ext.disable()
+ ext.swint()
+ ext.enable()
+ ext.swint()
+
+ # disable now that the test is finished
  ext.disable()
Add test for ExtInt when doing swint while disabled.
## Code Before:
import pyb

ext = pyb.ExtInt('X1', pyb.ExtInt.IRQ_RISING, pyb.Pin.PULL_DOWN, lambda l:print('line:', l))
ext.disable()
ext.enable()
print(ext.line())
ext.swint()
ext.disable()

## Instruction:
Add test for ExtInt when doing swint while disabled.
## Code After:
import pyb

# test basic functionality
ext = pyb.ExtInt('X1', pyb.ExtInt.IRQ_RISING, pyb.Pin.PULL_DOWN, lambda l:print('line:', l))
ext.disable()
ext.enable()
print(ext.line())
ext.swint()

# test swint while disabled, then again after re-enabled
ext.disable()
ext.swint()
ext.enable()
ext.swint()

# disable now that the test is finished
ext.disable()
# ... existing code ...

# test basic functionality
ext = pyb.ExtInt('X1', pyb.ExtInt.IRQ_RISING, pyb.Pin.PULL_DOWN, lambda l:print('line:', l))
# ... modified code ...
ext.swint()

# test swint while disabled, then again after re-enabled
ext.disable()
ext.swint()
ext.enable()
ext.swint()

# disable now that the test is finished
ext.disable()
# ... rest of the code ...
f2ab04ec2eb870e661223fd397d7c5a23935a233
src/apps/employees/schema/types.py
src/apps/employees/schema/types.py
import graphene
from graphene_django.types import DjangoObjectType, ObjectType
from graphene_django_extras import (
    DjangoFilterPaginateListField,
    LimitOffsetGraphqlPagination
)

from apps.employees import models


class EmployeeType(DjangoObjectType):
    class Meta:
        model = models.Employee
        filter_fields = {
            'first_name': ['icontains', 'istartswith'],
            'last_name': ['icontains', 'istartswith'],
            'position': ['exact'],
            'id': ['exact']
        }
        interfaces = (graphene.relay.Node,)


class PositionType(DjangoObjectType):
    """
    Position graphQL type.

    Implemented total_employees and employees objects.
    """
    employees = DjangoFilterPaginateListField(
        EmployeeType,
        pagination=LimitOffsetGraphqlPagination()
    )
    total_employees = graphene.Int()

    def resolve_total_employees(self, info):
        return self.employees.count()

    def resolve_employees(self, info):
        return self.employees.all()

    class Meta:
        model = models.Position
        filter_fields = {
            'name': ['exact', 'icontains', 'istartswith'],
            'id': ['exact']
        }
        interfaces = (graphene.relay.Node,)


class SpecializationType(DjangoObjectType):
    class Meta:
        model = models.Specialization
        filter_fields = {
            'name': ['exact', 'icontains', 'istartswith'],
            'id': ['exact'],
        }
        interfaces = (graphene.relay.Node,)
import graphene
from graphene_django.types import DjangoObjectType, ObjectType
from graphene_django_extras import (
    DjangoFilterPaginateListField,
    LimitOffsetGraphqlPagination
)

from apps.employees import models


class EmployeeType(DjangoObjectType):
    class Meta:
        model = models.Employee
        filter_fields = {
            'first_name': ['icontains', 'istartswith'],
            'last_name': ['icontains', 'istartswith'],
            'position': ['exact'],
            'id': ['exact']
        }


class PositionType(DjangoObjectType):
    """
    Position graphQL type.

    Implemented total_employees and employees objects.
    """
    employees = DjangoFilterPaginateListField(
        EmployeeType,
        pagination=LimitOffsetGraphqlPagination()
    )
    total_employees = graphene.Int()

    def resolve_total_employees(self, info):
        return self.employees.count()

    def resolve_employees(self, info):
        return self.employees.all()

    class Meta:
        model = models.Position
        filter_fields = {
            'name': ['exact', 'icontains', 'istartswith'],
            'id': ['exact']
        }


class SpecializationType(DjangoObjectType):
    class Meta:
        model = models.Specialization
        filter_fields = {
            'name': ['exact', 'icontains', 'istartswith'],
            'id': ['exact'],
        }
Remove Node interfaces (use origin id for objects)
Remove Node interfaces (use origin id for objects)
Python
mit
wis-software/office-manager
  import graphene
  from graphene_django.types import DjangoObjectType, ObjectType
  from graphene_django_extras import (
      DjangoFilterPaginateListField,
      LimitOffsetGraphqlPagination
  )

  from apps.employees import models


  class EmployeeType(DjangoObjectType):
      class Meta:
          model = models.Employee
          filter_fields = {
              'first_name': ['icontains', 'istartswith'],
              'last_name': ['icontains', 'istartswith'],
              'position': ['exact'],
              'id': ['exact']
          }
-         interfaces = (graphene.relay.Node,)


  class PositionType(DjangoObjectType):
      """
      Position graphQL type.

      Implemented total_employees and employees objects.
      """
      employees = DjangoFilterPaginateListField(
          EmployeeType,
          pagination=LimitOffsetGraphqlPagination()
      )
      total_employees = graphene.Int()

      def resolve_total_employees(self, info):
          return self.employees.count()

      def resolve_employees(self, info):
          return self.employees.all()

      class Meta:
          model = models.Position
          filter_fields = {
              'name': ['exact', 'icontains', 'istartswith'],
              'id': ['exact']
          }
-         interfaces = (graphene.relay.Node,)


  class SpecializationType(DjangoObjectType):
      class Meta:
          model = models.Specialization
          filter_fields = {
              'name': ['exact', 'icontains', 'istartswith'],
              'id': ['exact'],
          }
-         interfaces = (graphene.relay.Node,)
Remove Node interfaces (use origin id for objects)
## Code Before:
import graphene
from graphene_django.types import DjangoObjectType, ObjectType
from graphene_django_extras import (
    DjangoFilterPaginateListField,
    LimitOffsetGraphqlPagination
)

from apps.employees import models


class EmployeeType(DjangoObjectType):
    class Meta:
        model = models.Employee
        filter_fields = {
            'first_name': ['icontains', 'istartswith'],
            'last_name': ['icontains', 'istartswith'],
            'position': ['exact'],
            'id': ['exact']
        }
        interfaces = (graphene.relay.Node,)


class PositionType(DjangoObjectType):
    """
    Position graphQL type.

    Implemented total_employees and employees objects.
    """
    employees = DjangoFilterPaginateListField(
        EmployeeType,
        pagination=LimitOffsetGraphqlPagination()
    )
    total_employees = graphene.Int()

    def resolve_total_employees(self, info):
        return self.employees.count()

    def resolve_employees(self, info):
        return self.employees.all()

    class Meta:
        model = models.Position
        filter_fields = {
            'name': ['exact', 'icontains', 'istartswith'],
            'id': ['exact']
        }
        interfaces = (graphene.relay.Node,)


class SpecializationType(DjangoObjectType):
    class Meta:
        model = models.Specialization
        filter_fields = {
            'name': ['exact', 'icontains', 'istartswith'],
            'id': ['exact'],
        }
        interfaces = (graphene.relay.Node,)

## Instruction:
Remove Node interfaces (use origin id for objects)
## Code After:
import graphene
from graphene_django.types import DjangoObjectType, ObjectType
from graphene_django_extras import (
    DjangoFilterPaginateListField,
    LimitOffsetGraphqlPagination
)

from apps.employees import models


class EmployeeType(DjangoObjectType):
    class Meta:
        model = models.Employee
        filter_fields = {
            'first_name': ['icontains', 'istartswith'],
            'last_name': ['icontains', 'istartswith'],
            'position': ['exact'],
            'id': ['exact']
        }


class PositionType(DjangoObjectType):
    """
    Position graphQL type.

    Implemented total_employees and employees objects.
    """
    employees = DjangoFilterPaginateListField(
        EmployeeType,
        pagination=LimitOffsetGraphqlPagination()
    )
    total_employees = graphene.Int()

    def resolve_total_employees(self, info):
        return self.employees.count()

    def resolve_employees(self, info):
        return self.employees.all()

    class Meta:
        model = models.Position
        filter_fields = {
            'name': ['exact', 'icontains', 'istartswith'],
            'id': ['exact']
        }


class SpecializationType(DjangoObjectType):
    class Meta:
        model = models.Specialization
        filter_fields = {
            'name': ['exact', 'icontains', 'istartswith'],
            'id': ['exact'],
        }
...
        }
...
        }
...
        }
...
53c39934e19fdad7926a8ad7833cd1737b47cf58
utilities/errors.py
utilities/errors.py
import os import simulators import numpy as np import json """Calculate Errors on the Spectrum. For a first go using an fixed SNR of 200 for all observations. """ def get_snrinfo(star, obs_num, chip): """Load SNR info from json file.""" snr_file = os.path.join(simulators.paths["spectra"], "detector_snrs.json") with open(snr_file, "r") as f: snr_data = json.load(f) try: return snr_data[str(star)][str(obs_num)][str(chip)] except KeyError as e: print("No snr data present for {0}-{1}_{2}".format(star, obs_num, chip)) raise e def spectrum_error(star, obs_num, chip, error_off=False): """Return the spectrum error. errors = None will perform a normal chi**2 statistic. """ if error_off: errors = None else: snr = get_snrinfo(star, obs_num, chip) if len(snr) == 1: errors = 1 / np.float(snr[0]) else: raise NotImplementedError("Haven't checked if an error array can be handled yet.") return errors
import os import simulators import numpy as np import json import warnings """Calculate Errors on the Spectrum. For a first go using an fixed SNR of 200 for all observations. """ def get_snrinfo(star, obs_num, chip): """Load SNR info from json file.""" snr_file = os.path.join(simulators.paths["spectra"], "detector_snrs.json") with open(snr_file, "r") as f: snr_data = json.load(f) try: return snr_data[str(star)][str(obs_num)][str(chip)] except KeyError as e: warnings.warn("No snr data present for {0}-{1}_{2}. " "Setting error to None instead".format(star, obs_num, chip)) return None def spectrum_error(star, obs_num, chip, error_off=False): """Return the spectrum error. errors = None will perform a normal chi**2 statistic. """ if error_off: errors = None else: snr = get_snrinfo(star, obs_num, chip) if snr is None: errors = None elif len(snr) == 1: errors = 1 / np.float(snr[0]) else: raise NotImplementedError("Haven't checked if an error array can be handled yet.") return errors
Handle no snr information in snr file. (for fake simualtions mainly)
Handle no snr information in snr file. (for fake simualtions mainly)
Python
mit
jason-neal/companion_simulations,jason-neal/companion_simulations
import os import simulators import numpy as np import json + import warnings """Calculate Errors on the Spectrum. For a first go using an fixed SNR of 200 for all observations. """ def get_snrinfo(star, obs_num, chip): """Load SNR info from json file.""" snr_file = os.path.join(simulators.paths["spectra"], "detector_snrs.json") with open(snr_file, "r") as f: snr_data = json.load(f) try: return snr_data[str(star)][str(obs_num)][str(chip)] except KeyError as e: - print("No snr data present for {0}-{1}_{2}".format(star, obs_num, chip)) - raise e + warnings.warn("No snr data present for {0}-{1}_{2}. " + "Setting error to None instead".format(star, obs_num, chip)) + return None def spectrum_error(star, obs_num, chip, error_off=False): """Return the spectrum error. errors = None will perform a normal chi**2 statistic. """ if error_off: errors = None else: snr = get_snrinfo(star, obs_num, chip) + if snr is None: + errors = None - if len(snr) == 1: + elif len(snr) == 1: errors = 1 / np.float(snr[0]) else: raise NotImplementedError("Haven't checked if an error array can be handled yet.") return errors
Handle no snr information in snr file. (for fake simualtions mainly)
## Code Before:
from oslo_policy import policy

from keystone.common.policies import base

revoke_event_policies = [
    policy.DocumentedRuleDefault(
        name=base.IDENTITY % 'list_revoke_events',
        check_str=base.RULE_SERVICE_OR_ADMIN,
        description='List revocation events.',
        operations=[{'path': '/v3/OS-REVOKE/events',
                     'method': 'GET'}])
]


def list_rules():
    return revoke_event_policies

## Instruction:
Add scope_types for revoke event policies
## Code After:
from oslo_policy import policy

from keystone.common.policies import base

revoke_event_policies = [
    policy.DocumentedRuleDefault(
        name=base.IDENTITY % 'list_revoke_events',
        check_str=base.RULE_SERVICE_OR_ADMIN,
        # NOTE(lbragstad): This API was originally introduced so that services
        # could invalidate tokens based on revocation events. This is system
        # specific so it make sense to associate `system` as the scope type
        # required for this policy.
        scope_types=['system'],
        description='List revocation events.',
        operations=[{'path': '/v3/OS-REVOKE/events',
                     'method': 'GET'}])
]


def list_rules():
    return revoke_event_policies
# ... existing code ...
        check_str=base.RULE_SERVICE_OR_ADMIN,
        # NOTE(lbragstad): This API was originally introduced so that services
        # could invalidate tokens based on revocation events. This is system
        # specific so it make sense to associate `system` as the scope type
        # required for this policy.
        scope_types=['system'],
        description='List revocation events.',
# ... rest of the code ...
d8a861c47df6b41c27f2ec43474766284ba728af
bot/logger/message_sender/reusable/limiter/group.py
bot/logger/message_sender/reusable/limiter/group.py
from bot.logger.message_sender.message_builder import MessageBuilder
from bot.logger.message_sender.reusable.limiter import ReusableMessageLimiter


class ReusableMessageLimiterGroup(ReusableMessageLimiter):
    def __init__(self, *limiters: ReusableMessageLimiter):
        self.limiters = limiters

    def should_issue_new_message_pre_add(self, new_text):
        return self.__any_limiter(lambda limiter: limiter.should_issue_new_message_pre_add(new_text))

    def should_issue_new_message_post_add(self, builder: MessageBuilder):
        return self.__any_limiter(lambda limiter: limiter.should_issue_new_message_post_add(builder))

    def __any_limiter(self, func: callable):
        return any((func(limiter) for limiter in self.limiters))

    def notify_new_message_issued(self):
        for limiter in self.limiters:
            limiter.notify_new_message_issued()
from bot.logger.message_sender.message_builder import MessageBuilder
from bot.logger.message_sender.reusable.limiter import ReusableMessageLimiter


class ReusableMessageLimiterGroup(ReusableMessageLimiter):
    def __init__(self, *limiters: ReusableMessageLimiter):
        self.limiters = limiters

    def should_issue_new_message_pre_add(self, new_text):
        return self.__any_limiter(lambda limiter: limiter.should_issue_new_message_pre_add(new_text))

    def should_issue_new_message_post_add(self, builder: MessageBuilder):
        return self.__any_limiter(lambda limiter: limiter.should_issue_new_message_post_add(builder))

    def __any_limiter(self, func: callable):
        return any((func(limiter) for limiter in self.limiters))

    def notify_new_message_issued(self):
        for limiter in self.limiters:
            limiter.notify_new_message_issued()

    def notify_about_to_send_message(self):
        for limiter in self.limiters:
            limiter.notify_about_to_send_message()
Make ReusableMessageGroup broadcast the notify_about_to_send_message to all limiters
Make ReusableMessageGroup broadcast the notify_about_to_send_message to all limiters
Python
agpl-3.0
alvarogzp/telegram-bot,alvarogzp/telegram-bot
  from bot.logger.message_sender.message_builder import MessageBuilder
  from bot.logger.message_sender.reusable.limiter import ReusableMessageLimiter


  class ReusableMessageLimiterGroup(ReusableMessageLimiter):
      def __init__(self, *limiters: ReusableMessageLimiter):
          self.limiters = limiters

      def should_issue_new_message_pre_add(self, new_text):
          return self.__any_limiter(lambda limiter: limiter.should_issue_new_message_pre_add(new_text))

      def should_issue_new_message_post_add(self, builder: MessageBuilder):
          return self.__any_limiter(lambda limiter: limiter.should_issue_new_message_post_add(builder))

      def __any_limiter(self, func: callable):
          return any((func(limiter) for limiter in self.limiters))

      def notify_new_message_issued(self):
          for limiter in self.limiters:
              limiter.notify_new_message_issued()
+
+     def notify_about_to_send_message(self):
+         for limiter in self.limiters:
+             limiter.notify_about_to_send_message()
+
Make ReusableMessageGroup broadcast the notify_about_to_send_message to all limiters
## Code Before:
from bot.logger.message_sender.message_builder import MessageBuilder
from bot.logger.message_sender.reusable.limiter import ReusableMessageLimiter


class ReusableMessageLimiterGroup(ReusableMessageLimiter):
    def __init__(self, *limiters: ReusableMessageLimiter):
        self.limiters = limiters

    def should_issue_new_message_pre_add(self, new_text):
        return self.__any_limiter(lambda limiter: limiter.should_issue_new_message_pre_add(new_text))

    def should_issue_new_message_post_add(self, builder: MessageBuilder):
        return self.__any_limiter(lambda limiter: limiter.should_issue_new_message_post_add(builder))

    def __any_limiter(self, func: callable):
        return any((func(limiter) for limiter in self.limiters))

    def notify_new_message_issued(self):
        for limiter in self.limiters:
            limiter.notify_new_message_issued()

## Instruction:
Make ReusableMessageGroup broadcast the notify_about_to_send_message to all limiters
## Code After:
from bot.logger.message_sender.message_builder import MessageBuilder
from bot.logger.message_sender.reusable.limiter import ReusableMessageLimiter


class ReusableMessageLimiterGroup(ReusableMessageLimiter):
    def __init__(self, *limiters: ReusableMessageLimiter):
        self.limiters = limiters

    def should_issue_new_message_pre_add(self, new_text):
        return self.__any_limiter(lambda limiter: limiter.should_issue_new_message_pre_add(new_text))

    def should_issue_new_message_post_add(self, builder: MessageBuilder):
        return self.__any_limiter(lambda limiter: limiter.should_issue_new_message_post_add(builder))

    def __any_limiter(self, func: callable):
        return any((func(limiter) for limiter in self.limiters))

    def notify_new_message_issued(self):
        for limiter in self.limiters:
            limiter.notify_new_message_issued()

    def notify_about_to_send_message(self):
        for limiter in self.limiters:
            limiter.notify_about_to_send_message()
// ... existing code ...
            limiter.notify_new_message_issued()

    def notify_about_to_send_message(self):
        for limiter in self.limiters:
            limiter.notify_about_to_send_message()
// ... rest of the code ...
5702672ab40ef23089c7a2dfee22aaf539b19a54
dpaste/settings/tests.py
dpaste/settings/tests.py
from .base import *
from .base import *

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': ':memory:',
    }
}
Use in-memory sqlite db for testing.
Use in-memory sqlite db for testing.
Python
mit
bartTC/dpaste,bartTC/dpaste,bartTC/dpaste
  from .base import *
+
+ DATABASES = {
+     'default': {
+         'ENGINE': 'django.db.backends.sqlite3',
+         'NAME': ':memory:',
+     }
+ }
+
Use in-memory sqlite db for testing.
## Code Before:
from .base import *

## Instruction:
Use in-memory sqlite db for testing.
## Code After:
from .base import *

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': ':memory:',
    }
}
# ... existing code ...
from .base import *

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': ':memory:',
    }
}
# ... rest of the code ...
f828ac9ee5082a9a0b5e215c4c814e7f35db11b6
planetstack/core/models/__init__.py
planetstack/core/models/__init__.py
from .plcorebase import PlCoreBase
from .planetstack import PlanetStack
from .project import Project
from .singletonmodel import SingletonModel
from .service import Service
from .service import ServiceAttribute
from .tag import Tag
from .role import Role
from .site import Site,Deployment, DeploymentRole, DeploymentPrivilege, SiteDeployments
from .dashboard import DashboardView
from .user import User, UserDashboardView
from .serviceclass import ServiceClass
from .slice import Slice, SliceDeployments
from .site import SitePrivilege, SiteDeployments
from .userdeployments import UserDeployments
from .image import Image, ImageDeployments
from .node import Node
from .serviceresource import ServiceResource
from .slice import SliceRole
from .slice import SlicePrivilege
from .site import SiteRole
from .site import SitePrivilege
from .planetstack import PlanetStackRole
from .planetstack import PlanetStackPrivilege
from .slicetag import SliceTag
from .flavor import Flavor
from .sliver import Sliver
from .reservation import ReservedResource
from .reservation import Reservation
from .network import Network, NetworkParameterType, NetworkParameter, NetworkSliver, NetworkTemplate, Router, NetworkSlice, NetworkDeployments
from .billing import Account, Invoice, Charge, UsableObject, Payment
from .plcorebase import PlCoreBase
from .planetstack import PlanetStack
from .project import Project
from .singletonmodel import SingletonModel
from .service import Service
from .service import ServiceAttribute
from .tag import Tag
from .role import Role
from .site import Site,Deployment, DeploymentRole, DeploymentPrivilege, SiteDeployments
from .dashboard import DashboardView
from .user import User, UserDashboardView
from .serviceclass import ServiceClass
from .slice import Slice, SliceDeployments
from .site import SitePrivilege, SiteDeployments
from .userdeployments import UserDeployments
from .image import Image, ImageDeployments
from .node import Node
from .serviceresource import ServiceResource
from .slice import SliceRole
from .slice import SlicePrivilege
from .credential import UserCredential,SiteCredential,SliceCredential
from .site import SiteRole
from .site import SitePrivilege
from .planetstack import PlanetStackRole
from .planetstack import PlanetStackPrivilege
from .slicetag import SliceTag
from .flavor import Flavor
from .sliver import Sliver
from .reservation import ReservedResource
from .reservation import Reservation
from .network import Network, NetworkParameterType, NetworkParameter, NetworkSliver, NetworkTemplate, Router, NetworkSlice, NetworkDeployments
from .billing import Account, Invoice, Charge, UsableObject, Payment
Add credentials module to core list
Add credentials module to core list
Python
apache-2.0
wathsalav/xos,wathsalav/xos,wathsalav/xos,wathsalav/xos
  from .plcorebase import PlCoreBase
  from .planetstack import PlanetStack
  from .project import Project
  from .singletonmodel import SingletonModel
  from .service import Service
  from .service import ServiceAttribute
  from .tag import Tag
  from .role import Role
  from .site import Site,Deployment, DeploymentRole, DeploymentPrivilege, SiteDeployments
  from .dashboard import DashboardView
  from .user import User, UserDashboardView
  from .serviceclass import ServiceClass
  from .slice import Slice, SliceDeployments
  from .site import SitePrivilege, SiteDeployments
  from .userdeployments import UserDeployments
  from .image import Image, ImageDeployments
  from .node import Node
  from .serviceresource import ServiceResource
  from .slice import SliceRole
  from .slice import SlicePrivilege
+ from .credential import UserCredential,SiteCredential,SliceCredential
  from .site import SiteRole
  from .site import SitePrivilege
  from .planetstack import PlanetStackRole
  from .planetstack import PlanetStackPrivilege
  from .slicetag import SliceTag
  from .flavor import Flavor
  from .sliver import Sliver
  from .reservation import ReservedResource
  from .reservation import Reservation
  from .network import Network, NetworkParameterType, NetworkParameter, NetworkSliver, NetworkTemplate, Router, NetworkSlice, NetworkDeployments
  from .billing import Account, Invoice, Charge, UsableObject, Payment
Add credentials module to core list
## Code Before:
from .plcorebase import PlCoreBase
from .planetstack import PlanetStack
from .project import Project
from .singletonmodel import SingletonModel
from .service import Service
from .service import ServiceAttribute
from .tag import Tag
from .role import Role
from .site import Site,Deployment, DeploymentRole, DeploymentPrivilege, SiteDeployments
from .dashboard import DashboardView
from .user import User, UserDashboardView
from .serviceclass import ServiceClass
from .slice import Slice, SliceDeployments
from .site import SitePrivilege, SiteDeployments
from .userdeployments import UserDeployments
from .image import Image, ImageDeployments
from .node import Node
from .serviceresource import ServiceResource
from .slice import SliceRole
from .slice import SlicePrivilege
from .site import SiteRole
from .site import SitePrivilege
from .planetstack import PlanetStackRole
from .planetstack import PlanetStackPrivilege
from .slicetag import SliceTag
from .flavor import Flavor
from .sliver import Sliver
from .reservation import ReservedResource
from .reservation import Reservation
from .network import Network, NetworkParameterType, NetworkParameter, NetworkSliver, NetworkTemplate, Router, NetworkSlice, NetworkDeployments
from .billing import Account, Invoice, Charge, UsableObject, Payment

## Instruction:
Add credentials module to core list
## Code After:
from .plcorebase import PlCoreBase
from .planetstack import PlanetStack
from .project import Project
from .singletonmodel import SingletonModel
from .service import Service
from .service import ServiceAttribute
from .tag import Tag
from .role import Role
from .site import Site,Deployment, DeploymentRole, DeploymentPrivilege, SiteDeployments
from .dashboard import DashboardView
from .user import User, UserDashboardView
from .serviceclass import ServiceClass
from .slice import Slice, SliceDeployments
from .site import SitePrivilege, SiteDeployments
from .userdeployments import UserDeployments
from .image import Image, ImageDeployments
from .node import Node
from .serviceresource import ServiceResource
from .slice import SliceRole
from .slice import SlicePrivilege
from .credential import UserCredential,SiteCredential,SliceCredential
from .site import SiteRole
from .site import SitePrivilege
from .planetstack import PlanetStackRole
from .planetstack import PlanetStackPrivilege
from .slicetag import SliceTag
from .flavor import Flavor
from .sliver import Sliver
from .reservation import ReservedResource
from .reservation import Reservation
from .network import Network, NetworkParameterType, NetworkParameter, NetworkSliver, NetworkTemplate, Router, NetworkSlice, NetworkDeployments
from .billing import Account, Invoice, Charge, UsableObject, Payment
# ... existing code ...
from .slice import SlicePrivilege
from .credential import UserCredential,SiteCredential,SliceCredential
from .site import SiteRole
# ... rest of the code ...
6daef6533c1cc830aead7d7334f8baf78e8624d1
froide/foirequest/file_utils.py
froide/foirequest/file_utils.py
import os
import tempfile
import subprocess
import logging


def convert_to_pdf(filepath, binary_name=None, construct_call=None):
    if binary_name is None and construct_call is None:
        return
    outpath = tempfile.mkdtemp()
    path, filename = os.path.split(filepath)
    name, extension = filename.rsplit('.', 1)
    output_file = os.path.join(outpath, '%s.pdf' % name)
    arguments = [
        binary_name,
        "--headless",
        "--convert-to",
        "pdf",
        "--outdir",
        outpath,
        filepath
    ]
    if construct_call is not None:
        arguments, output_file = construct_call(filepath, outpath)

    # Set different HOME so libreoffice can write to it
    env = dict(os.environ)
    env.update({'HOME': outpath})
    logging.info("Running: %s", ' '.join(arguments))
    logging.info("Env: %s", env)
    p = subprocess.Popen(
        arguments,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        env=env
    )
    out, err = p.communicate()
    p.wait()
    if p.returncode == 0:
        if os.path.exists(output_file):
            return output_file
    else:
        logging.error("Error during Doc to PDF conversion: %s", err)
    return None
import os
import tempfile
import subprocess
import logging

try:
    TimeoutExpired = subprocess.TimeoutExpired
    HAS_TIMEOUT = True
except AttributeError:
    TimeoutExpired = Exception
    HAS_TIMEOUT = False


def convert_to_pdf(filepath, binary_name=None, construct_call=None, timeout=50):
    if binary_name is None and construct_call is None:
        return
    outpath = tempfile.mkdtemp()
    path, filename = os.path.split(filepath)
    name, extension = filename.rsplit('.', 1)
    output_file = os.path.join(outpath, '%s.pdf' % name)
    arguments = [
        binary_name,
        "--headless",
        "--convert-to",
        "pdf",
        "--outdir",
        outpath,
        filepath
    ]
    if construct_call is not None:
        arguments, output_file = construct_call(filepath, outpath)

    # Set different HOME so libreoffice can write to it
    env = dict(os.environ)
    env.update({'HOME': outpath})
    logging.info("Running: %s", ' '.join(arguments))
    logging.info("Env: %s", env)
    try:
        p = subprocess.Popen(
            arguments,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            env=env
        )
        kwargs = {}
        if HAS_TIMEOUT:
            kwargs['timeout'] = timeout

        out, err = p.communicate(**kwargs)
        p.wait()
    except TimeoutExpired:
        p.kill()
        out, err = p.communicate()
    finally:
        if p.returncode is None:
            p.kill()
            out, err = p.communicate()
    if p.returncode == 0:
        if os.path.exists(output_file):
            return output_file
    else:
        logging.error("Error during Doc to PDF conversion: %s", err)
    return None
Add better timeout killing to file conversion
Add better timeout killing to file conversion
Python
mit
stefanw/froide,stefanw/froide,stefanw/froide,stefanw/froide,fin/froide,stefanw/froide,fin/froide,fin/froide,fin/froide
  import os
  import tempfile
  import subprocess
  import logging

+ try:
+     TimeoutExpired = subprocess.TimeoutExpired
+     HAS_TIMEOUT = True
+ except AttributeError:
+     TimeoutExpired = Exception
+     HAS_TIMEOUT = False
+
- def convert_to_pdf(filepath, binary_name=None, construct_call=None):
+ def convert_to_pdf(filepath, binary_name=None, construct_call=None, timeout=50):
      if binary_name is None and construct_call is None:
          return
      outpath = tempfile.mkdtemp()
      path, filename = os.path.split(filepath)
      name, extension = filename.rsplit('.', 1)
      output_file = os.path.join(outpath, '%s.pdf' % name)
      arguments = [
          binary_name,
          "--headless",
          "--convert-to",
          "pdf",
          "--outdir",
          outpath,
          filepath
      ]
      if construct_call is not None:
          arguments, output_file = construct_call(filepath, outpath)

      # Set different HOME so libreoffice can write to it
      env = dict(os.environ)
      env.update({'HOME': outpath})
      logging.info("Running: %s", ' '.join(arguments))
      logging.info("Env: %s", env)
+     try:
+         p = subprocess.Popen(
+             arguments,
+             stdout=subprocess.PIPE,
+             stderr=subprocess.PIPE,
+             env=env
+         )
-     p = subprocess.Popen(
-         arguments,
-         stdout=subprocess.PIPE,
-         stderr=subprocess.PIPE,
-         env=env
-     )
+         kwargs = {}
+         if HAS_TIMEOUT:
+             kwargs['timeout'] = timeout
+
+         out, err = p.communicate(**kwargs)
+         p.wait()
+     except TimeoutExpired:
+         p.kill()
-     out, err = p.communicate()
+         out, err = p.communicate()
-     p.wait()
+     finally:
+         if p.returncode is None:
+             p.kill()
+             out, err = p.communicate()
      if p.returncode == 0:
          if os.path.exists(output_file):
              return output_file
      else:
          logging.error("Error during Doc to PDF conversion: %s", err)
      return None
Add better timeout killing to file conversion
## Code Before:
import os
import tempfile
import subprocess
import logging


def convert_to_pdf(filepath, binary_name=None, construct_call=None):
    if binary_name is None and construct_call is None:
        return
    outpath = tempfile.mkdtemp()
    path, filename = os.path.split(filepath)
    name, extension = filename.rsplit('.', 1)
    output_file = os.path.join(outpath, '%s.pdf' % name)
    arguments = [
        binary_name,
        "--headless",
        "--convert-to",
        "pdf",
        "--outdir",
        outpath,
        filepath
    ]
    if construct_call is not None:
        arguments, output_file = construct_call(filepath, outpath)

    # Set different HOME so libreoffice can write to it
    env = dict(os.environ)
    env.update({'HOME': outpath})
    logging.info("Running: %s", ' '.join(arguments))
    logging.info("Env: %s", env)
    p = subprocess.Popen(
        arguments,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        env=env
    )
    out, err = p.communicate()
    p.wait()
    if p.returncode == 0:
        if os.path.exists(output_file):
            return output_file
    else:
        logging.error("Error during Doc to PDF conversion: %s", err)
    return None

## Instruction:
Add better timeout killing to file conversion
## Code After:
import os
import tempfile
import subprocess
import logging

try:
    TimeoutExpired = subprocess.TimeoutExpired
    HAS_TIMEOUT = True
except AttributeError:
    TimeoutExpired = Exception
    HAS_TIMEOUT = False


def convert_to_pdf(filepath, binary_name=None, construct_call=None, timeout=50):
    if binary_name is None and construct_call is None:
        return
    outpath = tempfile.mkdtemp()
    path, filename = os.path.split(filepath)
    name, extension = filename.rsplit('.', 1)
    output_file = os.path.join(outpath, '%s.pdf' % name)
    arguments = [
        binary_name,
        "--headless",
        "--convert-to",
        "pdf",
        "--outdir",
        outpath,
        filepath
    ]
    if construct_call is not None:
        arguments, output_file = construct_call(filepath, outpath)

    # Set different HOME so libreoffice can write to it
    env = dict(os.environ)
    env.update({'HOME': outpath})
    logging.info("Running: %s", ' '.join(arguments))
    logging.info("Env: %s", env)
    try:
        p = subprocess.Popen(
            arguments,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            env=env
        )
        kwargs = {}
        if HAS_TIMEOUT:
            kwargs['timeout'] = timeout

        out, err = p.communicate(**kwargs)
        p.wait()
    except TimeoutExpired:
        p.kill()
        out, err = p.communicate()
    finally:
        if p.returncode is None:
            p.kill()
            out, err = p.communicate()
    if p.returncode == 0:
        if os.path.exists(output_file):
            return output_file
    else:
        logging.error("Error during Doc to PDF conversion: %s", err)
    return None
// ... existing code ...
import logging

try:
    TimeoutExpired = subprocess.TimeoutExpired
    HAS_TIMEOUT = True
except AttributeError:
    TimeoutExpired = Exception
    HAS_TIMEOUT = False


def convert_to_pdf(filepath, binary_name=None, construct_call=None, timeout=50):
    if binary_name is None and construct_call is None:
// ... modified code ...
    logging.info("Env: %s", env)
    try:
        p = subprocess.Popen(
            arguments,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            env=env
        )
        kwargs = {}
        if HAS_TIMEOUT:
            kwargs['timeout'] = timeout

        out, err = p.communicate(**kwargs)
        p.wait()
    except TimeoutExpired:
        p.kill()
        out, err = p.communicate()
    finally:
        if p.returncode is None:
            p.kill()
            out, err = p.communicate()
    if p.returncode == 0:
// ... rest of the code ...
a0e432b0ac31ed74256197b1d5df8b6f8a0987db
product/models.py
product/models.py
from django.db import models
from django.utils.translation import pgettext as _

from django_prices.models import PriceField
from satchless.util.models import Subtyped
from satchless.item import ItemRange
from mptt.models import MPTTModel


class Category(MPTTModel):
    name = models.CharField(_('Category field', 'name'), max_length=128)
    slug = models.SlugField(_('Category field', 'slug'), max_length=50,
                            unique=True)
    description = models.TextField(_('Category field', 'description'),
                                   blank=True)
    parent = models.ForeignKey('self', null=True, related_name='children',
                               blank=True,
                               verbose_name=_('Category field', 'parent'))

    def __unicode__(self):
        return self.name


class Product(Subtyped, ItemRange):
    name = models.CharField(_('Product field', 'name'), max_length=128)
    slug = models.SlugField(_('Product field', 'slug'), max_length=50,
                            unique=True)
    price = PriceField(_('Product field', 'price'), currency='USD',
                       max_digits=12, decimal_places=4)
    category = models.ForeignKey(Category,
                                 verbose_name=_('Product field', 'category'))

    def __unicode__(self):
        return self.name
from django.db import models
from django.utils.safestring import mark_safe
from django.utils.translation import pgettext as _

from django_prices.models import PriceField
from mptt.models import MPTTModel
from satchless.item import ItemRange
from satchless.util.models import Subtyped
from unidecode import unidecode

import re


class Category(MPTTModel):
    name = models.CharField(_('Category field', 'name'), max_length=128)
    slug = models.SlugField(_('Category field', 'slug'), max_length=50,
                            unique=True)
    description = models.TextField(_('Category field', 'description'),
                                   blank=True)
    parent = models.ForeignKey('self', null=True, related_name='children',
                               blank=True,
                               verbose_name=_('Category field', 'parent'))

    def __unicode__(self):
        return self.name


class Product(Subtyped, ItemRange):
    name = models.CharField(_('Product field', 'name'), max_length=128)
    price = PriceField(_('Product field', 'price'), currency='USD',
                       max_digits=12, decimal_places=4)
    category = models.ForeignKey(Category,
                                 verbose_name=_('Product field', 'category'))

    def __unicode__(self):
        return self.name

    def get_slug(self):
        value = unidecode(self.name)
        value = re.sub('[^\w\s-]', '', value).strip().lower()

        return mark_safe(re.sub('[-\s]+', '-', value))

    @models.permalink
    def get_absolute_url(self):
        return ('product:details', [self.get_slug(), self.id])
Replace slug field with get_slug function
Replace slug field with get_slug function
Python
bsd-3-clause
laosunhust/saleor,mociepka/saleor,paweltin/saleor,mociepka/saleor,jreigel/saleor,taedori81/saleor,UITools/saleor,UITools/saleor,spartonia/saleor,car3oon/saleor,Drekscott/Motlaesaleor,UITools/saleor,HyperManTT/ECommerceSaleor,paweltin/saleor,maferelo/saleor,dashmug/saleor,rodrigozn/CW-Shop,laosunhust/saleor,avorio/saleor,hongquan/saleor,taedori81/saleor,paweltin/saleor,tfroehlich82/saleor,rodrigozn/CW-Shop,car3oon/saleor,arth-co/saleor,HyperManTT/ECommerceSaleor,mociepka/saleor,tfroehlich82/saleor,josesanch/saleor,arth-co/saleor,avorio/saleor,itbabu/saleor,HyperManTT/ECommerceSaleor,itbabu/saleor,arth-co/saleor,Drekscott/Motlaesaleor,rodrigozn/CW-Shop,jreigel/saleor,rchav/vinerack,taedori81/saleor,taedori81/saleor,avorio/saleor,spartonia/saleor,itbabu/saleor,avorio/saleor,paweltin/saleor,jreigel/saleor,KenMutemi/saleor,laosunhust/saleor,josesanch/saleor,UITools/saleor,dashmug/saleor,rchav/vinerack,KenMutemi/saleor,arth-co/saleor,KenMutemi/saleor,UITools/saleor,Drekscott/Motlaesaleor,maferelo/saleor,spartonia/saleor,car3oon/saleor,hongquan/saleor,laosunhust/saleor,Drekscott/Motlaesaleor,dashmug/saleor,rchav/vinerack,maferelo/saleor,josesanch/saleor,hongquan/saleor,tfroehlich82/saleor,spartonia/saleor
from django.db import models
+ from django.utils.safestring import mark_safe
from django.utils.translation import pgettext as _
from django_prices.models import PriceField
+ from mptt.models import MPTTModel
+ from satchless.item import ItemRange
from satchless.util.models import Subtyped
- from satchless.item import ItemRange
- from mptt.models import MPTTModel
+ from unidecode import unidecode
+ import re


class Category(MPTTModel):
    name = models.CharField(_('Category field', 'name'), max_length=128)
    slug = models.SlugField(_('Category field', 'slug'), max_length=50,
                            unique=True)
    description = models.TextField(_('Category field', 'description'),
                                   blank=True)
    parent = models.ForeignKey('self', null=True, related_name='children',
                               blank=True,
                               verbose_name=_('Category field', 'parent'))

    def __unicode__(self):
        return self.name


class Product(Subtyped, ItemRange):
    name = models.CharField(_('Product field', 'name'), max_length=128)
-     slug = models.SlugField(_('Product field', 'slug'), max_length=50,
-                             unique=True)
    price = PriceField(_('Product field', 'price'), currency='USD',
                       max_digits=12, decimal_places=4)
    category = models.ForeignKey(Category,
                                 verbose_name=_('Product field', 'category'))

    def __unicode__(self):
        return self.name
+
+     def get_slug(self):
+         value = unidecode(self.name)
+         value = re.sub('[^\w\s-]', '', value).strip().lower()
+
+         return mark_safe(re.sub('[-\s]+', '-', value))
+
+     @models.permalink
+     def get_absolute_url(self):
+         return ('product:details', [self.get_slug(), self.id])
+
Replace slug field with get_slug function
## Code Before:
from django.db import models
from django.utils.translation import pgettext as _
from django_prices.models import PriceField
from satchless.util.models import Subtyped
from satchless.item import ItemRange
from mptt.models import MPTTModel


class Category(MPTTModel):
    name = models.CharField(_('Category field', 'name'), max_length=128)
    slug = models.SlugField(_('Category field', 'slug'), max_length=50,
                            unique=True)
    description = models.TextField(_('Category field', 'description'),
                                   blank=True)
    parent = models.ForeignKey('self', null=True, related_name='children',
                               blank=True,
                               verbose_name=_('Category field', 'parent'))

    def __unicode__(self):
        return self.name


class Product(Subtyped, ItemRange):
    name = models.CharField(_('Product field', 'name'), max_length=128)
    slug = models.SlugField(_('Product field', 'slug'), max_length=50,
                            unique=True)
    price = PriceField(_('Product field', 'price'), currency='USD',
                       max_digits=12, decimal_places=4)
    category = models.ForeignKey(Category,
                                 verbose_name=_('Product field', 'category'))

    def __unicode__(self):
        return self.name

## Instruction:
Replace slug field with get_slug function

## Code After:
from django.db import models
from django.utils.safestring import mark_safe
from django.utils.translation import pgettext as _
from django_prices.models import PriceField
from mptt.models import MPTTModel
from satchless.item import ItemRange
from satchless.util.models import Subtyped
from unidecode import unidecode
import re


class Category(MPTTModel):
    name = models.CharField(_('Category field', 'name'), max_length=128)
    slug = models.SlugField(_('Category field', 'slug'), max_length=50,
                            unique=True)
    description = models.TextField(_('Category field', 'description'),
                                   blank=True)
    parent = models.ForeignKey('self', null=True, related_name='children',
                               blank=True,
                               verbose_name=_('Category field', 'parent'))

    def __unicode__(self):
        return self.name


class Product(Subtyped, ItemRange):
    name = models.CharField(_('Product field', 'name'), max_length=128)
    price = PriceField(_('Product field', 'price'), currency='USD',
                       max_digits=12, decimal_places=4)
    category = models.ForeignKey(Category,
                                 verbose_name=_('Product field', 'category'))

    def __unicode__(self):
        return self.name

    def get_slug(self):
        value = unidecode(self.name)
        value = re.sub('[^\w\s-]', '', value).strip().lower()

        return mark_safe(re.sub('[-\s]+', '-', value))

    @models.permalink
    def get_absolute_url(self):
        return ('product:details', [self.get_slug(), self.id])
# ... existing code ...
from django.db import models
from django.utils.safestring import mark_safe
from django.utils.translation import pgettext as _
# ... modified code ...
from django_prices.models import PriceField
from mptt.models import MPTTModel
from satchless.item import ItemRange
from satchless.util.models import Subtyped
from unidecode import unidecode
import re
...
    name = models.CharField(_('Product field', 'name'), max_length=128)
    price = PriceField(_('Product field', 'price'), currency='USD',
...
        return self.name

    def get_slug(self):
        value = unidecode(self.name)
        value = re.sub('[^\w\s-]', '', value).strip().lower()

        return mark_safe(re.sub('[-\s]+', '-', value))

    @models.permalink
    def get_absolute_url(self):
        return ('product:details', [self.get_slug(), self.id])
# ... rest of the code ...
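A rough illustration of what the two-step regex pipeline in get_slug yields, assuming unidecode is installed; the sample product name is invented:

    import re
    from unidecode import unidecode

    def slugify(name):
        # Same cleanup steps as Product.get_slug in the record above
        value = re.sub(r'[^\w\s-]', '', unidecode(name)).strip().lower()
        return re.sub(r'[-\s]+', '-', value)

    print(slugify(u'Żółty Kubek! 2'))  # -> zolty-kubek-2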
aa436864f53a4c77b4869baabfb1478d7fea36f0
tests/products/__init__.py
tests/products/__init__.py
import arrow
import six

from tilezilla.core import BoundingBox, Band

MAPPING = {
    'timeseries_id': str,
    'acquired': arrow.Arrow,
    'processed': arrow.Arrow,
    'platform': str,
    'instrument': str,
    'bounds': BoundingBox,
    'bands': [Band],
    'metadata': dict,
    'metadata_files': dict
}


def check_attributes(product):
    for attr, _type in six.iteritems(MAPPING):
        assert hasattr(product, attr)
        value = getattr(product, attr)
        if isinstance(_type, type):
            assert isinstance(value, _type)
        else:
            assert isinstance(value, type(_type))
            for item in value:
                assert isinstance(item, tuple(_type))

import arrow
import six

from tilezilla.core import BoundingBox, Band

MAPPING = {
    'timeseries_id': six.string_types,
    'acquired': arrow.Arrow,
    'processed': arrow.Arrow,
    'platform': six.string_types,
    'instrument': six.string_types,
    'bounds': BoundingBox,
    'bands': [Band],
    'metadata': dict,
    'metadata_files': dict
}


def check_attributes(product):
    for attr, _type in six.iteritems(MAPPING):
        assert hasattr(product, attr)
        value = getattr(product, attr)
        if isinstance(_type, (type, tuple)):
            # Type declaration one or more types
            assert isinstance(value, _type)
        else:
            # Type declaration list of types
            assert isinstance(value, type(_type))
            for item in value:
                assert isinstance(item, tuple(_type))
Allow str type comparison in py2/3
Allow str type comparison in py2/3
Python
bsd-3-clause
ceholden/landsat_tile,ceholden/landsat_tiles,ceholden/landsat_tiles,ceholden/tilezilla,ceholden/landsat_tile
import arrow
import six

from tilezilla.core import BoundingBox, Band

MAPPING = {
-     'timeseries_id': str,
+     'timeseries_id': six.string_types,
    'acquired': arrow.Arrow,
    'processed': arrow.Arrow,
-     'platform': str,
+     'platform': six.string_types,
-     'instrument': str,
+     'instrument': six.string_types,
    'bounds': BoundingBox,
    'bands': [Band],
    'metadata': dict,
    'metadata_files': dict
}


def check_attributes(product):
    for attr, _type in six.iteritems(MAPPING):
        assert hasattr(product, attr)
        value = getattr(product, attr)
-         if isinstance(_type, type):
+         if isinstance(_type, (type, tuple)):
+             # Type declaration one or more types
            assert isinstance(value, _type)
        else:
+             # Type declaration list of types
            assert isinstance(value, type(_type))
            for item in value:
                assert isinstance(item, tuple(_type))
Allow str type comparison in py2/3
## Code Before:
import arrow
import six

from tilezilla.core import BoundingBox, Band

MAPPING = {
    'timeseries_id': str,
    'acquired': arrow.Arrow,
    'processed': arrow.Arrow,
    'platform': str,
    'instrument': str,
    'bounds': BoundingBox,
    'bands': [Band],
    'metadata': dict,
    'metadata_files': dict
}


def check_attributes(product):
    for attr, _type in six.iteritems(MAPPING):
        assert hasattr(product, attr)
        value = getattr(product, attr)
        if isinstance(_type, type):
            assert isinstance(value, _type)
        else:
            assert isinstance(value, type(_type))
            for item in value:
                assert isinstance(item, tuple(_type))

## Instruction:
Allow str type comparison in py2/3

## Code After:
import arrow
import six

from tilezilla.core import BoundingBox, Band

MAPPING = {
    'timeseries_id': six.string_types,
    'acquired': arrow.Arrow,
    'processed': arrow.Arrow,
    'platform': six.string_types,
    'instrument': six.string_types,
    'bounds': BoundingBox,
    'bands': [Band],
    'metadata': dict,
    'metadata_files': dict
}


def check_attributes(product):
    for attr, _type in six.iteritems(MAPPING):
        assert hasattr(product, attr)
        value = getattr(product, attr)
        if isinstance(_type, (type, tuple)):
            # Type declaration one or more types
            assert isinstance(value, _type)
        else:
            # Type declaration list of types
            assert isinstance(value, type(_type))
            for item in value:
                assert isinstance(item, tuple(_type))
...
MAPPING = {
    'timeseries_id': six.string_types,
    'acquired': arrow.Arrow,
...
    'processed': arrow.Arrow,
    'platform': six.string_types,
    'instrument': six.string_types,
    'bounds': BoundingBox,
...
        if isinstance(_type, (type, tuple)):
            # Type declaration one or more types
            assert isinstance(value, _type)
...
        else:
            # Type declaration list of types
            assert isinstance(value, type(_type))
...
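The point of swapping str for six.string_types is that Python 2 text values may be str or unicode; a small sketch (six is the only assumed dependency):

    import six

    assert isinstance('ascii', six.string_types)  # str on both major versions
    assert isinstance(u'text', six.string_types)  # also unicode on Python 2,
                                                  # where a bare str check fails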
1056c3f489b162d77b6c117fad2b45bfa06beee1
app/urls.py
app/urls.py
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.conf import settings
#from . import views


urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'app.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),
    url(r'^$', 'app.views.splash', name='splash'),
    url(r'^feed', 'app.views.feed', name='feed'),
    url(r'^about', 'app.views.about', name='about'),
    url(r'^explore', 'app.views.explore', name='explore'),
    url(r'^profile_picture', 'app.views.profile_picture', name='profile_picture'),
    url(r'^dashboard', 'app.views.dashboard', name='dashboard'),
    url(r'^login', 'app.views.login', name='login'),
    url(r'^logout', 'app.views.logout', name='logout'),
    url(r'^temp', 'app.views.temp', name='temp'), #delete eventually
    url(r'^posts', 'app.views.posts', name='posts'),
    url(r'^admin/', include(admin.site.urls))
)

from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.conf import settings
#from . import views
urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'app.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),
    url(r'^$', 'app.views.splash', name='splash'),
    url(r'^feed', 'app.views.feed', name='feed'),
    url(r'^about', 'app.views.about', name='about'),
    url(r'^explore', 'app.views.explore', name='explore'),
    url(r'^profile_picture', 'app.views.profile_picture', name='profile_picture'),
    url(r'^dashboard', 'app.views.dashboard', name='dashboard'),
    url(r'^login', 'app.views.login', name='login'),
    url(r'^logout', 'app.views.logout', name='logout'),
    url(r'^temp', 'app.views.temp', name='temp'),

    url(r'^admin/', include(admin.site.urls))
)
Revert "Added a post view"
Revert "Added a post view" This reverts commit b1063480e7b2e1128c457e9e65c52f742109d90d.
Python
unlicense
yourbuddyconner/cs399-social,yourbuddyconner/cs399-social
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.conf import settings
#from . import views
-
-
urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'app.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),
    url(r'^$', 'app.views.splash', name='splash'),
    url(r'^feed', 'app.views.feed', name='feed'),
    url(r'^about', 'app.views.about', name='about'),
    url(r'^explore', 'app.views.explore', name='explore'),
    url(r'^profile_picture', 'app.views.profile_picture', name='profile_picture'),
    url(r'^dashboard', 'app.views.dashboard', name='dashboard'),
    url(r'^login', 'app.views.login', name='login'),
    url(r'^logout', 'app.views.logout', name='logout'),
-     url(r'^temp', 'app.views.temp', name='temp'), #delete eventually
+     url(r'^temp', 'app.views.temp', name='temp'),
-     url(r'^posts', 'app.views.posts', name='posts'),
+
    url(r'^admin/', include(admin.site.urls))
)
-
Revert "Added a post view"
## Code Before:
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.conf import settings
#from . import views


urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'app.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),
    url(r'^$', 'app.views.splash', name='splash'),
    url(r'^feed', 'app.views.feed', name='feed'),
    url(r'^about', 'app.views.about', name='about'),
    url(r'^explore', 'app.views.explore', name='explore'),
    url(r'^profile_picture', 'app.views.profile_picture', name='profile_picture'),
    url(r'^dashboard', 'app.views.dashboard', name='dashboard'),
    url(r'^login', 'app.views.login', name='login'),
    url(r'^logout', 'app.views.logout', name='logout'),
    url(r'^temp', 'app.views.temp', name='temp'), #delete eventually
    url(r'^posts', 'app.views.posts', name='posts'),
    url(r'^admin/', include(admin.site.urls))
)

## Instruction:
Revert "Added a post view"

## Code After:
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.conf import settings
#from . import views
urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'app.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),
    url(r'^$', 'app.views.splash', name='splash'),
    url(r'^feed', 'app.views.feed', name='feed'),
    url(r'^about', 'app.views.about', name='about'),
    url(r'^explore', 'app.views.explore', name='explore'),
    url(r'^profile_picture', 'app.views.profile_picture', name='profile_picture'),
    url(r'^dashboard', 'app.views.dashboard', name='dashboard'),
    url(r'^login', 'app.views.login', name='login'),
    url(r'^logout', 'app.views.logout', name='logout'),
    url(r'^temp', 'app.views.temp', name='temp'),

    url(r'^admin/', include(admin.site.urls))
)
// ... existing code ...
    url(r'^logout', 'app.views.logout', name='logout'),
    url(r'^temp', 'app.views.temp', name='temp'),

    url(r'^admin/', include(admin.site.urls))
...
)
// ... rest of the code ...
39b6868042e95a5a412d3d3b9fa5f735e35ddb2c
umodbus/__init__.py
umodbus/__init__.py
from logging import getLogger

try:
    from logging import NullHandler
    # For Python 2.7 compatibility.
except ImportError:
    from logging import Handler

    class NullHandler(Handler):
        def emit(self, record):
            pass

log = getLogger('uModbus')
log.addHandler(NullHandler())

from .server import get_server  # NOQA

from logging import getLogger, NullHandler

log = getLogger('uModbus')
log.addHandler(NullHandler())

from .server import get_server  # NOQA
Remove another piece of unreachable code.
Remove another piece of unreachable code.
Python
mpl-2.0
AdvancedClimateSystems/python-modbus,AdvancedClimateSystems/uModbus
- from logging import getLogger
+ from logging import getLogger, NullHandler
-
- try:
-     from logging import NullHandler
-     # For Python 2.7 compatibility.
- except ImportError:
-     from logging import Handler
-
-     class NullHandler(Handler):
-         def emit(self, record):
-             pass

log = getLogger('uModbus')
log.addHandler(NullHandler())

from .server import get_server  # NOQA
Remove another piece of unreachable code.
## Code Before:
from logging import getLogger

try:
    from logging import NullHandler
    # For Python 2.7 compatibility.
except ImportError:
    from logging import Handler

    class NullHandler(Handler):
        def emit(self, record):
            pass

log = getLogger('uModbus')
log.addHandler(NullHandler())

from .server import get_server  # NOQA

## Instruction:
Remove another piece of unreachable code.

## Code After:
from logging import getLogger, NullHandler

log = getLogger('uModbus')
log.addHandler(NullHandler())

from .server import get_server  # NOQA
...
from logging import getLogger, NullHandler
...
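For context, logging.NullHandler has shipped in the standard library since Python 2.7/3.1, which is what makes the shim above removable; the remaining library-logger idiom is simply (logger name is a placeholder):

    from logging import getLogger, NullHandler

    log = getLogger('mylib')
    log.addHandler(NullHandler())  # silences 'no handlers could be found'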
516bebe37212e72362b416bd1d9c87a83726fa5f
changes/api/cluster_nodes.py
changes/api/cluster_nodes.py
from __future__ import absolute_import

from datetime import datetime, timedelta

from flask.ext.restful import reqparse

from changes.api.base import APIView
from changes.models import Cluster, JobStep, Node


class ClusterNodesAPIView(APIView):
    parser = reqparse.RequestParser()
    parser.add_argument('since', type=int, location='args')

    def get(self, cluster_id):
        cluster = Cluster.query.get(cluster_id)
        if cluster is None:
            return '', 404

        queryset = Node.query.filter(
            Node.clusters.contains(cluster),
        )

        args = self.parser.parse_args()
        if args.since:
            cutoff = datetime.utcnow() - timedelta(days=args.since)
            queryset = queryset.join(
                JobStep, JobStep.node_id == Node.id,
            ).filter(
                JobStep.date_created > cutoff,
            ).group_by(Node)

        return self.paginate(queryset)

from __future__ import absolute_import

from datetime import datetime, timedelta

from flask.ext.restful import reqparse

from changes.api.base import APIView
from changes.models import Cluster, JobStep, Node


class ClusterNodesAPIView(APIView):
    parser = reqparse.RequestParser()
    parser.add_argument('since', type=int, location='args')

    def get(self, cluster_id):
        cluster = Cluster.query.get(cluster_id)
        if cluster is None:
            return '', 404

        queryset = Node.query.filter(
            Node.clusters.contains(cluster),
        ).order_by(Node.label.asc())

        args = self.parser.parse_args()
        if args.since:
            cutoff = datetime.utcnow() - timedelta(days=args.since)
            queryset = queryset.join(
                JobStep, JobStep.node_id == Node.id,
            ).filter(
                JobStep.date_created > cutoff,
            ).group_by(Node)

        return self.paginate(queryset)
Enforce ordering on cluster nodes endpoint
Enforce ordering on cluster nodes endpoint
Python
apache-2.0
bowlofstew/changes,bowlofstew/changes,dropbox/changes,wfxiang08/changes,wfxiang08/changes,dropbox/changes,dropbox/changes,dropbox/changes,bowlofstew/changes,bowlofstew/changes,wfxiang08/changes,wfxiang08/changes
from __future__ import absolute_import

from datetime import datetime, timedelta

from flask.ext.restful import reqparse

from changes.api.base import APIView
from changes.models import Cluster, JobStep, Node


class ClusterNodesAPIView(APIView):
    parser = reqparse.RequestParser()
    parser.add_argument('since', type=int, location='args')

    def get(self, cluster_id):
        cluster = Cluster.query.get(cluster_id)
        if cluster is None:
            return '', 404

        queryset = Node.query.filter(
            Node.clusters.contains(cluster),
-         )
+         ).order_by(Node.label.asc())

        args = self.parser.parse_args()
        if args.since:
            cutoff = datetime.utcnow() - timedelta(days=args.since)
            queryset = queryset.join(
                JobStep, JobStep.node_id == Node.id,
            ).filter(
                JobStep.date_created > cutoff,
            ).group_by(Node)

        return self.paginate(queryset)
Enforce ordering on cluster nodes endpoint
## Code Before:
from __future__ import absolute_import

from datetime import datetime, timedelta

from flask.ext.restful import reqparse

from changes.api.base import APIView
from changes.models import Cluster, JobStep, Node


class ClusterNodesAPIView(APIView):
    parser = reqparse.RequestParser()
    parser.add_argument('since', type=int, location='args')

    def get(self, cluster_id):
        cluster = Cluster.query.get(cluster_id)
        if cluster is None:
            return '', 404

        queryset = Node.query.filter(
            Node.clusters.contains(cluster),
        )

        args = self.parser.parse_args()
        if args.since:
            cutoff = datetime.utcnow() - timedelta(days=args.since)
            queryset = queryset.join(
                JobStep, JobStep.node_id == Node.id,
            ).filter(
                JobStep.date_created > cutoff,
            ).group_by(Node)

        return self.paginate(queryset)

## Instruction:
Enforce ordering on cluster nodes endpoint

## Code After:
from __future__ import absolute_import

from datetime import datetime, timedelta

from flask.ext.restful import reqparse

from changes.api.base import APIView
from changes.models import Cluster, JobStep, Node


class ClusterNodesAPIView(APIView):
    parser = reqparse.RequestParser()
    parser.add_argument('since', type=int, location='args')

    def get(self, cluster_id):
        cluster = Cluster.query.get(cluster_id)
        if cluster is None:
            return '', 404

        queryset = Node.query.filter(
            Node.clusters.contains(cluster),
        ).order_by(Node.label.asc())

        args = self.parser.parse_args()
        if args.since:
            cutoff = datetime.utcnow() - timedelta(days=args.since)
            queryset = queryset.join(
                JobStep, JobStep.node_id == Node.id,
            ).filter(
                JobStep.date_created > cutoff,
            ).group_by(Node)

        return self.paginate(queryset)
...
            Node.clusters.contains(cluster),
        ).order_by(Node.label.asc())
...
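The order_by added above matters because SQL guarantees no row order without an ORDER BY, so paginated slices can overlap or skip rows between requests; a generic SQLAlchemy sketch with hypothetical session and model names:

    from sqlalchemy import asc

    # A pinned ordering makes consecutive pages deterministic:
    page1 = session.query(Node).order_by(asc(Node.label)).limit(25).all()
    page2 = session.query(Node).order_by(asc(Node.label)).offset(25).limit(25).all()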
7c4b19fee9a50804921fc1084655d05ea3b7e89b
setup.py
setup.py
from distutils.core import setup

setup(
    name='django-robots',
    version=__import__('robots').__version__,
    description='Robots exclusion application for Django, complementing Sitemaps.',
    long_description=open('docs/overview.txt').read(),
    author='Jannis Leidel',
    author_email='[email protected]',
    url='http://code.google.com/p/django-robots/',
    download_url='http://github.com/jezdez/django-dbtemplates/zipball/0.5.4',
    packages=['robots'],
    package_dir={'dbtemplates': 'dbtemplates'},
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Framework :: Django',
    ]
)

from distutils.core import setup

setup(
    name='django-robots',
    version=__import__('robots').__version__,
    description='Robots exclusion application for Django, complementing Sitemaps.',
    long_description=open('docs/overview.txt').read(),
    author='Jannis Leidel',
    author_email='[email protected]',
    url='http://code.google.com/p/django-robots/',
    packages=['robots'],
    package_dir={'dbtemplates': 'dbtemplates'},
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Framework :: Django',
    ]
)
Remove download URL since Github doesn't get his act together. Damnit
Remove download URL since Github doesn't get his act together. Damnit

committer: Jannis Leidel <[email protected]>

--HG--
extra : convert_revision : 410200249f2c4981c9e0e8e5cf9334b0e17ec3d4
Python
bsd-3-clause
amitu/django-robots,amitu/django-robots,jscott1971/django-robots,jazzband/django-robots,freakboy3742/django-robots,philippeowagner/django-robots,freakboy3742/django-robots,pbs/django-robots,pbs/django-robots,jscott1971/django-robots,pbs/django-robots,jezdez/django-robots,philippeowagner/django-robots,jezdez/django-robots,jazzband/django-robots,gbezyuk/django-robots,gbezyuk/django-robots
from distutils.core import setup

setup(
    name='django-robots',
    version=__import__('robots').__version__,
    description='Robots exclusion application for Django, complementing Sitemaps.',
    long_description=open('docs/overview.txt').read(),
    author='Jannis Leidel',
    author_email='[email protected]',
    url='http://code.google.com/p/django-robots/',
-     download_url='http://github.com/jezdez/django-dbtemplates/zipball/0.5.4',
    packages=['robots'],
    package_dir={'dbtemplates': 'dbtemplates'},
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Framework :: Django',
    ]
)
Remove download URL since Github doesn't get his act together. Damnit
## Code Before:
from distutils.core import setup

setup(
    name='django-robots',
    version=__import__('robots').__version__,
    description='Robots exclusion application for Django, complementing Sitemaps.',
    long_description=open('docs/overview.txt').read(),
    author='Jannis Leidel',
    author_email='[email protected]',
    url='http://code.google.com/p/django-robots/',
    download_url='http://github.com/jezdez/django-dbtemplates/zipball/0.5.4',
    packages=['robots'],
    package_dir={'dbtemplates': 'dbtemplates'},
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Framework :: Django',
    ]
)

## Instruction:
Remove download URL since Github doesn't get his act together. Damnit

## Code After:
from distutils.core import setup

setup(
    name='django-robots',
    version=__import__('robots').__version__,
    description='Robots exclusion application for Django, complementing Sitemaps.',
    long_description=open('docs/overview.txt').read(),
    author='Jannis Leidel',
    author_email='[email protected]',
    url='http://code.google.com/p/django-robots/',
    packages=['robots'],
    package_dir={'dbtemplates': 'dbtemplates'},
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Framework :: Django',
    ]
)
# ... existing code ...
    url='http://code.google.com/p/django-robots/',
    packages=['robots'],
# ... rest of the code ...
a09822a4a8422493c5cb98eb9518ab0112c565d7
techism2/service.py
techism2/service.py
from techism2.models import Event
from datetime import datetime


def get_tags():
    # TODO: cache, use django cache which uses GAE memcache
    dict_list = Event.objects.values('tags')
    tags = dict()

    for dictionary in dict_list:
        for tag_list in dictionary.itervalues():
            if tag_list:
                for tag in tag_list:
                    if tag not in tags:
                        tags[tag] = 0
                    tags[tag] += 1
    return tags

from techism2.models import Event
from datetime import datetime
from django.core.cache import cache

tags_cache_key = "tags"


def get_tags():
    # Note: no synchronization, probably not possible on GAE
    tags = cache.get(tags_cache_key)

    if tags:
        return tags
    else:
        tags = __fetch_tags()
        cache.set(tags_cache_key, tags, 1800) # expire after 30 min
        return tags


def __fetch_tags():
    dict_list = Event.objects.values('tags')
    tags = dict()

    for dictionary in dict_list:
        for tag_list in dictionary.itervalues():
            if tag_list:
                for tag in tag_list:
                    if tag not in tags:
                        tags[tag] = 0
                    tags[tag] += 1
    return tags
Use Memcache to cache tags
Use Memcache to cache tags
Python
apache-2.0
gimler/techism2,gimler/techism2
from techism2.models import Event
from datetime import datetime
+ from django.core.cache import cache
+
+ tags_cache_key = "tags"


def get_tags():
-     # TODO: cache, use django cache which uses GAE memcache
+     # Note: no synchronization, probably not possible on GAE
+     tags = cache.get(tags_cache_key)
+
+     if tags:
+         return tags
+     else:
+         tags = __fetch_tags()
+         cache.set(tags_cache_key, tags, 1800) # expire after 30 min
+         return tags
+
+ def __fetch_tags():
    dict_list = Event.objects.values('tags')
    tags = dict()

    for dictionary in dict_list:
        for tag_list in dictionary.itervalues():
            if tag_list:
                for tag in tag_list:
                    if tag not in tags:
                        tags[tag] = 0
                    tags[tag] += 1
    return tags
Use Memcache to cache tags
## Code Before:
from techism2.models import Event
from datetime import datetime


def get_tags():
    # TODO: cache, use django cache which uses GAE memcache
    dict_list = Event.objects.values('tags')
    tags = dict()

    for dictionary in dict_list:
        for tag_list in dictionary.itervalues():
            if tag_list:
                for tag in tag_list:
                    if tag not in tags:
                        tags[tag] = 0
                    tags[tag] += 1
    return tags

## Instruction:
Use Memcache to cache tags

## Code After:
from techism2.models import Event
from datetime import datetime
from django.core.cache import cache

tags_cache_key = "tags"


def get_tags():
    # Note: no synchronization, probably not possible on GAE
    tags = cache.get(tags_cache_key)

    if tags:
        return tags
    else:
        tags = __fetch_tags()
        cache.set(tags_cache_key, tags, 1800) # expire after 30 min
        return tags


def __fetch_tags():
    dict_list = Event.objects.values('tags')
    tags = dict()

    for dictionary in dict_list:
        for tag_list in dictionary.itervalues():
            if tag_list:
                for tag in tag_list:
                    if tag not in tags:
                        tags[tag] = 0
                    tags[tag] += 1
    return tags
...
from datetime import datetime
from django.core.cache import cache

tags_cache_key = "tags"
...
def get_tags():
    # Note: no synchronization, probably not possible on GAE
    tags = cache.get(tags_cache_key)

    if tags:
        return tags
    else:
        tags = __fetch_tags()
        cache.set(tags_cache_key, tags, 1800) # expire after 30 min
        return tags


def __fetch_tags():
    dict_list = Event.objects.values('tags')
...
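A compact sketch of the cache-aside pattern this record adopts, reusing Django's cache API (the key and the 30-minute timeout mirror the record; the compute callable is a stand-in). Note that testing `is None` avoids recomputing when a cached value is falsy, a subtlety the record's `if tags:` check glosses over:

    from django.core.cache import cache

    def cached(key, compute, timeout=1800):
        value = cache.get(key)              # None on a cache miss
        if value is None:
            value = compute()               # recompute on miss...
            cache.set(key, value, timeout)  # ...and memoize for 30 minutes
        return value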
8eed621a15dafc8b0965c59b8da2296f8193d0ca
karabo_data/tests/test_agipd_geometry.py
karabo_data/tests/test_agipd_geometry.py
import numpy as np

from karabo_data.geometry2 import AGIPD_1MGeometry


def test_snap_assemble_data():
    geom = AGIPD_1MGeometry.from_quad_positions(quad_pos=[
        (-525, 625),
        (-550, -10),
        (520, -160),
        (542.5, 475),
    ])

    snap_geom = geom.snap()

    stacked_data = np.zeros((16, 512, 128))
    img, centre = snap_geom.position_all_modules(stacked_data)
    assert img.shape == (1296, 1132)
    assert tuple(centre) == (651, 570)
    assert np.isnan(img[0, 0])
    assert img[50, 50] == 0

import numpy as np

from karabo_data.geometry2 import AGIPD_1MGeometry


def test_snap_assemble_data():
    geom = AGIPD_1MGeometry.from_quad_positions(quad_pos=[
        (-525, 625),
        (-550, -10),
        (520, -160),
        (542.5, 475),
    ])

    snap_geom = geom.snap()

    stacked_data = np.zeros((16, 512, 128))
    img, centre = snap_geom.position_all_modules(stacked_data)
    assert img.shape == (1296, 1132)
    assert tuple(centre) == (651, 570)
    assert np.isnan(img[0, 0])
    assert img[50, 50] == 0


def test_write_read_crystfel_file(tmpdir):
    geom = AGIPD_1MGeometry.from_quad_positions(quad_pos=[
        (-525, 625),
        (-550, -10),
        (520, -160),
        (542.5, 475),
    ])
    path = str(tmpdir / 'test.geom')
    geom.write_crystfel_geom(path)

    # We need to add some experiment details before cfelpyutils will read the
    # file
    with open(path, 'r') as f:
        contents = f.read()
    with open(path, 'w') as f:
        f.write('clen = 0.119\n')
        f.write('adu_per_eV = 0.0075\n')
        f.write(contents)

    loaded = AGIPD_1MGeometry.from_crystfel_geom(path)
    np.testing.assert_allclose(loaded.modules[0][0].corner_pos,
                               geom.modules[0][0].corner_pos)
    np.testing.assert_allclose(loaded.modules[0][0].fs_vec,
                               geom.modules[0][0].fs_vec)
Add test of reading & writing CrystFEL geometry
Add test of reading & writing CrystFEL geometry
Python
bsd-3-clause
European-XFEL/h5tools-py
import numpy as np

from karabo_data.geometry2 import AGIPD_1MGeometry


def test_snap_assemble_data():
    geom = AGIPD_1MGeometry.from_quad_positions(quad_pos=[
        (-525, 625),
        (-550, -10),
        (520, -160),
        (542.5, 475),
    ])

    snap_geom = geom.snap()

    stacked_data = np.zeros((16, 512, 128))
    img, centre = snap_geom.position_all_modules(stacked_data)
    assert img.shape == (1296, 1132)
    assert tuple(centre) == (651, 570)
    assert np.isnan(img[0, 0])
    assert img[50, 50] == 0
+
+ def test_write_read_crystfel_file(tmpdir):
+     geom = AGIPD_1MGeometry.from_quad_positions(quad_pos=[
+         (-525, 625),
+         (-550, -10),
+         (520, -160),
+         (542.5, 475),
+     ])
+     path = str(tmpdir / 'test.geom')
+     geom.write_crystfel_geom(path)
+
+     # We need to add some experiment details before cfelpyutils will read the
+     # file
+     with open(path, 'r') as f:
+         contents = f.read()
+     with open(path, 'w') as f:
+         f.write('clen = 0.119\n')
+         f.write('adu_per_eV = 0.0075\n')
+         f.write(contents)
+
+     loaded = AGIPD_1MGeometry.from_crystfel_geom(path)
+     np.testing.assert_allclose(loaded.modules[0][0].corner_pos,
+                                geom.modules[0][0].corner_pos)
+     np.testing.assert_allclose(loaded.modules[0][0].fs_vec,
+                                geom.modules[0][0].fs_vec)
+
Add test of reading & writing CrystFEL geometry
## Code Before:
import numpy as np

from karabo_data.geometry2 import AGIPD_1MGeometry


def test_snap_assemble_data():
    geom = AGIPD_1MGeometry.from_quad_positions(quad_pos=[
        (-525, 625),
        (-550, -10),
        (520, -160),
        (542.5, 475),
    ])

    snap_geom = geom.snap()

    stacked_data = np.zeros((16, 512, 128))
    img, centre = snap_geom.position_all_modules(stacked_data)
    assert img.shape == (1296, 1132)
    assert tuple(centre) == (651, 570)
    assert np.isnan(img[0, 0])
    assert img[50, 50] == 0

## Instruction:
Add test of reading & writing CrystFEL geometry

## Code After:
import numpy as np

from karabo_data.geometry2 import AGIPD_1MGeometry


def test_snap_assemble_data():
    geom = AGIPD_1MGeometry.from_quad_positions(quad_pos=[
        (-525, 625),
        (-550, -10),
        (520, -160),
        (542.5, 475),
    ])

    snap_geom = geom.snap()

    stacked_data = np.zeros((16, 512, 128))
    img, centre = snap_geom.position_all_modules(stacked_data)
    assert img.shape == (1296, 1132)
    assert tuple(centre) == (651, 570)
    assert np.isnan(img[0, 0])
    assert img[50, 50] == 0


def test_write_read_crystfel_file(tmpdir):
    geom = AGIPD_1MGeometry.from_quad_positions(quad_pos=[
        (-525, 625),
        (-550, -10),
        (520, -160),
        (542.5, 475),
    ])
    path = str(tmpdir / 'test.geom')
    geom.write_crystfel_geom(path)

    # We need to add some experiment details before cfelpyutils will read the
    # file
    with open(path, 'r') as f:
        contents = f.read()
    with open(path, 'w') as f:
        f.write('clen = 0.119\n')
        f.write('adu_per_eV = 0.0075\n')
        f.write(contents)

    loaded = AGIPD_1MGeometry.from_crystfel_geom(path)
    np.testing.assert_allclose(loaded.modules[0][0].corner_pos,
                               geom.modules[0][0].corner_pos)
    np.testing.assert_allclose(loaded.modules[0][0].fs_vec,
                               geom.modules[0][0].fs_vec)
// ... existing code ...
    assert img[50, 50] == 0


def test_write_read_crystfel_file(tmpdir):
    geom = AGIPD_1MGeometry.from_quad_positions(quad_pos=[
        (-525, 625),
        (-550, -10),
        (520, -160),
        (542.5, 475),
    ])
    path = str(tmpdir / 'test.geom')
    geom.write_crystfel_geom(path)

    # We need to add some experiment details before cfelpyutils will read the
    # file
    with open(path, 'r') as f:
        contents = f.read()
    with open(path, 'w') as f:
        f.write('clen = 0.119\n')
        f.write('adu_per_eV = 0.0075\n')
        f.write(contents)

    loaded = AGIPD_1MGeometry.from_crystfel_geom(path)
    np.testing.assert_allclose(loaded.modules[0][0].corner_pos,
                               geom.modules[0][0].corner_pos)
    np.testing.assert_allclose(loaded.modules[0][0].fs_vec,
                               geom.modules[0][0].fs_vec)
// ... rest of the code ...
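The new test follows a write-then-read round-trip pattern; stripped to its general form (file name and data are illustrative), pytest's tmpdir fixture plus numpy's tolerance-aware comparison look like:

    import numpy as np

    def test_roundtrip(tmpdir):
        path = str(tmpdir / 'out.txt')  # tmpdir is a py.path.local object
        original = np.array([1.0, 2.5, 3.0])
        np.savetxt(path, original)      # write...
        loaded = np.loadtxt(path)       # ...read back...
        np.testing.assert_allclose(loaded, original)  # ...compare in tolerance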
039d7bc7f19add23670fc387b3091293d2ed94ce
parser.py
parser.py
from config import MongoSource
from manager import PluginManager
from log import LogDocGenerator


def main():
    # 1. load all plugins
    plugin_manager = PluginManager()

    # 2. get one or more mongodb collection
    ms = MongoSource()
    collection = ms.get_collection("net-test", "ename_access")

    # 3. make a log_generator
    log_generator = LogDocGenerator(collection)

    # 4. use condition to get filtered logs
    #condition = {"host":"192.168.1.57"}
    condition = {}

    # 5. use keywords plugins to parse logs
    keywords = ['ip']
    for log_doc in log_generator.get_log_docs(condition):
        plugin_manager.call_method('process', args=log_doc, keywords=keywords)

    # 6. give a report
    plugin_manager.call_method('report', args={}, keywords=keywords)

if __name__ == '__main__':
    main()

from config import MongoSource
from manager import PluginManager
from log import LogDocGenerator
import datetime


def main():
    # 1. load all plugins
    plugin_manager = PluginManager()

    # 2. get one or more mongodb collection
    ms = MongoSource()
    collection = ms.get_collection("net-test", "ename_access")

    # 3. make a log_generator
    log_generator = LogDocGenerator(collection)

    # 4. use condition to get filtered logs
    #condition = {"host":"192.168.1.57"}
    now = datetime.datetime.now()
    start = now - datetime.timedelta(hours=8, minutes=10)
    end = now - datetime.timedelta(hours=8)
    condition = {"time":{"$gte":start, "$lt":end}}

    # 5. use keywords plugins to parse logs
    keywords = ['ip']
    for log_doc in log_generator.get_log_docs(condition):
        plugin_manager.call_method('process', args=log_doc, keywords=keywords)

    # 6. give a report
    plugin_manager.call_method('report', args={}, keywords=keywords)

if __name__ == '__main__':
    main()
Add example for query datetime range
Add example for query datetime range
Python
apache-2.0
keepzero/fluent-mongo-parser
from config import MongoSource
from manager import PluginManager
from log import LogDocGenerator
+ import datetime


def main():
    # 1. load all plugins
    plugin_manager = PluginManager()

    # 2. get one or more mongodb collection
    ms = MongoSource()
    collection = ms.get_collection("net-test", "ename_access")

    # 3. make a log_generator
    log_generator = LogDocGenerator(collection)

    # 4. use condition to get filtered logs
    #condition = {"host":"192.168.1.57"}
-     condition = {}
+     now = datetime.datetime.now()
+     start = now - datetime.timedelta(hours=8, minutes=10)
+     end = now - datetime.timedelta(hours=8)
+     condition = {"time":{"$gte":start, "$lt":end}}

    # 5. use keywords plugins to parse logs
    keywords = ['ip']
    for log_doc in log_generator.get_log_docs(condition):
        plugin_manager.call_method('process', args=log_doc, keywords=keywords)

    # 6. give a report
    plugin_manager.call_method('report', args={}, keywords=keywords)

if __name__ == '__main__':
    main()
Add example for query datetime range
## Code Before:
from config import MongoSource
from manager import PluginManager
from log import LogDocGenerator


def main():
    # 1. load all plugins
    plugin_manager = PluginManager()

    # 2. get one or more mongodb collection
    ms = MongoSource()
    collection = ms.get_collection("net-test", "ename_access")

    # 3. make a log_generator
    log_generator = LogDocGenerator(collection)

    # 4. use condition to get filtered logs
    #condition = {"host":"192.168.1.57"}
    condition = {}

    # 5. use keywords plugins to parse logs
    keywords = ['ip']
    for log_doc in log_generator.get_log_docs(condition):
        plugin_manager.call_method('process', args=log_doc, keywords=keywords)

    # 6. give a report
    plugin_manager.call_method('report', args={}, keywords=keywords)

if __name__ == '__main__':
    main()

## Instruction:
Add example for query datetime range

## Code After:
from config import MongoSource
from manager import PluginManager
from log import LogDocGenerator
import datetime


def main():
    # 1. load all plugins
    plugin_manager = PluginManager()

    # 2. get one or more mongodb collection
    ms = MongoSource()
    collection = ms.get_collection("net-test", "ename_access")

    # 3. make a log_generator
    log_generator = LogDocGenerator(collection)

    # 4. use condition to get filtered logs
    #condition = {"host":"192.168.1.57"}
    now = datetime.datetime.now()
    start = now - datetime.timedelta(hours=8, minutes=10)
    end = now - datetime.timedelta(hours=8)
    condition = {"time":{"$gte":start, "$lt":end}}

    # 5. use keywords plugins to parse logs
    keywords = ['ip']
    for log_doc in log_generator.get_log_docs(condition):
        plugin_manager.call_method('process', args=log_doc, keywords=keywords)

    # 6. give a report
    plugin_manager.call_method('report', args={}, keywords=keywords)

if __name__ == '__main__':
    main()
...
from log import LogDocGenerator
import datetime
...
    #condition = {"host":"192.168.1.57"}
    now = datetime.datetime.now()
    start = now - datetime.timedelta(hours=8, minutes=10)
    end = now - datetime.timedelta(hours=8)
    condition = {"time":{"$gte":start, "$lt":end}}
...
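The two timedeltas above select a half-open 10-minute slice ending eight hours before now (the 8-hour shift reads like a timezone workaround); the window arithmetic in isolation:

    import datetime

    now = datetime.datetime.now()
    start = now - datetime.timedelta(hours=8, minutes=10)
    end = now - datetime.timedelta(hours=8)
    condition = {"time": {"$gte": start, "$lt": end}}  # PyMongo-style filter
    print(end - start)  # -> 0:10:00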
0e7d1df97590152781e364b3acea34e0bb42bc2a
tests/constants_test.py
tests/constants_test.py
import unittest

from mtglib.constants import base_url, card_flags


class DescribeConstants(unittest.TestCase):

    def should_have_base_url(self):
        url = ('http://gatherer.wizards.com/Pages/Search/Default.aspx'
               '?output=standard&')
        assert base_url == url

    def should_have_card_flags(self):
        assert card_flags == ['text', 'color', 'subtype', 'type', 'set',
                              'cmc', 'power', 'tough', 'rarity', 'name',
                              'block']

import unittest

from mtglib.constants import base_url, card_flags


class DescribeConstants(unittest.TestCase):

    def should_have_base_url(self):
        url = ('http://gatherer.wizards.com/Pages/Search/Default.aspx'
               '?output=standard&action=advanced&')
        assert base_url == url

    def should_have_card_flags(self):
        assert card_flags == ['text', 'color', 'subtype', 'type', 'set',
                              'cmc', 'power', 'tough', 'rarity', 'name',
                              'block']
Update base url in tests.
Update base url in tests.
Python
mit
chigby/mtg,chigby/mtg
import unittest

from mtglib.constants import base_url, card_flags


class DescribeConstants(unittest.TestCase):

    def should_have_base_url(self):
        url = ('http://gatherer.wizards.com/Pages/Search/Default.aspx'
-                '?output=standard&')
+                '?output=standard&action=advanced&')
        assert base_url == url

    def should_have_card_flags(self):
        assert card_flags == ['text', 'color', 'subtype', 'type', 'set',
                              'cmc', 'power', 'tough', 'rarity', 'name',
                              'block']
Update base url in tests.
## Code Before:
import unittest

from mtglib.constants import base_url, card_flags


class DescribeConstants(unittest.TestCase):

    def should_have_base_url(self):
        url = ('http://gatherer.wizards.com/Pages/Search/Default.aspx'
               '?output=standard&')
        assert base_url == url

    def should_have_card_flags(self):
        assert card_flags == ['text', 'color', 'subtype', 'type', 'set',
                              'cmc', 'power', 'tough', 'rarity', 'name',
                              'block']

## Instruction:
Update base url in tests.

## Code After:
import unittest

from mtglib.constants import base_url, card_flags


class DescribeConstants(unittest.TestCase):

    def should_have_base_url(self):
        url = ('http://gatherer.wizards.com/Pages/Search/Default.aspx'
               '?output=standard&action=advanced&')
        assert base_url == url

    def should_have_card_flags(self):
        assert card_flags == ['text', 'color', 'subtype', 'type', 'set',
                              'cmc', 'power', 'tough', 'rarity', 'name',
                              'block']
// ... existing code ...
        url = ('http://gatherer.wizards.com/Pages/Search/Default.aspx'
               '?output=standard&action=advanced&')
        assert base_url == url
// ... rest of the code ...
0428522c8df724ce49a32686676b2c5345abfda9
sdklib/util/timetizer.py
sdklib/util/timetizer.py
import time import datetime def get_current_utc(time_format="%Y-%m-%d %H:%M:%S"): """ @return a string representation of the current time in UTC. """ return time.strftime(time_format, time.gmtime()) def today_strf(): t = datetime.date.today() return t.strftime("%d/%m/%Y") def tomorrow_strf(): t = datetime.date.today() + datetime.timedelta(days=1) return t.strftime("%d/%m/%Y") def yesterday_strf(): t = datetime.date.today() - datetime.timedelta(days=1) return t.strftime("%d/%m/%Y") def seconds_to_milliseconds_timestamp(seconds_timestamp): return int(round(seconds_timestamp * 1000)) def current_milliseconds_timestamp(): return seconds_to_milliseconds_timestamp(time.time()) def datetime_to_milliseconds_timestamp(datetime_obj): seconds_timestamp = time.mktime(datetime_obj.timetuple()) return seconds_to_milliseconds_timestamp(seconds_timestamp)
import time import datetime def get_current_utc(time_format="%Y-%m-%d %H:%M:%S"): """ @return a string representation of the current time in UTC. """ return time.strftime(time_format, time.gmtime()) def today_strf(format="%d/%m/%Y"): t = datetime.date.today() return t.strftime(format) def tomorrow_strf(format="%d/%m/%Y"): t = datetime.date.today() + datetime.timedelta(days=1) return t.strftime(format) def yesterday_strf(format="%d/%m/%Y"): t = datetime.date.today() - datetime.timedelta(days=1) return t.strftime(format) def seconds_to_milliseconds_timestamp(seconds_timestamp): return int(round(seconds_timestamp * 1000)) def current_milliseconds_timestamp(): return seconds_to_milliseconds_timestamp(time.time()) def datetime_to_milliseconds_timestamp(datetime_obj): seconds_timestamp = time.mktime(datetime_obj.timetuple()) return seconds_to_milliseconds_timestamp(seconds_timestamp)
Add format parameter to strf functions
Add format parameter to strf functions
Python
bsd-2-clause
ivanprjcts/sdklib,ivanprjcts/sdklib
import time import datetime def get_current_utc(time_format="%Y-%m-%d %H:%M:%S"): """ @return a string representation of the current time in UTC. """ return time.strftime(time_format, time.gmtime()) - def today_strf(): + def today_strf(format="%d/%m/%Y"): t = datetime.date.today() - return t.strftime("%d/%m/%Y") + return t.strftime(format) - def tomorrow_strf(): + def tomorrow_strf(format="%d/%m/%Y"): t = datetime.date.today() + datetime.timedelta(days=1) - return t.strftime("%d/%m/%Y") + return t.strftime(format) - def yesterday_strf(): + def yesterday_strf(format="%d/%m/%Y"): t = datetime.date.today() - datetime.timedelta(days=1) - return t.strftime("%d/%m/%Y") + return t.strftime(format) def seconds_to_milliseconds_timestamp(seconds_timestamp): return int(round(seconds_timestamp * 1000)) def current_milliseconds_timestamp(): return seconds_to_milliseconds_timestamp(time.time()) def datetime_to_milliseconds_timestamp(datetime_obj): seconds_timestamp = time.mktime(datetime_obj.timetuple()) return seconds_to_milliseconds_timestamp(seconds_timestamp)
Add format parameter to strf functions
## Code Before:
import boto3

_CLIENTS = {}


def get_regions_for_service(service, regions=()):
    """Given a service name, return a list of region names where this service can have resources,
    restricted by a possible set of regions."""
    if service == "s3":
        return ['us-east-1']  # s3 ListBuckets is a global request, so no region required.
    service_regions = boto3.Session().get_available_regions(service)
    if regions:
        # If regions were passed, return the intersection.
        return [r for r in regions if r in service_regions]
    else:
        return service_regions


def get_client(service, region=None):
    """Return (cached) boto3 clients for this service and this region"""
    if (service, region) not in _CLIENTS:
        _CLIENTS[(service, region)] = boto3.Session(region_name=region).client(service)
    return _CLIENTS[(service, region)]

## Instruction:
Use us-east-1 to query route53

## Code After:
import boto3

_CLIENTS = {}


def get_regions_for_service(service, regions=()):
    """Given a service name, return a list of region names where this service can have resources,
    restricted by a possible set of regions."""
    if service == "s3":
        return ['us-east-1']  # s3 ListBuckets is a global request, so no region required.
    if service == "route53":
        return ['us-east-1']  # route53 is a global service, but the endpoint is in us-east-1.
    service_regions = boto3.Session().get_available_regions(service)
    if regions:
        # If regions were passed, return the intersection.
        return [r for r in regions if r in service_regions]
    else:
        return service_regions


def get_client(service, region=None):
    """Return (cached) boto3 clients for this service and this region"""
    if (service, region) not in _CLIENTS:
        _CLIENTS[(service, region)] = boto3.Session(region_name=region).client(service)
    return _CLIENTS[(service, region)]
// ... existing code ...
        return ['us-east-1']  # s3 ListBuckets is a global request, so no region required.
    if service == "route53":
        return ['us-east-1']  # route53 is a global service, but the endpoint is in us-east-1.
    service_regions = boto3.Session().get_available_regions(service)
// ... rest of the code ...
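How the updated helper behaves, assuming boto3 can resolve its bundled endpoint data (outputs abbreviated; the region names below are illustrative):

    get_regions_for_service('route53')                   # ['us-east-1']
    get_regions_for_service('ec2', ('us-east-1', 'xx'))  # ['us-east-1'] --
                                                         # unknown names drop out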
371be140dfbecff72d72cda580cd299badc6bc15
aws_list_all/client.py
aws_list_all/client.py
import boto3 _CLIENTS = {} def get_regions_for_service(service, regions=()): """Given a service name, return a list of region names where this service can have resources, restricted by a possible set of regions.""" if service == "s3": return ['us-east-1'] # s3 ListBuckets is a global request, so no region required. service_regions = boto3.Session().get_available_regions(service) if regions: # If regions were passed, return the intersecion. return [r for r in regions if r in service_regions] else: return service_regions def get_client(service, region=None): """Return (cached) boto3 clients for this service and this region""" if (service, region) not in _CLIENTS: _CLIENTS[(service, region)] = boto3.Session(region_name=region).client(service) return _CLIENTS[(service, region)]
import boto3 _CLIENTS = {} def get_regions_for_service(service, regions=()): """Given a service name, return a list of region names where this service can have resources, restricted by a possible set of regions.""" if service == "s3": return ['us-east-1'] # s3 ListBuckets is a global request, so no region required. if service == "route53": return ['us-east-1'] # route53 is a global service, but the endpoint is in us-east-1. service_regions = boto3.Session().get_available_regions(service) if regions: # If regions were passed, return the intersecion. return [r for r in regions if r in service_regions] else: return service_regions def get_client(service, region=None): """Return (cached) boto3 clients for this service and this region""" if (service, region) not in _CLIENTS: _CLIENTS[(service, region)] = boto3.Session(region_name=region).client(service) return _CLIENTS[(service, region)]
Use us-east-1 to query route53
Use us-east-1 to query route53 Route53 is a global service so doesn't belong to a region, but the API endpoint is in us-east-1. This makes various listings now work, but not record sets. Updates #4.
Python
mit
JohannesEbke/aws_list_all
import boto3 _CLIENTS = {} def get_regions_for_service(service, regions=()): """Given a service name, return a list of region names where this service can have resources, restricted by a possible set of regions.""" if service == "s3": return ['us-east-1'] # s3 ListBuckets is a global request, so no region required. + if service == "route53": + return ['us-east-1'] # route53 is a global service, but the endpoint is in us-east-1. service_regions = boto3.Session().get_available_regions(service) if regions: # If regions were passed, return the intersecion. return [r for r in regions if r in service_regions] else: return service_regions def get_client(service, region=None): """Return (cached) boto3 clients for this service and this region""" if (service, region) not in _CLIENTS: _CLIENTS[(service, region)] = boto3.Session(region_name=region).client(service) return _CLIENTS[(service, region)]
Use us-east-1 to query route53
## Code Before:
import forms  # noqa
import models  # noqa


__all__ = (
    'forms',
    'models',
)


__version__ = 'unknown'
try:
    __version__ = __import__('pkg_resources').get_distribution('django_richenum').version
except Exception as e:
    pass

## Instruction:
Remove unnecessary import of form submodule

## Code After:
__version__ = 'unknown'
try:
    __version__ = __import__('pkg_resources').get_distribution('django_richenum').version
except Exception as e:
    pass
// ... existing code ... return ['us-east-1'] # s3 ListBuckets is a global request, so no region required. if service == "route53": return ['us-east-1'] # route53 is a global service, but the endpoint is in us-east-1. service_regions = boto3.Session().get_available_regions(service) // ... rest of the code ...
bf0b3cb27fa2b518fcc3f5116da0e4dbde25aae8
src/django_richenum/__init__.py
src/django_richenum/__init__.py
import forms # noqa import models # noqa __all__ = ( 'forms', 'models', ) __version__ = 'unknown' try: __version__ = __import__('pkg_resources').get_distribution('django_richenum').version except Exception as e: pass
__version__ = 'unknown' try: __version__ = __import__('pkg_resources').get_distribution('django_richenum').version except Exception as e: pass
Remove unnecessary import of form submodule
Remove unnecessary import of form submodule
Python
mit
hearsaycorp/django-richenum,dhui/django-richenum,hearsaycorp/django-richenum,asherf/django-richenum,adepue/django-richenum
- import forms # noqa - import models # noqa - - - __all__ = ( - 'forms', - 'models', - ) - - __version__ = 'unknown' try: __version__ = __import__('pkg_resources').get_distribution('django_richenum').version except Exception as e: pass
Remove unnecessary import of form submodule
## Code Before:
from django import template

from quickphotos.models import Photo

register = template.Library()


@register.assignment_tag
def get_latest_photos(user, limit=None):
    photos = Photo.objects.filter(user=user)

    if limit is not None:
        photos = photos[:limit]

    return photos

## Instruction:
Add support for multiple users photos

## Code After:
from django import template

from quickphotos.models import Photo

register = template.Library()


@register.assignment_tag
def get_latest_photos(*args, **kwargs):
    limit = kwargs.pop('limit', None)
    photos = Photo.objects.all()

    if args:
        photos = photos.filter(user__in=args)

    if limit is not None:
        photos = photos[:limit]

    return photos
... __version__ = 'unknown' ...
db7df35458ac132bb84355df1cf2a5e329ca1d84
quickphotos/templatetags/quickphotos_tags.py
quickphotos/templatetags/quickphotos_tags.py
from django import template from quickphotos.models import Photo register = template.Library() @register.assignment_tag def get_latest_photos(user, limit=None): photos = Photo.objects.filter(user=user) if limit is not None: photos = photos[:limit] return photos
from django import template from quickphotos.models import Photo register = template.Library() @register.assignment_tag def get_latest_photos(*args, **kwargs): limit = kwargs.pop('limit', None) photos = Photo.objects.all() if args: photos = photos.filter(user__in=args) if limit is not None: photos = photos[:limit] return photos
Add support for multiple users photos
Add support for multiple users photos
Python
bsd-3-clause
blancltd/django-quick-photos,kmlebedev/mezzanine-instagram-quickphotos
from django import template from quickphotos.models import Photo register = template.Library() @register.assignment_tag - def get_latest_photos(user, limit=None): + def get_latest_photos(*args, **kwargs): + limit = kwargs.pop('limit', None) - photos = Photo.objects.filter(user=user) + photos = Photo.objects.all() + + if args: + photos = photos.filter(user__in=args) if limit is not None: photos = photos[:limit] return photos
Add support for multiple users photos
## Code Before:
from __future__ import absolute_import, division, print_function

import json

from unittest import TestCase

import httpretty

from faker import Faker

from polyaxon_schemas.user import UserConfig

from polyaxon_client.auth import AuthClient

faker = Faker()


class TestAuthClient(TestCase):
    def setUp(self):
        self.client = AuthClient(host='http://localhost',
                                 version='v1',
                                 token=faker.uuid4())

    @httpretty.activate
    def test_get_user(self):
        user = UserConfig('user', '[email protected]').to_dict()
        httpretty.register_uri(
            httpretty.GET,
            AuthClient._build_url(
                AuthClient.BASE_URL.format('http://localhost', 'v1'),
                AuthClient.ENDPOINT),
            body=json.dumps(user),
            content_type='application/json', status=200)

        user_result = self.client.get_user()
        assert user == user_result.to_dict()

## Instruction:
Fix auth tests and add login test

## Code After:
from __future__ import absolute_import, division, print_function

import json
import uuid

from unittest import TestCase

import httpretty

from faker import Faker

from polyaxon_schemas.authentication import CredentialsConfig
from polyaxon_schemas.user import UserConfig

from polyaxon_client.auth import AuthClient

faker = Faker()


class TestAuthClient(TestCase):
    def setUp(self):
        self.client = AuthClient(host='http://localhost',
                                 version='v1',
                                 token=faker.uuid4())
        self.base_url = AuthClient.BASE_URL.format('http://localhost', 'v1')

    @httpretty.activate
    def test_get_user(self):
        user = UserConfig('user', '[email protected]').to_dict()
        httpretty.register_uri(
            httpretty.GET,
            AuthClient._build_url(
                self.base_url,
                AuthClient.ENDPOINT),
            body=json.dumps(user),
            content_type='application/json', status=200)

        user_result = self.client.get_user()
        assert user == user_result.to_dict()

    @httpretty.activate
    def test_login(self):
        token = uuid.uuid4().hex
        httpretty.register_uri(
            httpretty.POST,
            AuthClient._build_url(
                self.base_url,
                AuthClient.ENDPOINT,
                'token'
            ),
            body=json.dumps({'token': token}),
            content_type='application/json', status=200)

        credentials = CredentialsConfig('user', 'password')
        assert token == self.client.login(credentials=credentials)
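Reduced to its essentials, the new test uses the standard httpretty recipe of stubbing an endpoint and asserting against the canned body (the URL and payload here are illustrative, not the client's real routes):

    import json
    import httpretty
    import requests

    @httpretty.activate
    def test_stubbed_endpoint():
        httpretty.register_uri(
            httpretty.POST, 'http://localhost/v1/token',
            body=json.dumps({'token': 'abc'}),
            content_type='application/json', status=200)
        assert requests.post('http://localhost/v1/token').json() == {'token': 'abc'}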
# ... existing code ... @register.assignment_tag def get_latest_photos(*args, **kwargs): limit = kwargs.pop('limit', None) photos = Photo.objects.all() if args: photos = photos.filter(user__in=args) # ... rest of the code ...
f50ef6d331afa5a55467a104bc307edbdb2cd650
tests/test_auth.py
tests/test_auth.py
from __future__ import absolute_import, division, print_function import json from unittest import TestCase import httpretty from faker import Faker from polyaxon_schemas.user import UserConfig from polyaxon_client.auth import AuthClient faker = Faker() class TestAuthClient(TestCase): def setUp(self): self.client = AuthClient(host='http://localhost', version='v1', token=faker.uuid4()) @httpretty.activate def test_get_user(self): user = UserConfig('user', '[email protected]').to_dict() httpretty.register_uri( httpretty.GET, AuthClient._build_url( AuthClient.BASE_URL.format('http://localhost', 'v1'), AuthClient.ENDPOINT), body=json.dumps(user), content_type='application/json', status=200) user_result = self.client.get_user() assert user == user_result.to_dict()
from __future__ import absolute_import, division, print_function import json import uuid from unittest import TestCase import httpretty from faker import Faker from polyaxon_schemas.authentication import CredentialsConfig from polyaxon_schemas.user import UserConfig from polyaxon_client.auth import AuthClient faker = Faker() class TestAuthClient(TestCase): def setUp(self): self.client = AuthClient(host='http://localhost', version='v1', token=faker.uuid4()) self.base_url = AuthClient.BASE_URL.format('http://localhost', 'v1') @httpretty.activate def test_get_user(self): user = UserConfig('user', '[email protected]').to_dict() httpretty.register_uri( httpretty.GET, AuthClient._build_url( self.base_url, AuthClient.ENDPOINT), body=json.dumps(user), content_type='application/json', status=200) user_result = self.client.get_user() assert user == user_result.to_dict() @httpretty.activate def test_login(self): token = uuid.uuid4().hex httpretty.register_uri( httpretty.POST, AuthClient._build_url( self.base_url, AuthClient.ENDPOINT, 'token' ), body=json.dumps({'token': token}), content_type='application/json', status=200) credentials = CredentialsConfig('user', 'password') assert token == self.client.login(credentials=credentials)
Fix auth tests and add login test
Fix auth tests and add login test
Python
apache-2.0
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
from __future__ import absolute_import, division, print_function

import json
+ import uuid
from unittest import TestCase

import httpretty
from faker import Faker
+ from polyaxon_schemas.authentication import CredentialsConfig
from polyaxon_schemas.user import UserConfig

from polyaxon_client.auth import AuthClient

faker = Faker()


class TestAuthClient(TestCase):
    def setUp(self):
        self.client = AuthClient(host='http://localhost', version='v1', token=faker.uuid4())
+         self.base_url = AuthClient.BASE_URL.format('http://localhost', 'v1')

    @httpretty.activate
    def test_get_user(self):
        user = UserConfig('user', '[email protected]').to_dict()
        httpretty.register_uri(
            httpretty.GET,
            AuthClient._build_url(
-                 AuthClient.BASE_URL.format('http://localhost', 'v1'),
+                 self.base_url,
                AuthClient.ENDPOINT),
            body=json.dumps(user),
            content_type='application/json', status=200)

        user_result = self.client.get_user()
        assert user == user_result.to_dict()

+     @httpretty.activate
+     def test_login(self):
+         token = uuid.uuid4().hex
+         httpretty.register_uri(
+             httpretty.POST,
+             AuthClient._build_url(
+                 self.base_url,
+                 AuthClient.ENDPOINT,
+                 'token'
+             ),
+             body=json.dumps({'token': token}),
+             content_type='application/json', status=200)
+
+         credentials = CredentialsConfig('user', 'password')
+         assert token == self.client.login(credentials=credentials)
+
Fix auth tests and add login test
## Code Before:
from __future__ import absolute_import, division, print_function

import json
from unittest import TestCase

import httpretty
from faker import Faker
from polyaxon_schemas.user import UserConfig

from polyaxon_client.auth import AuthClient

faker = Faker()


class TestAuthClient(TestCase):
    def setUp(self):
        self.client = AuthClient(host='http://localhost', version='v1', token=faker.uuid4())

    @httpretty.activate
    def test_get_user(self):
        user = UserConfig('user', '[email protected]').to_dict()
        httpretty.register_uri(
            httpretty.GET,
            AuthClient._build_url(
                AuthClient.BASE_URL.format('http://localhost', 'v1'),
                AuthClient.ENDPOINT),
            body=json.dumps(user),
            content_type='application/json', status=200)

        user_result = self.client.get_user()
        assert user == user_result.to_dict()

## Instruction:
Fix auth tests and add login test

## Code After:
from __future__ import absolute_import, division, print_function

import json
import uuid
from unittest import TestCase

import httpretty
from faker import Faker
from polyaxon_schemas.authentication import CredentialsConfig
from polyaxon_schemas.user import UserConfig

from polyaxon_client.auth import AuthClient

faker = Faker()


class TestAuthClient(TestCase):
    def setUp(self):
        self.client = AuthClient(host='http://localhost', version='v1', token=faker.uuid4())
        self.base_url = AuthClient.BASE_URL.format('http://localhost', 'v1')

    @httpretty.activate
    def test_get_user(self):
        user = UserConfig('user', '[email protected]').to_dict()
        httpretty.register_uri(
            httpretty.GET,
            AuthClient._build_url(
                self.base_url,
                AuthClient.ENDPOINT),
            body=json.dumps(user),
            content_type='application/json', status=200)

        user_result = self.client.get_user()
        assert user == user_result.to_dict()

    @httpretty.activate
    def test_login(self):
        token = uuid.uuid4().hex
        httpretty.register_uri(
            httpretty.POST,
            AuthClient._build_url(
                self.base_url,
                AuthClient.ENDPOINT,
                'token'
            ),
            body=json.dumps({'token': token}),
            content_type='application/json', status=200)

        credentials = CredentialsConfig('user', 'password')
        assert token == self.client.login(credentials=credentials)

...
import json
import uuid
from unittest import TestCase
...
from faker import Faker
from polyaxon_schemas.authentication import CredentialsConfig
...
        self.client = AuthClient(host='http://localhost', version='v1', token=faker.uuid4())
        self.base_url = AuthClient.BASE_URL.format('http://localhost', 'v1')
...
            AuthClient._build_url(
                self.base_url,
                AuthClient.ENDPOINT),
...
        assert user == user_result.to_dict()

    @httpretty.activate
    def test_login(self):
        token = uuid.uuid4().hex
        httpretty.register_uri(
            httpretty.POST,
            AuthClient._build_url(
                self.base_url,
                AuthClient.ENDPOINT,
                'token'
            ),
            body=json.dumps({'token': token}),
            content_type='application/json', status=200)

        credentials = CredentialsConfig('user', 'password')
        assert token == self.client.login(credentials=credentials)
...
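For readers who haven't met httpretty: it patches the socket layer, so any HTTP client in the process hits the registered stub. A standalone sketch of the same mocking pattern, with a made-up URL and payload:

import json

import httpretty
import requests


@httpretty.activate
def test_status_endpoint():
    # Any request to this URL now returns the canned JSON body below.
    httpretty.register_uri(
        httpretty.GET, 'http://localhost/api/status',
        body=json.dumps({'ok': True}),
        content_type='application/json', status=200)

    assert requests.get('http://localhost/api/status').json() == {'ok': True}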
889eed552f4e17797764a9d9a2da6bbaa6d5dd33
admin_panel/views.py
admin_panel/views.py
from django.views import View
from django.views.generic import TemplateView
from django.contrib import auth
from django.contrib import messages
from django import http


class LoginView(TemplateView):
    template_name = "admin/login.html"

    def post(self, request):
        username = request.POST['username']
        password = request.POST['password']
        user_object = auth.authenticate(request, username=username, password=password)

        if user_object is None:
            messages.error(request, "Invalid credentials")
            return self.get(request)

        auth.login(request, user_object)
        messages.success(request, "You've been logged in")
        return http.HttpResponseRedirect(self.get_next_url(request))

    def get_next_url(self, request):
        if "next" in request.GET:
            return request.GET['next']
        else:
            return "/administration/panel"


class Panel(TemplateView):
    template_name = "admin/panel.html"


class LogoutView(View):
    def get(self, request):
        auth.logout(request)
        return http.HttpResponseRedirect("/administration/login")

from django.views import View
from django.views.generic import TemplateView
from django.contrib import auth
from django.contrib import messages
from django import http
from django.urls import reverse


class LoginView(TemplateView):
    template_name = "admin/login.html"

    def post(self, request):
        username = request.POST['username']
        password = request.POST['password']
        user_object = auth.authenticate(request, username=username, password=password)

        if user_object is None:
            messages.error(request, "Invalid credentials")
            return self.get(request)

        auth.login(request, user_object)
        messages.success(request, "You've been logged in")
        return http.HttpResponseRedirect(self.get_next_url(request))

    def get_next_url(self, request):
        if "next" in request.GET:
            return request.GET['next']
        else:
            return reverse("admin:Panel")


class Panel(TemplateView):
    template_name = "admin/panel.html"


class LogoutView(View):
    def get(self, request):
        auth.logout(request)
        return http.HttpResponseRedirect("/administration/login")
Use django reverse function to obtain url instead of hard-coding
Use django reverse function to obtain url instead of hard-coding
Python
mpl-2.0
Apo11onian/Apollo-Blog,Apo11onian/Apollo-Blog,Apo11onian/Apollo-Blog
from django.views import View
from django.views.generic import TemplateView
from django.contrib import auth
from django.contrib import messages
from django import http
+ from django.urls import reverse


class LoginView(TemplateView):
    template_name = "admin/login.html"

    def post(self, request):
        username = request.POST['username']
        password = request.POST['password']
        user_object = auth.authenticate(request, username=username, password=password)

        if user_object is None:
            messages.error(request, "Invalid credentials")
            return self.get(request)

        auth.login(request, user_object)
        messages.success(request, "You've been logged in")
        return http.HttpResponseRedirect(self.get_next_url(request))

    def get_next_url(self, request):
        if "next" in request.GET:
            return request.GET['next']
        else:
-             return "/administration/panel"
+             return reverse("admin:Panel")


class Panel(TemplateView):
    template_name = "admin/panel.html"


class LogoutView(View):
    def get(self, request):
        auth.logout(request)
        return http.HttpResponseRedirect("/administration/login")
Use django reverse function to obtain url instead of hard-coding
## Code Before:
from django.views import View
from django.views.generic import TemplateView
from django.contrib import auth
from django.contrib import messages
from django import http


class LoginView(TemplateView):
    template_name = "admin/login.html"

    def post(self, request):
        username = request.POST['username']
        password = request.POST['password']
        user_object = auth.authenticate(request, username=username, password=password)

        if user_object is None:
            messages.error(request, "Invalid credentials")
            return self.get(request)

        auth.login(request, user_object)
        messages.success(request, "You've been logged in")
        return http.HttpResponseRedirect(self.get_next_url(request))

    def get_next_url(self, request):
        if "next" in request.GET:
            return request.GET['next']
        else:
            return "/administration/panel"


class Panel(TemplateView):
    template_name = "admin/panel.html"


class LogoutView(View):
    def get(self, request):
        auth.logout(request)
        return http.HttpResponseRedirect("/administration/login")

## Instruction:
Use django reverse function to obtain url instead of hard-coding

## Code After:
from django.views import View
from django.views.generic import TemplateView
from django.contrib import auth
from django.contrib import messages
from django import http
from django.urls import reverse


class LoginView(TemplateView):
    template_name = "admin/login.html"

    def post(self, request):
        username = request.POST['username']
        password = request.POST['password']
        user_object = auth.authenticate(request, username=username, password=password)

        if user_object is None:
            messages.error(request, "Invalid credentials")
            return self.get(request)

        auth.login(request, user_object)
        messages.success(request, "You've been logged in")
        return http.HttpResponseRedirect(self.get_next_url(request))

    def get_next_url(self, request):
        if "next" in request.GET:
            return request.GET['next']
        else:
            return reverse("admin:Panel")


class Panel(TemplateView):
    template_name = "admin/panel.html"


class LogoutView(View):
    def get(self, request):
        auth.logout(request)
        return http.HttpResponseRedirect("/administration/login")

// ... existing code ...
from django import http
from django.urls import reverse
// ... modified code ...
        else:
            return reverse("admin:Panel")
// ... rest of the code ...
31d0cd541980ef6bf15d3a29b68cc0cc994c28a4
packs/st2cd/actions/kvstore.py
packs/st2cd/actions/kvstore.py
from st2actions.runners.pythonrunner import Action
from st2client.client import Client
from st2client.models.datastore import KeyValuePair


class KVPAction(Action):
    def run(self, key, action, st2host='localhost', value=""):
        st2_endpoints = {
            'action': "http://%s:9101" % st2host,
            'reactor': "http://%s:9102" % st2host,
            'datastore': "http://%s:9103" % st2host
        }

        try:
            client = Client(st2_endpoints)
        except Exception as e:
            return e

        if action == 'get':
            kvp = client.keys.get_by_name(key)

            if not kvp:
                raise Exception('Key error with %s.' % key)

            return kvp.value
        else:
            instance = KeyValuePair()
            instance.id = client.keys.get_by_name(key).name
            instance.name = key
            instance.value = value

            try:
                kvstore = getattr(client.keys, action)
                kvp = kvstore(instance)
            except Exception as e:
                raise

            if action == 'delete':
                return kvp
            else:
                return kvp.serialize()

from st2actions.runners.pythonrunner import Action
from st2client.client import Client
from st2client.models.datastore import KeyValuePair


class KVPAction(Action):
    def run(self, key, action, st2host='localhost', value=""):
        st2_endpoints = {
            'action': "http://%s:9101" % st2host,
            'reactor': "http://%s:9102" % st2host,
            'datastore': "http://%s:9103" % st2host
        }

        try:
            client = Client(st2_endpoints)
        except Exception as e:
            return e

        if action == 'get':
            kvp = client.keys.get_by_name(key)

            if not kvp:
                raise Exception('Key error with %s.' % key)

            return kvp.value
        else:
            instance = client.keys.get_by_name(key) or KeyValuePair()
            instance.id = key
            instance.name = key
            instance.value = value

            kvp = client.keys.update(instance) if action in ['create', 'update'] else None

            if action == 'delete':
                return kvp
            else:
                return kvp.serialize()
Fix create action for key value pair
Fix create action for key value pair
Python
apache-2.0
StackStorm/st2incubator,pinterb/st2incubator,pinterb/st2incubator,pinterb/st2incubator,StackStorm/st2incubator
from st2actions.runners.pythonrunner import Action
from st2client.client import Client
from st2client.models.datastore import KeyValuePair


class KVPAction(Action):
    def run(self, key, action, st2host='localhost', value=""):
        st2_endpoints = {
            'action': "http://%s:9101" % st2host,
            'reactor': "http://%s:9102" % st2host,
            'datastore': "http://%s:9103" % st2host
        }

        try:
            client = Client(st2_endpoints)
        except Exception as e:
            return e

        if action == 'get':
            kvp = client.keys.get_by_name(key)

            if not kvp:
                raise Exception('Key error with %s.' % key)

            return kvp.value
        else:
-             instance = KeyValuePair()
-             instance.id = client.keys.get_by_name(key).name
+             instance = client.keys.get_by_name(key) or KeyValuePair()
+             instance.id = key
            instance.name = key
            instance.value = value

+             kvp = client.keys.update(instance) if action in ['create', 'update'] else None
-             try:
-                 kvstore = getattr(client.keys, action)
-                 kvp = kvstore(instance)
-             except Exception as e:
-                 raise

            if action == 'delete':
                return kvp
            else:
                return kvp.serialize()
Fix create action for key value pair
## Code Before:
from st2actions.runners.pythonrunner import Action
from st2client.client import Client
from st2client.models.datastore import KeyValuePair


class KVPAction(Action):
    def run(self, key, action, st2host='localhost', value=""):
        st2_endpoints = {
            'action': "http://%s:9101" % st2host,
            'reactor': "http://%s:9102" % st2host,
            'datastore': "http://%s:9103" % st2host
        }

        try:
            client = Client(st2_endpoints)
        except Exception as e:
            return e

        if action == 'get':
            kvp = client.keys.get_by_name(key)

            if not kvp:
                raise Exception('Key error with %s.' % key)

            return kvp.value
        else:
            instance = KeyValuePair()
            instance.id = client.keys.get_by_name(key).name
            instance.name = key
            instance.value = value

            try:
                kvstore = getattr(client.keys, action)
                kvp = kvstore(instance)
            except Exception as e:
                raise

            if action == 'delete':
                return kvp
            else:
                return kvp.serialize()

## Instruction:
Fix create action for key value pair

## Code After:
from st2actions.runners.pythonrunner import Action
from st2client.client import Client
from st2client.models.datastore import KeyValuePair


class KVPAction(Action):
    def run(self, key, action, st2host='localhost', value=""):
        st2_endpoints = {
            'action': "http://%s:9101" % st2host,
            'reactor': "http://%s:9102" % st2host,
            'datastore': "http://%s:9103" % st2host
        }

        try:
            client = Client(st2_endpoints)
        except Exception as e:
            return e

        if action == 'get':
            kvp = client.keys.get_by_name(key)

            if not kvp:
                raise Exception('Key error with %s.' % key)

            return kvp.value
        else:
            instance = client.keys.get_by_name(key) or KeyValuePair()
            instance.id = key
            instance.name = key
            instance.value = value

            kvp = client.keys.update(instance) if action in ['create', 'update'] else None

            if action == 'delete':
                return kvp
            else:
                return kvp.serialize()

...
        else:
            instance = client.keys.get_by_name(key) or KeyValuePair()
            instance.id = key
            instance.name = key
...
            kvp = client.keys.update(instance) if action in ['create', 'update'] else None
...
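The fix swaps dynamic getattr dispatch for a single update() call guarded by the action name, and reuses an existing record when one is found. The same upsert idiom in isolation — store stands in for client.keys, and it assumes the lookup returns a falsy value (not an exception) for a missing key:

def upsert(store, key, value, model_cls=KeyValuePair):
    # Reuse the existing record if the lookup finds one, else start fresh.
    instance = store.get_by_name(key) or model_cls()
    instance.id = key
    instance.name = key
    instance.value = value
    return store.update(instance)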
3b768fdc642471446092a08446ec8f2ab08281c3
clean.py
clean.py
import GutterColor.settings as settings


class Clean:
    """Clean up the cache and generated icons"""

    def __init__(self, view):
        pass

import GutterColor.settings as settings
from os import walk, remove, path, listdir
from shutil import rmtree
from threading import Thread


class Clean(Thread):
    """Clean up the cache and generated icons"""

    def __init__(self, files):
        Thread.__init__(self)
        self.files = files

    def run(self):
        self.remove_folders()
        self.remove_files()


    def folder_ids(self, name):
        """Return all the open folder ids"""
        name = name.split('/')[-1]
        return int(name) if not name == 'icons' else None


    def file_ids(self, name):
        """Return all file ids"""
        name = name.split('/')[-1]
        return int(name) if not name == 'icons' else None


    def remove_folders(self):
        """Remove all the icon folders which are not currently open"""
        # Get all the folder ids
        folders = list(filter(None, map(self.folder_ids, [x[0] for x in walk(settings.ICON_PATH)])))
        # Delete the folders
        for folder in folders:
            if folder not in self.files:
                rmtree(path.join(settings.ICON_PATH, str(folder)))


    def remove_files(self):
        """Remove all the cached files which are not currently open"""
        files = [ f for f in listdir(settings.CACHE_PATH) if path.isfile(path.join(settings.CACHE_PATH,f)) ]
        for f in files:
            if f == '.keep': pass
            if int(f) not in self.files:
                remove(path.join(settings.CACHE_PATH, f))
Add Clean class to remove files/folders.
Add Clean class to remove files/folders.
Python
mit
ggordan/GutterColor,ggordan/GutterColor
import GutterColor.settings as settings
+ from os import walk, remove, path, listdir
+ from shutil import rmtree
+ from threading import Thread

- class Clean:
+ class Clean(Thread):
    """Clean up the cache and generated icons"""

-     def __init__(self, view):
+     def __init__(self, files):
-         pass
+         Thread.__init__(self)
+         self.files = files

+     def run(self):
+         self.remove_folders()
+         self.remove_files()
+
+
+     def folder_ids(self, name):
+         """Return all the open folder ids"""
+         name = name.split('/')[-1]
+         return int(name) if not name == 'icons' else None
+
+
+     def file_ids(self, name):
+         """Return all file ids"""
+         name = name.split('/')[-1]
+         return int(name) if not name == 'icons' else None
+
+
+     def remove_folders(self):
+         """Remove all the icon folders which are not currently open"""
+         # Get all the folder ids
+         folders = list(filter(None, map(self.folder_ids, [x[0] for x in walk(settings.ICON_PATH)])))
+         # Delete the folders
+         for folder in folders:
+             if folder not in self.files:
+                 rmtree(path.join(settings.ICON_PATH, str(folder)))
+
+
+     def remove_files(self):
+         """Remove all the cached files which are not currently open"""
+         files = [ f for f in listdir(settings.CACHE_PATH) if path.isfile(path.join(settings.CACHE_PATH,f)) ]
+         for f in files:
+             if f == '.keep': pass
+             if int(f) not in self.files:
+                 remove(path.join(settings.CACHE_PATH, f))
Add Clean class to remove files/folders.
## Code Before:
import GutterColor.settings as settings


class Clean:
    """Clean up the cache and generated icons"""

    def __init__(self, view):
        pass

## Instruction:
Add Clean class to remove files/folders.

## Code After:
import GutterColor.settings as settings
from os import walk, remove, path, listdir
from shutil import rmtree
from threading import Thread


class Clean(Thread):
    """Clean up the cache and generated icons"""

    def __init__(self, files):
        Thread.__init__(self)
        self.files = files

    def run(self):
        self.remove_folders()
        self.remove_files()


    def folder_ids(self, name):
        """Return all the open folder ids"""
        name = name.split('/')[-1]
        return int(name) if not name == 'icons' else None


    def file_ids(self, name):
        """Return all file ids"""
        name = name.split('/')[-1]
        return int(name) if not name == 'icons' else None


    def remove_folders(self):
        """Remove all the icon folders which are not currently open"""
        # Get all the folder ids
        folders = list(filter(None, map(self.folder_ids, [x[0] for x in walk(settings.ICON_PATH)])))
        # Delete the folders
        for folder in folders:
            if folder not in self.files:
                rmtree(path.join(settings.ICON_PATH, str(folder)))


    def remove_files(self):
        """Remove all the cached files which are not currently open"""
        files = [ f for f in listdir(settings.CACHE_PATH) if path.isfile(path.join(settings.CACHE_PATH,f)) ]
        for f in files:
            if f == '.keep': pass
            if int(f) not in self.files:
                remove(path.join(settings.CACHE_PATH, f))

// ... existing code ...
import GutterColor.settings as settings
from os import walk, remove, path, listdir
from shutil import rmtree
from threading import Thread


class Clean(Thread):
    """Clean up the cache and generated icons"""
// ... modified code ...
    def __init__(self, files):
        Thread.__init__(self)
        self.files = files

    def run(self):
        self.remove_folders()
        self.remove_files()


    def folder_ids(self, name):
        """Return all the open folder ids"""
        name = name.split('/')[-1]
        return int(name) if not name == 'icons' else None


    def file_ids(self, name):
        """Return all file ids"""
        name = name.split('/')[-1]
        return int(name) if not name == 'icons' else None


    def remove_folders(self):
        """Remove all the icon folders which are not currently open"""
        # Get all the folder ids
        folders = list(filter(None, map(self.folder_ids, [x[0] for x in walk(settings.ICON_PATH)])))
        # Delete the folders
        for folder in folders:
            if folder not in self.files:
                rmtree(path.join(settings.ICON_PATH, str(folder)))


    def remove_files(self):
        """Remove all the cached files which are not currently open"""
        files = [ f for f in listdir(settings.CACHE_PATH) if path.isfile(path.join(settings.CACHE_PATH,f)) ]
        for f in files:
            if f == '.keep': pass
            if int(f) not in self.files:
                remove(path.join(settings.CACHE_PATH, f))
// ... rest of the code ...
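Because Clean now subclasses Thread, the caller just instantiates and starts it, and run() executes off the UI thread. A one-line sketch, with open_file_ids standing in for the ids of the currently open files:

Clean(open_file_ids).start()  # Thread.start() schedules Clean.run() in the background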
f735cd9f9cfdcfba54005151fee3deb7741282c3
show.py
show.py
import numpy as np
import scipy as sp
import matplotlib.pyplot as plt


def sweep(x, sweep_time, fs):

    t = np.arange(0, sweep_time, 1 / fs)
    p = 20 * np.log10(abs(sp.fft(x)))
    f = np.linspace(0, fs / 2, len(p))
    plt.figure(1)
    plt.subplot(211)
    plt.plot(t, x)
    plt.grid()
    plt.subplot(212)
    plt.plot(f, p)
    plt.xscale('log')
    plt.grid()
    plt.show()

import numpy as np
import matplotlib.pyplot as plt


def sweep(x, sweep_time, fs):

    plt.subplots_adjust(hspace=0.4)
    t = np.arange(0, sweep_time, 1 / fs)
    p = 20 * np.log10(abs(np.fft.rfft(x)))
    f = np.linspace(0, fs / 2, len(p))
    plt.figure(1)
    plt.subplot(211)
    plt.plot(t, x)
    plt.grid()
    plt.xlabel('t / s')
    plt.ylabel('x(t)')
    plt.title('time domain')
    plt.subplot(212)
    plt.plot(f, p)
    plt.xscale('log')
    plt.grid()
    plt.xlabel('f / Hz')
    plt.ylabel('A / dB')
    plt.title('frequency domain')
    plt.show()
Add axis label and change import
Add axis label and change import
Python
mit
franzpl/sweep,spatialaudio/sweep
import numpy as np
- import scipy as sp
import matplotlib.pyplot as plt


def sweep(x, sweep_time, fs):

+     plt.subplots_adjust(hspace=0.4)
    t = np.arange(0, sweep_time, 1 / fs)
-     p = 20 * np.log10(abs(sp.fft(x)))
+     p = 20 * np.log10(abs(np.fft.rfft(x)))
    f = np.linspace(0, fs / 2, len(p))
    plt.figure(1)
    plt.subplot(211)
    plt.plot(t, x)
    plt.grid()
+     plt.xlabel('t / s')
+     plt.ylabel('x(t)')
+     plt.title('time domain')
    plt.subplot(212)
    plt.plot(f, p)
    plt.xscale('log')
    plt.grid()
+     plt.xlabel('f / Hz')
+     plt.ylabel('A / dB')
+     plt.title('frequency domain')
    plt.show()
Add axis label and change import
## Code Before:
import numpy as np
import scipy as sp
import matplotlib.pyplot as plt


def sweep(x, sweep_time, fs):

    t = np.arange(0, sweep_time, 1 / fs)
    p = 20 * np.log10(abs(sp.fft(x)))
    f = np.linspace(0, fs / 2, len(p))
    plt.figure(1)
    plt.subplot(211)
    plt.plot(t, x)
    plt.grid()
    plt.subplot(212)
    plt.plot(f, p)
    plt.xscale('log')
    plt.grid()
    plt.show()

## Instruction:
Add axis label and change import

## Code After:
import numpy as np
import matplotlib.pyplot as plt


def sweep(x, sweep_time, fs):

    plt.subplots_adjust(hspace=0.4)
    t = np.arange(0, sweep_time, 1 / fs)
    p = 20 * np.log10(abs(np.fft.rfft(x)))
    f = np.linspace(0, fs / 2, len(p))
    plt.figure(1)
    plt.subplot(211)
    plt.plot(t, x)
    plt.grid()
    plt.xlabel('t / s')
    plt.ylabel('x(t)')
    plt.title('time domain')
    plt.subplot(212)
    plt.plot(f, p)
    plt.xscale('log')
    plt.grid()
    plt.xlabel('f / Hz')
    plt.ylabel('A / dB')
    plt.title('frequency domain')
    plt.show()

# ... existing code ...
import numpy as np
import matplotlib.pyplot as plt
# ... modified code ...
    plt.subplots_adjust(hspace=0.4)
    t = np.arange(0, sweep_time, 1 / fs)
    p = 20 * np.log10(abs(np.fft.rfft(x)))
    f = np.linspace(0, fs / 2, len(p))
...
    plt.grid()
    plt.xlabel('t / s')
    plt.ylabel('x(t)')
    plt.title('time domain')
    plt.subplot(212)
...
    plt.grid()
    plt.xlabel('f / Hz')
    plt.ylabel('A / dB')
    plt.title('frequency domain')
    plt.show()
# ... rest of the code ...
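A quick way to exercise the plotting helper — the chirp below is a made-up test signal, not part of the repository:

import numpy as np

fs = 44100                    # sample rate in Hz
T = 2                         # sweep length in seconds
t = np.arange(0, T, 1 / fs)
f0, f1 = 20, 2000             # start/stop frequencies
x = np.sin(2 * np.pi * (f0 + (f1 - f0) / (2 * T) * t) * t)  # linear chirp
sweep(x, T, fs)               # time plot on top, log-frequency magnitude below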
93cefdc2c309ed0b81fe4ec7d49c0c8bead783a9
lib/path_utils.py
lib/path_utils.py
"""Originally from funfactory (funfactory/path_utils.py) on a380a54""" import os from os.path import abspath, dirname def path(*a): return os.path.join(ROOT, *a) def import_mod_by_name(target): # stolen from mock :) components = target.split('.') import_path = components.pop(0) thing = __import__(import_path) for comp in components: import_path += ".%s" % comp thing = _dot_lookup(thing, comp, import_path) return thing def _dot_lookup(thing, comp, import_path): try: return getattr(thing, comp) except AttributeError: __import__(import_path) return getattr(thing, comp) ROOT = dirname(abspath(__name__))
"""Originally from funfactory (funfactory/path_utils.py) on a380a54""" import os from os.path import abspath, dirname def path(*a): return os.path.join(ROOT, *a) def import_mod_by_name(target): # stolen from mock :) components = target.split('.') import_path = components.pop(0) thing = __import__(import_path) for comp in components: import_path += ".%s" % comp thing = _dot_lookup(thing, comp, import_path) return thing def _dot_lookup(thing, comp, import_path): try: return getattr(thing, comp) except AttributeError: __import__(import_path) return getattr(thing, comp) ROOT = dirname(dirname(abspath(__file__)))
Use __file__ instead of __name__
Use __file__ instead of __name__
Python
bsd-3-clause
akeym/cyder,zeeman/cyder,murrown/cyder,murrown/cyder,OSU-Net/cyder,drkitty/cyder,OSU-Net/cyder,akeym/cyder,akeym/cyder,OSU-Net/cyder,akeym/cyder,drkitty/cyder,murrown/cyder,drkitty/cyder,zeeman/cyder,OSU-Net/cyder,zeeman/cyder,murrown/cyder,drkitty/cyder,zeeman/cyder
"""Originally from funfactory (funfactory/path_utils.py) on a380a54""" import os from os.path import abspath, dirname def path(*a): return os.path.join(ROOT, *a) def import_mod_by_name(target): # stolen from mock :) components = target.split('.') import_path = components.pop(0) thing = __import__(import_path) for comp in components: import_path += ".%s" % comp thing = _dot_lookup(thing, comp, import_path) return thing def _dot_lookup(thing, comp, import_path): try: return getattr(thing, comp) except AttributeError: __import__(import_path) return getattr(thing, comp) - ROOT = dirname(abspath(__name__)) + ROOT = dirname(dirname(abspath(__file__)))
Use __file__ instead of __name__
## Code Before:
"""Originally from funfactory (funfactory/path_utils.py) on a380a54"""
import os
from os.path import abspath, dirname


def path(*a):
    return os.path.join(ROOT, *a)


def import_mod_by_name(target):
    # stolen from mock :)
    components = target.split('.')
    import_path = components.pop(0)
    thing = __import__(import_path)
    for comp in components:
        import_path += ".%s" % comp
        thing = _dot_lookup(thing, comp, import_path)
    return thing


def _dot_lookup(thing, comp, import_path):
    try:
        return getattr(thing, comp)
    except AttributeError:
        __import__(import_path)
        return getattr(thing, comp)


ROOT = dirname(abspath(__name__))

## Instruction:
Use __file__ instead of __name__

## Code After:
"""Originally from funfactory (funfactory/path_utils.py) on a380a54"""
import os
from os.path import abspath, dirname


def path(*a):
    return os.path.join(ROOT, *a)


def import_mod_by_name(target):
    # stolen from mock :)
    components = target.split('.')
    import_path = components.pop(0)
    thing = __import__(import_path)
    for comp in components:
        import_path += ".%s" % comp
        thing = _dot_lookup(thing, comp, import_path)
    return thing


def _dot_lookup(thing, comp, import_path):
    try:
        return getattr(thing, comp)
    except AttributeError:
        __import__(import_path)
        return getattr(thing, comp)


ROOT = dirname(dirname(abspath(__file__)))

...
ROOT = dirname(dirname(abspath(__file__)))
...
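Why the fix matters: abspath(__name__) resolved the module's dotted name against the current working directory, so ROOT changed with wherever the process happened to start; abspath(__file__) uses the module's real location, and the second dirname climbs out of lib/ to the project root. The lookup helper can be exercised on its own:

mod = import_mod_by_name('os.path')  # walks the dotted path one attribute at a time
print(mod.join('a', 'b'))            # 'a/b' on POSIX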
b5fc8db375e7273fb3b7cbb2318f57f141e25045
src/commoner/profiles/models.py
src/commoner/profiles/models.py
import urlparse

from django.db import models
from django.db.models import permalink
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User

from commoner.util import getBaseURL


class CommonerProfile(models.Model):
    user = models.ForeignKey(User, unique=True)
    nickname = models.CharField(max_length=255, blank=True)
    photo = models.ImageField(upload_to='p')
    homepage = models.URLField(max_length=255, blank=True)
    location = models.CharField(max_length=255, blank=True)
    story = models.TextField(blank=True)

    def __unicode__(self):
        if self.nickname:
            return u"%s (%s)" % (self.user.username, self.nickname)

        return self.user.username

    def display_name(self):
        return self.nickname or self.user.username

    def get_absolute_url(self, request=None):
        if request is None:
            return reverse('profile_view', args=(self.user.username, ) )
        else:
            return urlparse.urljoin(
                getBaseURL(request),
                reverse('profile_view', args=(self.user.username, ) )
                )

import urlparse

from django.db import models
from django.db.models import permalink
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User

from commoner.util import getBaseURL


class CommonerProfile(models.Model):
    user = models.ForeignKey(User, unique=True)
    nickname = models.CharField(max_length=255, blank=True)
    photo = models.ImageField(upload_to='p', blank=True, null=True)
    homepage = models.URLField(max_length=255, blank=True)
    location = models.CharField(max_length=255, blank=True)
    story = models.TextField(blank=True)

    def __unicode__(self):
        if self.nickname:
            return u"%s (%s)" % (self.user.username, self.nickname)

        return self.user.username

    def display_name(self):
        return self.nickname or self.user.username

    def get_absolute_url(self, request=None):
        if request is None:
            return reverse('profile_view', args=(self.user.username, ) )
        else:
            return urlparse.urljoin(
                getBaseURL(request),
                reverse('profile_view', args=(self.user.username, ) )
                )
Allow the photo to be blank.
Allow the photo to be blank.
Python
agpl-3.0
cc-archive/commoner,cc-archive/commoner
import urlparse

from django.db import models
from django.db.models import permalink
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User

from commoner.util import getBaseURL


class CommonerProfile(models.Model):
    user = models.ForeignKey(User, unique=True)
    nickname = models.CharField(max_length=255, blank=True)
-     photo = models.ImageField(upload_to='p')
+     photo = models.ImageField(upload_to='p', blank=True, null=True)
    homepage = models.URLField(max_length=255, blank=True)
    location = models.CharField(max_length=255, blank=True)
    story = models.TextField(blank=True)

    def __unicode__(self):
        if self.nickname:
            return u"%s (%s)" % (self.user.username, self.nickname)

        return self.user.username

    def display_name(self):
        return self.nickname or self.user.username

    def get_absolute_url(self, request=None):
        if request is None:
            return reverse('profile_view', args=(self.user.username, ) )
        else:
            return urlparse.urljoin(
                getBaseURL(request),
                reverse('profile_view', args=(self.user.username, ) )
                )
Allow the photo to be blank.
## Code Before:
import urlparse

from django.db import models
from django.db.models import permalink
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User

from commoner.util import getBaseURL


class CommonerProfile(models.Model):
    user = models.ForeignKey(User, unique=True)
    nickname = models.CharField(max_length=255, blank=True)
    photo = models.ImageField(upload_to='p')
    homepage = models.URLField(max_length=255, blank=True)
    location = models.CharField(max_length=255, blank=True)
    story = models.TextField(blank=True)

    def __unicode__(self):
        if self.nickname:
            return u"%s (%s)" % (self.user.username, self.nickname)

        return self.user.username

    def display_name(self):
        return self.nickname or self.user.username

    def get_absolute_url(self, request=None):
        if request is None:
            return reverse('profile_view', args=(self.user.username, ) )
        else:
            return urlparse.urljoin(
                getBaseURL(request),
                reverse('profile_view', args=(self.user.username, ) )
                )

## Instruction:
Allow the photo to be blank.

## Code After:
import urlparse

from django.db import models
from django.db.models import permalink
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User

from commoner.util import getBaseURL


class CommonerProfile(models.Model):
    user = models.ForeignKey(User, unique=True)
    nickname = models.CharField(max_length=255, blank=True)
    photo = models.ImageField(upload_to='p', blank=True, null=True)
    homepage = models.URLField(max_length=255, blank=True)
    location = models.CharField(max_length=255, blank=True)
    story = models.TextField(blank=True)

    def __unicode__(self):
        if self.nickname:
            return u"%s (%s)" % (self.user.username, self.nickname)

        return self.user.username

    def display_name(self):
        return self.nickname or self.user.username

    def get_absolute_url(self, request=None):
        if request is None:
            return reverse('profile_view', args=(self.user.username, ) )
        else:
            return urlparse.urljoin(
                getBaseURL(request),
                reverse('profile_view', args=(self.user.username, ) )
                )

// ... existing code ...
    nickname = models.CharField(max_length=255, blank=True)
    photo = models.ImageField(upload_to='p', blank=True, null=True)
    homepage = models.URLField(max_length=255, blank=True)
// ... rest of the code ...
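A side note on the field options: for string-backed fields like ImageField the usual Django convention is blank=True alone (an absent upload is stored as an empty string), though adding null=True as here also works. Code that touches the file still needs a guard, since .url raises when nothing is set — a hedged one-liner:

photo_url = profile.photo.url if profile.photo else None  # FieldFile is falsy when empty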
69d2620ee64d367331edcf0260c73034384aae8e
subprocrunner/retry.py
subprocrunner/retry.py
import time
from random import uniform
from typing import Callable, Optional


class Retry:
    def __init__(self, total: int = 3, backoff_factor: float = 0.2, jitter: float = 0.2) -> None:
        self.total = total
        self.__backoff_factor = backoff_factor
        self.__jitter = jitter

        if self.total <= 0:
            raise ValueError("total must be greater than zero")

        if self.__backoff_factor <= 0:
            raise ValueError("backoff_factor must be greater than zero")

        if self.__jitter <= 0:
            raise ValueError("jitter must be greater than zero")

    def calc_backoff_time(self, attempt: int) -> float:
        sleep_duration = self.__backoff_factor * (2 ** max(0, attempt - 1))
        sleep_duration += uniform(0.5 * self.__jitter, 1.5 * self.__jitter)

        return sleep_duration

    def sleep_before_retry(
        self,
        attempt: int,
        logging_method: Optional[Callable] = None,
        retry_target: Optional[str] = None,
    ) -> float:
        sleep_duration = self.calc_backoff_time(attempt)

        if logging_method:
            if retry_target:
                msg = "Retrying '{}' in ".format(retry_target)
            else:
                msg = "Retrying in "
            msg += "{:.2f} seconds ... (attempt={})".format(sleep_duration, attempt)
            logging_method(msg)

        time.sleep(sleep_duration)

        return sleep_duration

import time
from random import uniform
from typing import Callable, Optional


class Retry:
    def __init__(
        self, total: int = 3, backoff_factor: float = 0.2, jitter: float = 0.2, quiet: bool = False
    ) -> None:
        self.total = total
        self.__backoff_factor = backoff_factor
        self.__jitter = jitter
        self.__quiet = quiet

        if self.total <= 0:
            raise ValueError("total must be greater than zero")

        if self.__backoff_factor <= 0:
            raise ValueError("backoff_factor must be greater than zero")

        if self.__jitter <= 0:
            raise ValueError("jitter must be greater than zero")

    def calc_backoff_time(self, attempt: int) -> float:
        sleep_duration = self.__backoff_factor * (2 ** max(0, attempt - 1))
        sleep_duration += uniform(0.5 * self.__jitter, 1.5 * self.__jitter)

        return sleep_duration

    def sleep_before_retry(
        self,
        attempt: int,
        logging_method: Optional[Callable] = None,
        retry_target: Optional[str] = None,
    ) -> float:
        sleep_duration = self.calc_backoff_time(attempt)

        if logging_method and not self.__quiet:
            if retry_target:
                msg = "Retrying '{}' in ".format(retry_target)
            else:
                msg = "Retrying in "
            msg += "{:.2f} seconds ... (attempt={})".format(sleep_duration, attempt)
            logging_method(msg)

        time.sleep(sleep_duration)

        return sleep_duration
Add quiet mode support for Retry
Add quiet mode support for Retry
Python
mit
thombashi/subprocrunner,thombashi/subprocrunner
import time
from random import uniform
from typing import Callable, Optional


class Retry:
+     def __init__(
-     def __init__(self, total: int = 3, backoff_factor: float = 0.2, jitter: float = 0.2) -> None:
+         self, total: int = 3, backoff_factor: float = 0.2, jitter: float = 0.2, quiet: bool = False
+     ) -> None:
        self.total = total
        self.__backoff_factor = backoff_factor
        self.__jitter = jitter
+         self.__quiet = quiet

        if self.total <= 0:
            raise ValueError("total must be greater than zero")

        if self.__backoff_factor <= 0:
            raise ValueError("backoff_factor must be greater than zero")

        if self.__jitter <= 0:
            raise ValueError("jitter must be greater than zero")

    def calc_backoff_time(self, attempt: int) -> float:
        sleep_duration = self.__backoff_factor * (2 ** max(0, attempt - 1))
        sleep_duration += uniform(0.5 * self.__jitter, 1.5 * self.__jitter)

        return sleep_duration

    def sleep_before_retry(
        self,
        attempt: int,
        logging_method: Optional[Callable] = None,
        retry_target: Optional[str] = None,
    ) -> float:
        sleep_duration = self.calc_backoff_time(attempt)

-         if logging_method:
+         if logging_method and not self.__quiet:
            if retry_target:
                msg = "Retrying '{}' in ".format(retry_target)
            else:
                msg = "Retrying in "
            msg += "{:.2f} seconds ... (attempt={})".format(sleep_duration, attempt)
            logging_method(msg)

        time.sleep(sleep_duration)

        return sleep_duration
Add quiet mode support for Retry
## Code Before:
import time
from random import uniform
from typing import Callable, Optional


class Retry:
    def __init__(self, total: int = 3, backoff_factor: float = 0.2, jitter: float = 0.2) -> None:
        self.total = total
        self.__backoff_factor = backoff_factor
        self.__jitter = jitter

        if self.total <= 0:
            raise ValueError("total must be greater than zero")

        if self.__backoff_factor <= 0:
            raise ValueError("backoff_factor must be greater than zero")

        if self.__jitter <= 0:
            raise ValueError("jitter must be greater than zero")

    def calc_backoff_time(self, attempt: int) -> float:
        sleep_duration = self.__backoff_factor * (2 ** max(0, attempt - 1))
        sleep_duration += uniform(0.5 * self.__jitter, 1.5 * self.__jitter)

        return sleep_duration

    def sleep_before_retry(
        self,
        attempt: int,
        logging_method: Optional[Callable] = None,
        retry_target: Optional[str] = None,
    ) -> float:
        sleep_duration = self.calc_backoff_time(attempt)

        if logging_method:
            if retry_target:
                msg = "Retrying '{}' in ".format(retry_target)
            else:
                msg = "Retrying in "
            msg += "{:.2f} seconds ... (attempt={})".format(sleep_duration, attempt)
            logging_method(msg)

        time.sleep(sleep_duration)

        return sleep_duration

## Instruction:
Add quiet mode support for Retry

## Code After:
import time
from random import uniform
from typing import Callable, Optional


class Retry:
    def __init__(
        self, total: int = 3, backoff_factor: float = 0.2, jitter: float = 0.2, quiet: bool = False
    ) -> None:
        self.total = total
        self.__backoff_factor = backoff_factor
        self.__jitter = jitter
        self.__quiet = quiet

        if self.total <= 0:
            raise ValueError("total must be greater than zero")

        if self.__backoff_factor <= 0:
            raise ValueError("backoff_factor must be greater than zero")

        if self.__jitter <= 0:
            raise ValueError("jitter must be greater than zero")

    def calc_backoff_time(self, attempt: int) -> float:
        sleep_duration = self.__backoff_factor * (2 ** max(0, attempt - 1))
        sleep_duration += uniform(0.5 * self.__jitter, 1.5 * self.__jitter)

        return sleep_duration

    def sleep_before_retry(
        self,
        attempt: int,
        logging_method: Optional[Callable] = None,
        retry_target: Optional[str] = None,
    ) -> float:
        sleep_duration = self.calc_backoff_time(attempt)

        if logging_method and not self.__quiet:
            if retry_target:
                msg = "Retrying '{}' in ".format(retry_target)
            else:
                msg = "Retrying in "
            msg += "{:.2f} seconds ... (attempt={})".format(sleep_duration, attempt)
            logging_method(msg)

        time.sleep(sleep_duration)

        return sleep_duration

// ... existing code ...
class Retry:
    def __init__(
        self, total: int = 3, backoff_factor: float = 0.2, jitter: float = 0.2, quiet: bool = False
    ) -> None:
        self.total = total
// ... modified code ...
        self.__jitter = jitter
        self.__quiet = quiet
...
        if logging_method and not self.__quiet:
            if retry_target:
// ... rest of the code ...
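A sketch of the class in use, exercising the new flag — flaky_call is a placeholder for whatever operation is being retried, and quiet=True suppresses the message that logging_method would otherwise emit:

retry = Retry(total=5, backoff_factor=0.5, jitter=0.2, quiet=True)

for attempt in range(1, retry.total + 1):
    try:
        flaky_call()
        break
    except OSError:
        if attempt == retry.total:
            raise  # out of attempts, surface the error
        retry.sleep_before_retry(attempt, logging_method=print)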
72358efa2bf9ff45377ef8ab3478b9433c67c574
candidates/feeds.py
candidates/feeds.py
from django.contrib.syndication.views import Feed
from django.core.urlresolvers import reverse
from django.utils.feedgenerator import Atom1Feed

from .models import LoggedAction


class RecentChangesFeed(Feed):
    title = "YourNextMP recent changes"
    description = "Changes to YNMP candidates"
    link = "/feeds/changes.xml"
    feed_type = Atom1Feed

    def items(self):
        return LoggedAction.objects.order_by('-updated')[:50]

    def item_title(self, item):
        return "{0} - {1}".format(
            item.popit_person_id,
            item.action_type
        )

    def item_description(self, item):
        description = """
        {0}

        Updated by {1} at {2}
        """.format(
            item.source,
            item.ip_address,
            str(item.updated),
        )

        return description

    def item_link(self, item):
        return reverse('person-view', args=[item.popit_person_id])

from django.contrib.syndication.views import Feed
from django.core.urlresolvers import reverse
from django.utils.feedgenerator import Atom1Feed

from .models import LoggedAction


class RecentChangesFeed(Feed):
    title = "YourNextMP recent changes"
    description = "Changes to YNMP candidates"
    link = "/feeds/changes.xml"
    feed_type = Atom1Feed

    def items(self):
        return LoggedAction.objects.order_by('-updated')[:50]

    def item_title(self, item):
        return "{0} - {1}".format(
            item.popit_person_id,
            item.action_type
        )

    def item_description(self, item):
        description = """
        {0}

        Updated at {1}
        """.format(
            item.source,
            str(item.updated),
        )

        return description

    def item_link(self, item):
        return reverse('person-view', args=[item.popit_person_id])
Remove IP address from feed description
Remove IP address from feed description
Python
agpl-3.0
DemocracyClub/yournextrepresentative,neavouli/yournextrepresentative,YoQuieroSaber/yournextrepresentative,neavouli/yournextrepresentative,openstate/yournextrepresentative,mysociety/yournextmp-popit,mysociety/yournextrepresentative,openstate/yournextrepresentative,YoQuieroSaber/yournextrepresentative,YoQuieroSaber/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,YoQuieroSaber/yournextrepresentative,datamade/yournextmp-popit,openstate/yournextrepresentative,neavouli/yournextrepresentative,openstate/yournextrepresentative,mysociety/yournextrepresentative,mysociety/yournextrepresentative,YoQuieroSaber/yournextrepresentative,datamade/yournextmp-popit,mysociety/yournextrepresentative,mysociety/yournextmp-popit,openstate/yournextrepresentative,mysociety/yournextmp-popit,datamade/yournextmp-popit,datamade/yournextmp-popit,mysociety/yournextmp-popit,neavouli/yournextrepresentative,neavouli/yournextrepresentative,mysociety/yournextmp-popit,datamade/yournextmp-popit,mysociety/yournextrepresentative
from django.contrib.syndication.views import Feed
from django.core.urlresolvers import reverse
from django.utils.feedgenerator import Atom1Feed

from .models import LoggedAction


class RecentChangesFeed(Feed):
    title = "YourNextMP recent changes"
    description = "Changes to YNMP candidates"
    link = "/feeds/changes.xml"
    feed_type = Atom1Feed

    def items(self):
        return LoggedAction.objects.order_by('-updated')[:50]

    def item_title(self, item):
        return "{0} - {1}".format(
            item.popit_person_id,
            item.action_type
        )

    def item_description(self, item):
        description = """
        {0}

-         Updated by {1} at {2}
+         Updated at {1}
        """.format(
            item.source,
-             item.ip_address,
            str(item.updated),
        )

        return description

    def item_link(self, item):
        return reverse('person-view', args=[item.popit_person_id])
Remove IP address from feed description
## Code Before:
from django.contrib.syndication.views import Feed
from django.core.urlresolvers import reverse
from django.utils.feedgenerator import Atom1Feed

from .models import LoggedAction


class RecentChangesFeed(Feed):
    title = "YourNextMP recent changes"
    description = "Changes to YNMP candidates"
    link = "/feeds/changes.xml"
    feed_type = Atom1Feed

    def items(self):
        return LoggedAction.objects.order_by('-updated')[:50]

    def item_title(self, item):
        return "{0} - {1}".format(
            item.popit_person_id,
            item.action_type
        )

    def item_description(self, item):
        description = """
        {0}

        Updated by {1} at {2}
        """.format(
            item.source,
            item.ip_address,
            str(item.updated),
        )

        return description

    def item_link(self, item):
        return reverse('person-view', args=[item.popit_person_id])

## Instruction:
Remove IP address from feed description

## Code After:
from django.contrib.syndication.views import Feed
from django.core.urlresolvers import reverse
from django.utils.feedgenerator import Atom1Feed

from .models import LoggedAction


class RecentChangesFeed(Feed):
    title = "YourNextMP recent changes"
    description = "Changes to YNMP candidates"
    link = "/feeds/changes.xml"
    feed_type = Atom1Feed

    def items(self):
        return LoggedAction.objects.order_by('-updated')[:50]

    def item_title(self, item):
        return "{0} - {1}".format(
            item.popit_person_id,
            item.action_type
        )

    def item_description(self, item):
        description = """
        {0}

        Updated at {1}
        """.format(
            item.source,
            str(item.updated),
        )

        return description

    def item_link(self, item):
        return reverse('person-view', args=[item.popit_person_id])

// ... existing code ...
        Updated at {1}
        """.format(
// ... modified code ...
            item.source,
            str(item.updated),
// ... rest of the code ...
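How a feed class like this is typically mounted — the pattern below is illustrative, chosen to match the hard-coded link attribute (Django < 2.0 url() syntax, in keeping with the code's django.core.urlresolvers import):

from django.conf.urls import url

from candidates.feeds import RecentChangesFeed

urlpatterns = [
    url(r'^feeds/changes\.xml$', RecentChangesFeed(), name='recent-changes-feed'),
]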
726370913332fd5e27bb04446b75ef59fb711a9c
broadgauge/main.py
broadgauge/main.py
import os
import sys

import web
import yaml

from . import default_settings


def load_default_config():
    # take all vars defined in default_config
    config = dict((k, v) for k, v in default_settings.__dict__.items()
                  if not k.startswith("_"))
    web.config.update(config)


def load_config_from_env():
    keys = [
        'SITE_TITLE',
        'GITHUB_CLIENT_ID',
        'GITHUB_CLIENT_SECRET',
        'SECRET_KEY',
        'DATABASE_URL',
        'ADMIN_USER',
    ]
    for k in keys:
        if k in os.environ:
            web.config[k.lower()] = os.environ[k]


load_default_config()
load_config_from_env()

from . import webapp

application = webapp.app.wsgifunc()

# Heroku doesn't handle static files, use StaticMiddleware.
application = web.httpserver.StaticMiddleware(application)


def load_config_from_file(configfile):
    web.config.update(yaml.load(open(configfile)))


def main():
    if "--config" in sys.argv:
        index = sys.argv.index("--config")
        configfile = sys.argv[index+1]
        sys.argv = sys.argv[:index] + sys.argv[index+2:]
        load_config_from_file(configfile)

    webapp.app.run()


if __name__ == '__main__':
    main()

import os
import sys

import web
import yaml

from . import default_settings


def load_default_config():
    # take all vars defined in default_config
    config = dict((k, v) for k, v in default_settings.__dict__.items()
                  if not k.startswith("_"))
    web.config.update(config)


def load_config_from_env():
    keys = [
        'SITE_TITLE',
        'GITHUB_CLIENT_ID',
        'GITHUB_CLIENT_SECRET',
        'SECRET_KEY',
        'DATABASE_URL',
        'ADMIN_USER',
        'MAIL_SERVER',
        'MAIL_USERNAME',
        'MAIL_PASSWORD',
        'MAIL_TLS',
        'FROM_ADDRESS',
    ]
    for k in keys:
        if k in os.environ:
            web.config[k.lower()] = os.environ[k]


load_default_config()
load_config_from_env()

from . import webapp

application = webapp.app.wsgifunc()

# Heroku doesn't handle static files, use StaticMiddleware.
application = web.httpserver.StaticMiddleware(application)


def load_config_from_file(configfile):
    web.config.update(yaml.load(open(configfile)))


def main():
    if "--config" in sys.argv:
        index = sys.argv.index("--config")
        configfile = sys.argv[index+1]
        sys.argv = sys.argv[:index] + sys.argv[index+2:]
        load_config_from_file(configfile)

    webapp.app.run()


if __name__ == '__main__':
    main()
Read mail settings from config.
Read mail settings from config.
Python
bsd-3-clause
fsmk/fsmkschool,anandology/broadgauge
import os
import sys

import web
import yaml

from . import default_settings


def load_default_config():
    # take all vars defined in default_config
    config = dict((k, v) for k, v in default_settings.__dict__.items()
                  if not k.startswith("_"))
    web.config.update(config)


def load_config_from_env():
    keys = [
        'SITE_TITLE',
        'GITHUB_CLIENT_ID',
        'GITHUB_CLIENT_SECRET',
        'SECRET_KEY',
        'DATABASE_URL',
        'ADMIN_USER',
+         'MAIL_SERVER',
+         'MAIL_USERNAME',
+         'MAIL_PASSWORD',
+         'MAIL_TLS',
+         'FROM_ADDRESS',
    ]
    for k in keys:
        if k in os.environ:
            web.config[k.lower()] = os.environ[k]


load_default_config()
load_config_from_env()

from . import webapp

application = webapp.app.wsgifunc()

# Heroku doesn't handle static files, use StaticMiddleware.
application = web.httpserver.StaticMiddleware(application)


def load_config_from_file(configfile):
    web.config.update(yaml.load(open(configfile)))


def main():
    if "--config" in sys.argv:
        index = sys.argv.index("--config")
        configfile = sys.argv[index+1]
        sys.argv = sys.argv[:index] + sys.argv[index+2:]
        load_config_from_file(configfile)

    webapp.app.run()


if __name__ == '__main__':
    main()
Read mail settings from config.
## Code Before:
import os
import sys

import web
import yaml

from . import default_settings


def load_default_config():
    # take all vars defined in default_config
    config = dict((k, v) for k, v in default_settings.__dict__.items()
                  if not k.startswith("_"))
    web.config.update(config)


def load_config_from_env():
    keys = [
        'SITE_TITLE',
        'GITHUB_CLIENT_ID',
        'GITHUB_CLIENT_SECRET',
        'SECRET_KEY',
        'DATABASE_URL',
        'ADMIN_USER',
    ]
    for k in keys:
        if k in os.environ:
            web.config[k.lower()] = os.environ[k]


load_default_config()
load_config_from_env()

from . import webapp

application = webapp.app.wsgifunc()

# Heroku doesn't handle static files, use StaticMiddleware.
application = web.httpserver.StaticMiddleware(application)


def load_config_from_file(configfile):
    web.config.update(yaml.load(open(configfile)))


def main():
    if "--config" in sys.argv:
        index = sys.argv.index("--config")
        configfile = sys.argv[index+1]
        sys.argv = sys.argv[:index] + sys.argv[index+2:]
        load_config_from_file(configfile)

    webapp.app.run()


if __name__ == '__main__':
    main()

## Instruction:
Read mail settings from config.

## Code After:
import os
import sys

import web
import yaml

from . import default_settings


def load_default_config():
    # take all vars defined in default_config
    config = dict((k, v) for k, v in default_settings.__dict__.items()
                  if not k.startswith("_"))
    web.config.update(config)


def load_config_from_env():
    keys = [
        'SITE_TITLE',
        'GITHUB_CLIENT_ID',
        'GITHUB_CLIENT_SECRET',
        'SECRET_KEY',
        'DATABASE_URL',
        'ADMIN_USER',
        'MAIL_SERVER',
        'MAIL_USERNAME',
        'MAIL_PASSWORD',
        'MAIL_TLS',
        'FROM_ADDRESS',
    ]
    for k in keys:
        if k in os.environ:
            web.config[k.lower()] = os.environ[k]


load_default_config()
load_config_from_env()

from . import webapp

application = webapp.app.wsgifunc()

# Heroku doesn't handle static files, use StaticMiddleware.
application = web.httpserver.StaticMiddleware(application)


def load_config_from_file(configfile):
    web.config.update(yaml.load(open(configfile)))


def main():
    if "--config" in sys.argv:
        index = sys.argv.index("--config")
        configfile = sys.argv[index+1]
        sys.argv = sys.argv[:index] + sys.argv[index+2:]
        load_config_from_file(configfile)

    webapp.app.run()


if __name__ == '__main__':
    main()

// ... existing code ...
        'ADMIN_USER',
        'MAIL_SERVER',
        'MAIL_USERNAME',
        'MAIL_PASSWORD',
        'MAIL_TLS',
        'FROM_ADDRESS',
    ]
// ... rest of the code ...
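One caveat that comes with sourcing these keys from os.environ: every value arrives as a string, so a boolean like MAIL_TLS shows up as e.g. "true" and needs explicit parsing at the point of use — a hedged sketch:

mail_tls = str(web.config.get('mail_tls', '')).lower() in ('1', 'true', 'yes')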