| Column | Type | Value range |
|--------------|--------|---------------------|
| commit | string | lengths 40-40 |
| old_file | string | lengths 4-106 |
| new_file | string | lengths 4-106 |
| old_contents | string | lengths 10-2.94k |
| new_contents | string | lengths 21-2.95k |
| subject | string | lengths 16-444 |
| message | string | lengths 17-2.63k |
| lang | string | 1 class |
| license | string | 13 classes |
| repos | string | lengths 7-43k |
| ndiff | string | lengths 52-3.31k |
| instruction | string | lengths 16-444 |
| content | string | lengths 133-4.32k |
| diff | string | lengths 49-3.61k |
## Commit e1dc2c3e2515daf3aae51242221fab4fbd5c553f

- File: `aim/db/migration/alembic_migrations/versions/72fa5bce100b_tree_model.py`
- Language: Python
- License: apache-2.0
- Repos: noironetworks/aci-integration-module

### Instruction

Fix db migration for tree model

### Code Before

```python
# revision identifiers, used by Alembic.
revision = '72fa5bce100b'
down_revision = '40855b7eb958'
branch_labels = None
depends_on = None

from alembic import op
import sqlalchemy as sa


def upgrade():
    op.create_table(
        'aim_tenant_trees',
        sa.Column('rn', sa.String(64), nullable=False),
        sa.Column('root_full_hash', sa.String(64), nullable=False),
        sa.Column('tree', sa.LargeBinary, nullable=False),
        sa.PrimaryKeyConstraint('rn'))


def downgrade():
    pass
```

### Code After

```python
# revision identifiers, used by Alembic.
revision = '72fa5bce100b'
down_revision = '40855b7eb958'
branch_labels = None
depends_on = None

from alembic import op
import sqlalchemy as sa


def upgrade():
    op.create_table(
        'aim_tenant_trees',
        sa.Column('tenant_rn', sa.String(64), nullable=False),
        sa.Column('root_full_hash', sa.String(64), nullable=False),
        sa.Column('tree', sa.LargeBinary, nullable=False),
        sa.PrimaryKeyConstraint('tenant_rn'))


def downgrade():
    pass
```

### Diff

```diff
 def upgrade():
     op.create_table(
         'aim_tenant_trees',
-        sa.Column('rn', sa.String(64), nullable=False),
+        sa.Column('tenant_rn', sa.String(64), nullable=False),
         sa.Column('root_full_hash', sa.String(64), nullable=False),
         sa.Column('tree', sa.LargeBinary, nullable=False),
-        sa.PrimaryKeyConstraint('rn'))
+        sa.PrimaryKeyConstraint('tenant_rn'))
```
## Commit 20d766de4d20355303b5b423dd30bf0cd4c8ee8e

- File: `createGlyphsPDF.py`
- Language: Python
- License: mit
- Repos: AlphabetType/DrawBot-Scripts

### Instruction

Set size nonce at the beginning of the script

### Code Before

```python
from fontTools.pens.cocoaPen import CocoaPen

# Some configuration
page_format = 'A4' # See http://drawbot.readthedocs.org/content/canvas/pages.html#size for other size-values
my_selection = CurrentFont() # May also be CurrentFont.selection or else

class RegisterGlyph(object):
    def __init__(self, glyph):
        self.glyph = glyph
        print 'Registered glyph:', self.glyph.name
        self.proportion_ratio = self.getProportionRatio()

    def getProportionRatio(self):
        xMin, yMin, xMax, yMax = self.glyph.box
        self.w = xMax - xMin
        self.h = yMax - yMin
        ratio = self.w/self.h
        return ratio

    def drawGlyphOnNewPage(self):
        newPage(page_format)
        self._drawGlyph()

    def _drawGlyph(self):
        pen = CocoaPen(self.glyph.getParent())
        self.glyph.draw(pen)
        drawPath(pen.path)

for g in my_selection:
    if len(g) > 0: # Ignore whitespace glyphs
        glyph = RegisterGlyph(g)
        glyph.drawGlyphOnNewPage()
```

### Code After

```python
from fontTools.pens.cocoaPen import CocoaPen

# Some configuration
page_format = 'A4' # See http://drawbot.readthedocs.org/content/canvas/pages.html#size for other size-values
margins = (50,50,50,50) # left, top, right, bottom
my_selection = CurrentFont() # May also be CurrentFont.selection or else

# Init
size(page_format)

class RegisterGlyph(object):
    def __init__(self, glyph):
        self.glyph = glyph
        #print 'Registered glyph:', self.glyph.name
        self.proportion_ratio = self.getProportionRatio()

    def getProportionRatio(self):
        xMin, yMin, xMax, yMax = self.glyph.box
        self.w = xMax - xMin
        self.h = yMax - yMin
        ratio = self.w/self.h
        return ratio

    def drawGlyphOnNewPage(self):
        newPage()
        print
        self._drawGlyph()

    def _drawGlyph(self):
        pen = CocoaPen(self.glyph.getParent())
        self.glyph.draw(pen)
        drawPath(pen.path)

for g in my_selection:
    if len(g) > 0: # Ignore whitespace glyphs
        glyph = RegisterGlyph(g)
        glyph.drawGlyphOnNewPage()
```

### Diff

```diff
 # Some configuration
 page_format = 'A4' # See http://drawbot.readthedocs.org/content/canvas/pages.html#size for other size-values
+margins = (50,50,50,50) # left, top, right, bottom
 my_selection = CurrentFont() # May also be CurrentFont.selection or else
+
+# Init
+size(page_format)

     def __init__(self, glyph):
         self.glyph = glyph
-        print 'Registered glyph:', self.glyph.name
+        #print 'Registered glyph:', self.glyph.name
         self.proportion_ratio = self.getProportionRatio()

     def drawGlyphOnNewPage(self):
-        newPage(page_format)
+        newPage()
+        print
         self._drawGlyph()
```
## Commit eb4cda636a0b0ceb5312b161e97ae5f8376c9f8e

- File: `indra/tests/test_biolookup_client.py`
- Language: Python
- License: bsd-2-clause
- Repos: johnbachman/indra, bgyori/indra, sorgerlab/indra

### Instruction

Change biolookup test to work around service bug

### Code Before

```python
from indra.databases import biolookup_client


def test_lookup_curie():
    curie = 'pubchem.compound:40976'
    res = biolookup_client.lookup_curie(curie)
    assert res['name'] == '(17R)-13-ethyl-17-ethynyl-17-hydroxy-11-' \
        'methylidene-2,6,7,8,9,10,12,14,15,16-decahydro-1H-' \
        'cyclopenta[a]phenanthren-3-one', res


def test_lookup():
    res = biolookup_client.lookup('FPLX', 'ERK')
    assert res['name'] == 'ERK', res


def test_get_name():
    res = biolookup_client.get_name('CHEBI', 'CHEBI:408174')
    assert res == 'arformoterol', res
```

### Code After

```python
from indra.databases import biolookup_client


def test_lookup_curie():
    curie = 'pubchem.compound:40976'
    res = biolookup_client.lookup_curie(curie)
    assert res['name'] == '(17R)-13-ethyl-17-ethynyl-17-hydroxy-11-' \
        'methylidene-2,6,7,8,9,10,12,14,15,16-decahydro-1H-' \
        'cyclopenta[a]phenanthren-3-one', res


def test_lookup():
    res = biolookup_client.lookup('HGNC', '1097')
    assert res['name'] == 'BRAF', res


def test_get_name():
    res = biolookup_client.get_name('CHEBI', 'CHEBI:408174')
    assert res == 'arformoterol', res
```

### Diff

```diff
 def test_lookup():
-    res = biolookup_client.lookup('FPLX', 'ERK')
-    assert res['name'] == 'ERK', res
+    res = biolookup_client.lookup('HGNC', '1097')
+    assert res['name'] == 'BRAF', res
```
## Commit a2ee6106a6c98dae102cf14902c6b82f480e6cbe

- File: `python/main.py`
- Language: Python
- License: apache-2.0
- Repos: TheZoq2/VRHack

### Instruction

Add calls to furniture placement functions

### Code Before

```python
import sys
from enum import Enum

class Furniture(Enum):
    bed = 1
    couce = 2
    desk = 3
    chair = 4
    tv = 5
    table = 6
    rug = 7
    shelf = 8

f = open(sys.argv[1], 'r')

print(f.read())
```

### Code After

```python
import sys
from enum import Enum
from furniture import *

#class Furniture(Enum):
#    bed = 1
#    couce = 2
#    desk = 3
#    chair = 4
#    tv = 5
#    table = 6
#    rug = 7
#    shelf = 8

f = open(sys.argv[1], 'r')

print(f.read())

placeDesksAndChairs()
placeCouchesTablesAndTv()
placeBeds()
placeShelves()
placeRugs()
```

### Diff

```diff
 import sys
 from enum import Enum
+from furniture import *

-class Furniture(Enum):
-    bed = 1
-    couce = 2
-    desk = 3
-    chair = 4
-    tv = 5
-    table = 6
-    rug = 7
-    shelf = 8
+#class Furniture(Enum):
+#    bed = 1
+#    couce = 2
+#    desk = 3
+#    chair = 4
+#    tv = 5
+#    table = 6
+#    rug = 7
+#    shelf = 8

 f = open(sys.argv[1], 'r')

 print(f.read())
+
+placeDesksAndChairs()
+placeCouchesTablesAndTv()
+placeBeds()
+placeShelves()
+placeRugs()
```
## Commit f48afc99a7e7aa076aa27b33deda824b5509bab2

- File: `test_qt_helpers_qt5.py`
- Language: Python
- License: bsd-3-clause
- Repos: glue-viz/qt-helpers

### Instruction

Comment out problematic test for now

### Code Before

```python
from __future__ import absolute_import, division, print_function

import os
import sys

import pytest
from mock import MagicMock

# At the moment it is not possible to have PyQt5 and PyQt4 installed
# simultaneously because one requires the Qt4 libraries while the other
# requires the Qt5 libraries

class TestQT5(object):

    def setup_class(cls):
        print('-' * 72)
        os.environ['QT_API'] = 'pyqt5'
        import qt_helpers as qt

    def _load_qt5(self):
        import qt_helpers as qt

    def test_main_import_qt5(self):
        self._load_qt5()
        from qt_helpers import QtCore
        from qt_helpers import QtGui
        from PyQt5 import QtCore as core, QtGui as gui
        assert QtCore is core
        assert QtGui is gui

    def test_load_ui_qt5(self):
        self._load_qt5()
        from qt_helpers import load_ui, get_qapp
        qpp = get_qapp()
        load_ui('test.ui')

    def test_submodule_import_qt5(self):
        self._load_qt5()
        from qt_helpers.QtGui import QMessageBox
        from qt_helpers.QtCore import Qt
        from PyQt5.QtWidgets import QMessageBox as qmb
        from PyQt5.QtCore import Qt as _qt
        assert qmb is QMessageBox
        assert _qt is Qt

    def test_submodule_import_pyside(self):

        self._load_pyside()

        from qt_helpers.QtGui import QMessageBox
        from qt_helpers.QtCore import Qt

        from PySide.QtGui import QMessageBox as qmb
        from PySide.QtCore import Qt as _qt
        assert qmb is QMessageBox
        assert _qt is Qt
```

### Code After

```python
from __future__ import absolute_import, division, print_function

import os
import sys

import pytest
from mock import MagicMock

# At the moment it is not possible to have PyQt5 and PyQt4 installed
# simultaneously because one requires the Qt4 libraries while the other
# requires the Qt5 libraries

class TestQT5(object):

    def setup_class(cls):
        os.environ['QT_API'] = 'pyqt5'
        import qt_helpers as qt

    def _load_qt5(self):
        import qt_helpers as qt

    def test_main_import_qt5(self):
        self._load_qt5()

        from qt_helpers import QtCore
        from qt_helpers import QtGui
        from PyQt5 import QtCore as core, QtGui as gui
        assert QtCore is core
        assert QtGui is gui

    # At the moment, PyQt5 does not run correctly on Travis so we can't run
    # this without causing an Abort Trap.
    # def test_load_ui_qt5(self):
    #     self._load_qt5()
    #     from qt_helpers import load_ui, get_qapp
    #     qpp = get_qapp()
    #     load_ui('test.ui')

    def test_submodule_import_qt5(self):
        self._load_qt5()
        from qt_helpers.QtGui import QMessageBox
        from qt_helpers.QtCore import Qt
        from PyQt5.QtWidgets import QMessageBox as qmb
        from PyQt5.QtCore import Qt as _qt
        assert qmb is QMessageBox
        assert _qt is Qt
```

### Diff

```diff
     def setup_class(cls):
-        print('-' * 72)
         os.environ['QT_API'] = 'pyqt5'
         import qt_helpers as qt

     def test_main_import_qt5(self):
         self._load_qt5()
+
         from qt_helpers import QtCore

+    # At the moment, PyQt5 does not run correctly on Travis so we can't run
+    # this without causing an Abort Trap.
-    def test_load_ui_qt5(self):
-        self._load_qt5()
-        from qt_helpers import load_ui, get_qapp
-        qpp = get_qapp()
-        load_ui('test.ui')
+    # def test_load_ui_qt5(self):
+    #     self._load_qt5()
+    #     from qt_helpers import load_ui, get_qapp
+    #     qpp = get_qapp()
+    #     load_ui('test.ui')

-    def test_submodule_import_pyside(self):
-
-        self._load_pyside()
-
-        from qt_helpers.QtGui import QMessageBox
-        from qt_helpers.QtCore import Qt
-
-        from PySide.QtGui import QMessageBox as qmb
-        from PySide.QtCore import Qt as _qt
-        assert qmb is QMessageBox
-        assert _qt is Qt
```
## Commit 5b8a39dc0fc5e383c30451a819c8e1542fd6f7ed

- File: `OpenSearchInNewTab.py`
- Language: Python
- License: mit
- Repos: everyonesdesign/OpenSearchInNewTab

### Instruction

Remove unnecessary scratch file flag

### Code Before

```python
import re
from threading import Timer

import sublime_plugin
import sublime

DEFAULT_NAME = 'Find Results'
ALT_NAME_BASE = 'Find Results '


class OpenSearchInNewTab(sublime_plugin.EventListener):

    # set a bit changed name
    # so the tab won't be bothered
    # during new search
    def on_activated(self, view):
        if self.is_search_view(view):
            t = Timer(.001, self.update_view, (view,))
            t.start()

    # these hooks will help other plugins
    # to understand that we are in search results file
    def on_text_command(self, view, command_name, args):
        if self.is_search_view(view):
            self.update_view(view)

    def get_alt_name(self, view):
        first_line_coords = view.full_line(sublime.Region(0, 0))
        first_line = view.substr(sublime.Region(*first_line_coords))
        match = re.search('^Searching \d+ files for "(.*)"$', first_line)
        if match:
            query = match.group(1)
            return ALT_NAME_BASE + 'for "' + query + '"'
        return ALT_NAME_BASE

    def update_view(self, view):
        view.set_name(self.get_alt_name(view))
        view.set_scratch(True)

    def is_search_view(self, view):
        name = view.name()
        return (
            name == DEFAULT_NAME or
            name == ALT_NAME_BASE or
            name == self.get_alt_name(view)
        )
```

### Code After

```python
import re
from threading import Timer

import sublime_plugin
import sublime

DEFAULT_NAME = 'Find Results'
ALT_NAME_BASE = 'Find Results '


class OpenSearchInNewTab(sublime_plugin.EventListener):

    # set a bit changed name
    # so the tab won't be bothered
    # during new search
    def on_activated(self, view):
        if self.is_search_view(view):
            t = Timer(.001, self.update_view, (view,))
            t.start()

    # these hooks will help other plugins
    # to understand that we are in search results file
    def on_text_command(self, view, command_name, args):
        if self.is_search_view(view):
            self.update_view(view)

    def get_alt_name(self, view):
        first_line_coords = view.full_line(sublime.Region(0, 0))
        first_line = view.substr(sublime.Region(*first_line_coords))
        match = re.search('^Searching \d+ files for "(.*)"$', first_line)
        if match:
            query = match.group(1)
            return ALT_NAME_BASE + 'for "' + query + '"'
        return ALT_NAME_BASE

    def update_view(self, view):
        view.set_name(self.get_alt_name(view))

    def is_search_view(self, view):
        name = view.name()
        return (
            name == DEFAULT_NAME or
            name == ALT_NAME_BASE or
            name == self.get_alt_name(view)
        )
```

### Diff

```diff
     def update_view(self, view):
         view.set_name(self.get_alt_name(view))
-        view.set_scratch(True)

     def is_search_view(self, view):
```
## Commit f42ba1bebb0e7f92222d8a66f94e2550b4dde9e1

- File: `helpers/custom_filters.py`
- Language: Python
- License: agpl-3.0
- Repos: watchcat/cbu-rotterdam, codeforamerica/Change-By-Us, localprojects/Change-By-Us, codeforeurope/Change-By-Us

### Instruction

Add a neattime custom filter to give me pretty times.

### Code Before

```python
import json


def strslice(s, length):
    if not isinstance(s, basestring):
        s = str(s)
    return s[:length]


def urlencode(s):
    if isinstance(s, unicode):
        s = s.encode('utf-8')
    import urllib
    return urllib.quote(s)


def json_filter(data):
    return json.dumps(data)


def datetimeformat(value, format='%H:%M / %d-%m-%Y'):
    return value.strftime(format)


filters = {
    'strslice': strslice,
    'urlencode': urlencode,
    'json': json_filter,
    'datetime': datetimeformat,
}
```

### Code After

```python
import json


def strslice(s, length):
    if not isinstance(s, basestring):
        s = str(s)
    return s[:length]


def urlencode(s):
    if isinstance(s, unicode):
        s = s.encode('utf-8')
    import urllib
    return urllib.quote(s)


def json_filter(data):
    return json.dumps(data)


def datetimeformat(value, format='%H:%M / %d-%m-%Y'):
    return value.strftime(format)


def neat_time(dt):
    """Return the time in dt as a neat string.

    Examples:

    >>> neat_time(time(7, 30))
    7:30AM
    >>> neat_time(time(14, 00))
    2PM

    """
    if dt.minute:
        timestring = dt.strftime('%I:%M%p')
    else:
        timestring = dt.strftime('%I%p')

    if timestring[0] == '0':
        timestring = timestring[1:]

    return timestring


filters = {
    'strslice': strslice,
    'urlencode': urlencode,
    'json': json_filter,
    'datetime': datetimeformat,
    'neattime': neat_time,
}
```

### Diff

```diff
 def datetimeformat(value, format='%H:%M / %d-%m-%Y'):
     return value.strftime(format)

+def neat_time(dt):
+    """Return the time in dt as a neat string.
+
+    Examples:
+
+    >>> neat_time(time(7, 30))
+    7:30AM
+    >>> neat_time(time(14, 00))
+    2PM
+
+    """
+    if dt.minute:
+        timestring = dt.strftime('%I:%M%p')
+    else:
+        timestring = dt.strftime('%I%p')
+
+    if timestring[0] == '0':
+        timestring = timestring[1:]
+
+    return timestring
+
 filters = {
     'strslice': strslice,
     'urlencode': urlencode,
     'json': json_filter,
     'datetime': datetimeformat,
+    'neattime': neat_time,
 }
```
## Commit fa5bb37159d09c5bff53b83a4821e3f154892d1d

- File: `numba/cuda/device_init.py`
- Language: Python
- License: bsd-2-clause
- Repos: sklam/numba, cpcloud/numba, numba/numba, seibert/numba, jriehl/numba, stuartarchibald/numba, IntelLabs/numba, gmarkall/numba, stonebig/numba

### Instruction

Fix issue with test discovery and broken CUDA drivers.

This patch allows the test discovery mechanism to work even in the case of a broken/misconfigured CUDA driver.

Fixes #2841

### Code Before

```python
from __future__ import print_function, absolute_import, division

# Re export
from .stubs import (threadIdx, blockIdx, blockDim, gridDim, syncthreads,
                    shared, local, const, grid, gridsize, atomic,
                    threadfence_block, threadfence_system, threadfence)
from .cudadrv.error import CudaSupportError
from .cudadrv import nvvm
from . import initialize
from .errors import KernelRuntimeError
from .decorators import jit, autojit, declare_device
from .api import *
from .api import _auto_device
from .kernels import reduction

reduce = Reduce = reduction.Reduce


def is_available():
    """Returns a boolean to indicate the availability of a CUDA GPU.

    This will initialize the driver if it hasn't been initialized.
    """
    return driver.driver.is_available and nvvm.is_available()


def cuda_error():
    """Returns None or an exception if the CUDA driver fails to initialize.
    """
    return driver.driver.initialization_error

initialize.initialize_all()
```

### Code After

```python
from __future__ import print_function, absolute_import, division

# Re export
from .stubs import (threadIdx, blockIdx, blockDim, gridDim, syncthreads,
                    shared, local, const, grid, gridsize, atomic,
                    threadfence_block, threadfence_system, threadfence)
from .cudadrv.error import CudaSupportError
from .cudadrv import nvvm
from . import initialize
from .errors import KernelRuntimeError
from .decorators import jit, autojit, declare_device
from .api import *
from .api import _auto_device
from .kernels import reduction

reduce = Reduce = reduction.Reduce


def is_available():
    """Returns a boolean to indicate the availability of a CUDA GPU.

    This will initialize the driver if it hasn't been initialized.
    """
    # whilst `driver.is_available` will init the driver itself,
    # the driver initialization may raise and as a result break
    # test discovery/orchestration as `cuda.is_available` is often
    # used as a guard for whether to run a CUDA test, the try/except
    # below is to handle this case.
    driver_is_available = False
    try:
        driver_is_available = driver.driver.is_available
    except CudaSupportError:
        pass
    return driver_is_available and nvvm.is_available()


def cuda_error():
    """Returns None or an exception if the CUDA driver fails to initialize.
    """
    return driver.driver.initialization_error

initialize.initialize_all()
```

### Diff

```diff
 def is_available():
     """Returns a boolean to indicate the availability of a CUDA GPU.

     This will initialize the driver if it hasn't been initialized.
     """
-    return driver.driver.is_available and nvvm.is_available()
+    # whilst `driver.is_available` will init the driver itself,
+    # the driver initialization may raise and as a result break
+    # test discovery/orchestration as `cuda.is_available` is often
+    # used as a guard for whether to run a CUDA test, the try/except
+    # below is to handle this case.
+    driver_is_available = False
+    try:
+        driver_is_available = driver.driver.is_available
+    except CudaSupportError:
+        pass
+    return driver_is_available and nvvm.is_available()
```
## Commit 1e8fd33ef4e8b75632d8a4fe4d86944fdfc5a649

- File: `beetle/__init__.py`
- Language: Python
- License: mit
- Repos: cknv/beetle

### Instruction

Allow the BeetleError class to take a page object as an argument

### Code Before

```python
name = 'beetle'
version = '0.4.1-dev'
project_url = 'https://github.com/cknv/beetle'


class BeetleError(Exception):
    pass
```

### Code After

```python
name = 'beetle'
version = '0.4.1-dev'
project_url = 'https://github.com/cknv/beetle'


class BeetleError(Exception):
    def __init__(self, page=None):
        self.page = page
```

### Diff

```diff
 class BeetleError(Exception):
-    pass
+    def __init__(self, page=None):
+        self.page = page
```
## Commit a18a19345298c43400dbfb984f97e97b3d0b624a

- File: `pyelasticsearch/__init__.py`
- Language: Python
- License: bsd-3-clause
- Repos: erikrose/pyelasticsearch

### Instruction

Change author and bump version.

### Code Before

```python
from __future__ import absolute_import

from pyelasticsearch.client import ElasticSearch
from pyelasticsearch.exceptions import (Timeout, ConnectionError,
                                        ElasticHttpError,
                                        InvalidJsonResponseError,
                                        ElasticHttpNotFoundError,
                                        IndexAlreadyExistsError)

__author__ = 'Robert Eanes'
__all__ = ['ElasticSearch', 'ElasticHttpError', 'InvalidJsonResponseError',
           'Timeout', 'ConnectionError', 'ElasticHttpNotFoundError',
           'IndexAlreadyExistsError']
__version__ = '0.6.1'
__version_info__ = tuple(__version__.split('.'))

get_version = lambda: __version_info__
```

### Code After

```python
from __future__ import absolute_import

from pyelasticsearch.client import ElasticSearch
from pyelasticsearch.exceptions import (Timeout, ConnectionError,
                                        ElasticHttpError,
                                        InvalidJsonResponseError,
                                        ElasticHttpNotFoundError,
                                        IndexAlreadyExistsError)

__author__ = 'Erik Rose'
__all__ = ['ElasticSearch', 'ElasticHttpError', 'InvalidJsonResponseError',
           'Timeout', 'ConnectionError', 'ElasticHttpNotFoundError',
           'IndexAlreadyExistsError']
__version__ = '0.7'
__version_info__ = tuple(__version__.split('.'))

get_version = lambda: __version_info__
```

### Diff

```diff
-__author__ = 'Robert Eanes'
+__author__ = 'Erik Rose'
 __all__ = ['ElasticSearch', 'ElasticHttpError', 'InvalidJsonResponseError',
            'Timeout', 'ConnectionError', 'ElasticHttpNotFoundError',
            'IndexAlreadyExistsError']
-__version__ = '0.6.1'
+__version__ = '0.7'
```
## Commit 58fee826ab5298f7de036bf320bbc109b853eec8

- File: `tendrl/commons/manager/__init__.py`
- Language: Python
- License: lgpl-2.1
- Repos: Tendrl/commons, rishubhjain/commons, r0h4n/commons

### Instruction

Add null check for sds sync thread which can be optional

Signed-off-by: anmolbabu <[email protected]>

### Code Before

```python
import abc
import logging

import six

from tendrl.commons import jobs

LOG = logging.getLogger(__name__)


@six.add_metaclass(abc.ABCMeta)
class Manager(object):
    def __init__(
            self,
            sds_sync_thread,
            central_store_thread,
    ):
        self._central_store_thread = central_store_thread
        self._sds_sync_thread = sds_sync_thread
        self._job_consumer_thread = jobs.JobConsumerThread()

    def stop(self):
        LOG.info("%s stopping" % self.__class__.__name__)
        self._job_consumer_thread.stop()
        self._sds_sync_thread.stop()
        self._central_store_thread.stop()

    def start(self):
        LOG.info("%s starting" % self.__class__.__name__)
        self._central_store_thread.start()
        self._sds_sync_thread.start()
        self._job_consumer_thread.start()

    def join(self):
        LOG.info("%s joining" % self.__class__.__name__)
        self._job_consumer_thread.join()
        self._sds_sync_thread.join()
        self._central_store_thread.join()
```

### Code After

```python
import abc
import logging

import six

from tendrl.commons import jobs

LOG = logging.getLogger(__name__)


@six.add_metaclass(abc.ABCMeta)
class Manager(object):
    def __init__(
            self,
            sds_sync_thread,
            central_store_thread,
    ):
        self._central_store_thread = central_store_thread
        self._sds_sync_thread = sds_sync_thread
        self._job_consumer_thread = jobs.JobConsumerThread()

    def stop(self):
        LOG.info("%s stopping" % self.__class__.__name__)
        self._job_consumer_thread.stop()
        if self._sds_sync_thread:
            self._sds_sync_thread.stop()
        self._central_store_thread.stop()

    def start(self):
        LOG.info("%s starting" % self.__class__.__name__)
        self._central_store_thread.start()
        if self._sds_sync_thread:
            self._sds_sync_thread.start()
        self._job_consumer_thread.start()

    def join(self):
        LOG.info("%s joining" % self.__class__.__name__)
        self._job_consumer_thread.join()
        if self._sds_sync_thread:
            self._sds_sync_thread.join()
        self._central_store_thread.join()
```

### Diff

```diff
     def stop(self):
         LOG.info("%s stopping" % self.__class__.__name__)
         self._job_consumer_thread.stop()
-        self._sds_sync_thread.stop()
+        if self._sds_sync_thread:
+            self._sds_sync_thread.stop()
         self._central_store_thread.stop()

     def start(self):
         LOG.info("%s starting" % self.__class__.__name__)
         self._central_store_thread.start()
-        self._sds_sync_thread.start()
+        if self._sds_sync_thread:
+            self._sds_sync_thread.start()
         self._job_consumer_thread.start()

     def join(self):
         LOG.info("%s joining" % self.__class__.__name__)
         self._job_consumer_thread.join()
-        self._sds_sync_thread.join()
+        if self._sds_sync_thread:
+            self._sds_sync_thread.join()
         self._central_store_thread.join()
```
## Commit ecd7f5f46146fa9378000ac469f6eca8f64ac31d

- File: `stoq/tests/data/plugins/archiver/dummy_archiver/dummy_archiver.py`
- Language: Python
- License: apache-2.0
- Repos: PUNCH-Cyber/stoq

### Instruction

Fix test signature value type for task

### Code Before

```python
from typing import Optional

from stoq.data_classes import ArchiverResponse, Payload, RequestMeta
from stoq.plugins import ArchiverPlugin


class DummyArchiver(ArchiverPlugin):
    def archive(
        self, payload: Payload, request_meta: RequestMeta
    ) -> Optional[ArchiverResponse]:
        return None

    def get(self, task: str) -> Optional[Payload]:
        return None
```

### Code After

```python
from typing import Optional

from stoq.plugins import ArchiverPlugin
from stoq.data_classes import ArchiverResponse, Payload, RequestMeta


class DummyArchiver(ArchiverPlugin):
    def archive(
        self, payload: Payload, request_meta: RequestMeta
    ) -> Optional[ArchiverResponse]:
        return None

    def get(self, task: ArchiverResponse) -> Optional[Payload]:
        return None
```

### Diff

```diff
 from typing import Optional

+from stoq.plugins import ArchiverPlugin
 from stoq.data_classes import ArchiverResponse, Payload, RequestMeta
-from stoq.plugins import ArchiverPlugin


 class DummyArchiver(ArchiverPlugin):
     def archive(
         self, payload: Payload, request_meta: RequestMeta
     ) -> Optional[ArchiverResponse]:
         return None

-    def get(self, task: str) -> Optional[Payload]:
+    def get(self, task: ArchiverResponse) -> Optional[Payload]:
         return None
```
Fix test signature value type for task
## Code Before:
from typing import Optional

from stoq.data_classes import ArchiverResponse, Payload, RequestMeta
from stoq.plugins import ArchiverPlugin


class DummyArchiver(ArchiverPlugin):
    def archive(
        self, payload: Payload, request_meta: RequestMeta
    ) -> Optional[ArchiverResponse]:
        return None

    def get(self, task: str) -> Optional[Payload]:
        return None

## Instruction:
Fix test signature value type for task

## Code After:
from typing import Optional

from stoq.plugins import ArchiverPlugin
from stoq.data_classes import ArchiverResponse, Payload, RequestMeta


class DummyArchiver(ArchiverPlugin):
    def archive(
        self, payload: Payload, request_meta: RequestMeta
    ) -> Optional[ArchiverResponse]:
        return None

    def get(self, task: ArchiverResponse) -> Optional[Payload]:
        return None
from typing import Optional

+ from stoq.plugins import ArchiverPlugin
from stoq.data_classes import ArchiverResponse, Payload, RequestMeta
- from stoq.plugins import ArchiverPlugin


class DummyArchiver(ArchiverPlugin):
    def archive(
        self, payload: Payload, request_meta: RequestMeta
    ) -> Optional[ArchiverResponse]:
        return None

-     def get(self, task: str) -> Optional[Payload]:
?                          ^^
+     def get(self, task: ArchiverResponse) -> Optional[Payload]:
?                          ++++++++++ ^^^^^
        return None
0529c91443fa2948514a21933de00f4c146c9764
data_api/snapshot_scheduler/src/test_snapshot_scheduler.py
data_api/snapshot_scheduler/src/test_snapshot_scheduler.py
import datetime as dt
import os

import mock
from unittest.mock import patch

import snapshot_scheduler


class patched_datetime(dt.datetime):
    @classmethod
    def utcnow(cls):
        return dt.datetime(2011, 6, 21, 0, 0, 0, 0)


@mock.patch('datetime.datetime', patched_datetime)
def test_writes_message_to_sqs(sns_client, topic_arn):
    private_bucket_name = "private_bucket_name"
    es_index = "es_index"

    patched_os_environ = {
        'TOPIC_ARN': topic_arn,
        'TARGET_BUCKET_NAME': private_bucket_name,
        'ES_INDEX': es_index
    }

    with patch.dict(os.environ, patched_os_environ, clear=True):
        snapshot_scheduler.main(
            event=None,
            _ctxt=None,
            sns_client=sns_client
        )

    messages = sns_client.list_messages()
    assert len(messages) == 1
    assert messages[0][':message'] == {
        'time': '2011-06-21T00:00:00',
        'private_bucket_name': private_bucket_name,
        'es_index': es_index
    }
import datetime as dt
import os

import mock
from unittest.mock import patch

import snapshot_scheduler


class patched_datetime(dt.datetime):
    @classmethod
    def utcnow(cls):
        return dt.datetime(2011, 6, 21, 0, 0, 0, 0)


@mock.patch('datetime.datetime', patched_datetime)
def test_writes_message_to_sqs(sns_client, topic_arn):
    private_bucket_name = "private_bucket_name"
    es_index = "es_index"

    patched_os_environ = {
        'TOPIC_ARN': topic_arn,
        'PRIVATE_BUCKET_NAME': private_bucket_name,
        'ES_INDEX': es_index
    }

    with patch.dict(os.environ, patched_os_environ, clear=True):
        snapshot_scheduler.main(sns_client=sns_client)

    messages = sns_client.list_messages()
    assert len(messages) == 1
    assert messages[0][':message'] == {
        'time': '2011-06-21T00:00:00',
        'private_bucket_name': private_bucket_name,
        'es_index': es_index
    }
Use the correctly named env vars
snapshot_scheduler: Use the correctly named env vars
Python
mit
wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api
import datetime as dt
import os

import mock
from unittest.mock import patch

import snapshot_scheduler


class patched_datetime(dt.datetime):
    @classmethod
    def utcnow(cls):
        return dt.datetime(2011, 6, 21, 0, 0, 0, 0)


@mock.patch('datetime.datetime', patched_datetime)
def test_writes_message_to_sqs(sns_client, topic_arn):
    private_bucket_name = "private_bucket_name"
    es_index = "es_index"

    patched_os_environ = {
        'TOPIC_ARN': topic_arn,
-         'TARGET_BUCKET_NAME': private_bucket_name,
+         'PRIVATE_BUCKET_NAME': private_bucket_name,
        'ES_INDEX': es_index
    }

    with patch.dict(os.environ, patched_os_environ, clear=True):
+         snapshot_scheduler.main(sns_client=sns_client)
-         snapshot_scheduler.main(
-             event=None,
-             _ctxt=None,
-             sns_client=sns_client
-         )

    messages = sns_client.list_messages()
    assert len(messages) == 1
    assert messages[0][':message'] == {
        'time': '2011-06-21T00:00:00',
        'private_bucket_name': private_bucket_name,
        'es_index': es_index
    }
Use the correctly named env vars
## Code Before:
import datetime as dt
import os

import mock
from unittest.mock import patch

import snapshot_scheduler


class patched_datetime(dt.datetime):
    @classmethod
    def utcnow(cls):
        return dt.datetime(2011, 6, 21, 0, 0, 0, 0)


@mock.patch('datetime.datetime', patched_datetime)
def test_writes_message_to_sqs(sns_client, topic_arn):
    private_bucket_name = "private_bucket_name"
    es_index = "es_index"

    patched_os_environ = {
        'TOPIC_ARN': topic_arn,
        'TARGET_BUCKET_NAME': private_bucket_name,
        'ES_INDEX': es_index
    }

    with patch.dict(os.environ, patched_os_environ, clear=True):
        snapshot_scheduler.main(
            event=None,
            _ctxt=None,
            sns_client=sns_client
        )

    messages = sns_client.list_messages()
    assert len(messages) == 1
    assert messages[0][':message'] == {
        'time': '2011-06-21T00:00:00',
        'private_bucket_name': private_bucket_name,
        'es_index': es_index
    }

## Instruction:
Use the correctly named env vars

## Code After:
import datetime as dt
import os

import mock
from unittest.mock import patch

import snapshot_scheduler


class patched_datetime(dt.datetime):
    @classmethod
    def utcnow(cls):
        return dt.datetime(2011, 6, 21, 0, 0, 0, 0)


@mock.patch('datetime.datetime', patched_datetime)
def test_writes_message_to_sqs(sns_client, topic_arn):
    private_bucket_name = "private_bucket_name"
    es_index = "es_index"

    patched_os_environ = {
        'TOPIC_ARN': topic_arn,
        'PRIVATE_BUCKET_NAME': private_bucket_name,
        'ES_INDEX': es_index
    }

    with patch.dict(os.environ, patched_os_environ, clear=True):
        snapshot_scheduler.main(sns_client=sns_client)

    messages = sns_client.list_messages()
    assert len(messages) == 1
    assert messages[0][':message'] == {
        'time': '2011-06-21T00:00:00',
        'private_bucket_name': private_bucket_name,
        'es_index': es_index
    }
import datetime as dt
import os

import mock
from unittest.mock import patch

import snapshot_scheduler


class patched_datetime(dt.datetime):
    @classmethod
    def utcnow(cls):
        return dt.datetime(2011, 6, 21, 0, 0, 0, 0)


@mock.patch('datetime.datetime', patched_datetime)
def test_writes_message_to_sqs(sns_client, topic_arn):
    private_bucket_name = "private_bucket_name"
    es_index = "es_index"

    patched_os_environ = {
        'TOPIC_ARN': topic_arn,
-         'TARGET_BUCKET_NAME': private_bucket_name,
?          --- -
+         'PRIVATE_BUCKET_NAME': private_bucket_name,
?          +++++
        'ES_INDEX': es_index
    }

    with patch.dict(os.environ, patched_os_environ, clear=True):
+         snapshot_scheduler.main(sns_client=sns_client)
-         snapshot_scheduler.main(
-             event=None,
-             _ctxt=None,
-             sns_client=sns_client
-         )

    messages = sns_client.list_messages()
    assert len(messages) == 1
    assert messages[0][':message'] == {
        'time': '2011-06-21T00:00:00',
        'private_bucket_name': private_bucket_name,
        'es_index': es_index
    }
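The test above leans on `unittest.mock.patch.dict` with `clear=True`, which swaps in a clean environment for the duration of the `with` block and restores the original afterwards. A minimal standalone sketch of that technique (the key name here is just an example, and the final assertion assumes the variable was not already set outside the block):

import os
from unittest.mock import patch

# clear=True empties os.environ first, so the code under test sees *only* these keys;
# the original environment is restored when the block exits.
with patch.dict(os.environ, {'PRIVATE_BUCKET_NAME': 'bucket'}, clear=True):
    assert os.environ['PRIVATE_BUCKET_NAME'] == 'bucket'
    assert 'PATH' not in os.environ  # everything else was cleared

assert 'PRIVATE_BUCKET_NAME' not in os.environ  # patch undone on exit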
cc006a07a486ec6c88cd2b4deb929a3c723c5a2c
cupyx/fallback_mode/__init__.py
cupyx/fallback_mode/__init__.py
from cupyx.fallback_mode.fallback import numpy # NOQA
from cupy import util as _util

# Attributes and Methods for fallback_mode
# Auto-execute numpy method when corresponding cupy method is not found

# "NOQA" to suppress flake8 warning
from cupyx.fallback_mode.fallback import numpy  # NOQA

_util.experimental('cupyx.fallback_mode.numpy')
Support fallback-mode as an experimental feature
Support fallback-mode as an experimental feature
Python
mit
cupy/cupy,cupy/cupy,cupy/cupy,cupy/cupy
+ from cupy import util as _util
+
+ # Attributes and Methods for fallback_mode
+ # Auto-execute numpy method when corresponding cupy method is not found
+
+ # "NOQA" to suppress flake8 warning
from cupyx.fallback_mode.fallback import numpy  # NOQA
+
+ _util.experimental('cupyx.fallback_mode.numpy')
+
Support fallback-mode as an experimental feature
## Code Before:
from cupyx.fallback_mode.fallback import numpy  # NOQA

## Instruction:
Support fallback-mode as an experimental feature

## Code After:
from cupy import util as _util

# Attributes and Methods for fallback_mode
# Auto-execute numpy method when corresponding cupy method is not found

# "NOQA" to suppress flake8 warning
from cupyx.fallback_mode.fallback import numpy  # NOQA

_util.experimental('cupyx.fallback_mode.numpy')
+ from cupy import util as _util
+
+ # Attributes and Methods for fallback_mode
+ # Auto-execute numpy method when corresponding cupy method is not found
+
+ # "NOQA" to suppress flake8 warning
from cupyx.fallback_mode.fallback import numpy  # NOQA
+
+
+ _util.experimental('cupyx.fallback_mode.numpy')
138df31dc628daad0c60f062b05774d6c7d4338d
src/kuas_api/modules/const.py
src/kuas_api/modules/const.py
device_version = {
    "android": "2.1.2",
    "android_donate": "2.1.2",
    "ios": "1.4.3"
}

# Token duration in seconds
token_duration = 3600

# HTTP Status Code
ok = 200
no_content = 204
device_version = {
    "android": "2.1.3",
    "android_donate": "2.1.2",
    "ios": "1.6.0"
}

# Token duration in seconds
token_duration = 3600

serect_key = "usapoijupojfa;dsj;lv;ldakjads;lfkjapoiuewqprjf"

# HTTP Status Code
ok = 200
no_content = 204
Change android version to 2.1.3
Change android version to 2.1.3
Python
mit
JohnSounder/AP-API,kuastw/AP-API,kuastw/AP-API,JohnSounder/AP-API
-
device_version = {
-     "android": "2.1.2",
+     "android": "2.1.3",
    "android_donate": "2.1.2",
-     "ios": "1.4.3"
+     "ios": "1.6.0"
}

# Token duration in seconds
token_duration = 3600

+ serect_key = "usapoijupojfa;dsj;lv;ldakjads;lfkjapoiuewqprjf"

# HTTP Status Code
ok = 200
no_content = 204
Change android version to 2.1.3
## Code Before:
device_version = {
    "android": "2.1.2",
    "android_donate": "2.1.2",
    "ios": "1.4.3"
}

# Token duration in seconds
token_duration = 3600

# HTTP Status Code
ok = 200
no_content = 204

## Instruction:
Change android version to 2.1.3

## Code After:
device_version = {
    "android": "2.1.3",
    "android_donate": "2.1.2",
    "ios": "1.6.0"
}

# Token duration in seconds
token_duration = 3600

serect_key = "usapoijupojfa;dsj;lv;ldakjads;lfkjapoiuewqprjf"

# HTTP Status Code
ok = 200
no_content = 204
-
device_version = {
-     "android": "2.1.2",
?                      ^
+     "android": "2.1.3",
?                      ^
    "android_donate": "2.1.2",
-     "ios": "1.4.3"
?              ^ ^
+     "ios": "1.6.0"
?              ^ ^
}

# Token duration in seconds
token_duration = 3600

+ serect_key = "usapoijupojfa;dsj;lv;ldakjads;lfkjapoiuewqprjf"

# HTTP Status Code
ok = 200
no_content = 204
42cc93590bef8e97c76e79110d2b64906c34690d
config_template.py
config_template.py
chatbot_ubuntu = {
    'path': '',
    'model_id': '',
    'python_env': ''
}

chatbot_swisscom = {
    'path': '',
    'model_id': '',
    'python_env': ''
}

chatbot_ubuntu_seq2seq = {
    'socket_address': '',
    'socket_port': ''
}

ate = {
    'path': '',
    'python_env': ''
}

neuroate = {
    'path': '',
    'python_env': ''
}

ner = {
    'path': '',
    'python_env': ''
}

kpextract = {
    'path': '',
    'fetcher_path': '',
    'python_env': '',
    'api_emb_url':''
}

neural_programmer = {
    'socket_address': '',
    'socket_port': '',
    'mongo': False,
    'mongo_address': '',
    'mongo_port': '',
    'mongo_db': '',
    'mongo_feedback_coll': '',
    'mongo_use_coll': ''
}

gsw_translator = {
    'pbsmt_only_url': '',
    'pbsmt_phono_url': '',
    'pbsmt_ortho_url': '',
    'pbsmt_cbnmt_url': ''
}

machine_translation_stdlangs = {
    'base_url': ''
}

churn = {
    'path' : '',
    'python_env': '',
    'e_host':'',
    'e_port':
}
chatbot_ubuntu = {
    'path': '',
    'model_id': '',
    'python_env': ''
}

chatbot_swisscom = {
    'path': '',
    'model_id': '',
    'python_env': ''
}

chatbot_ubuntu_seq2seq = {
    'socket_address': '',
    'socket_port': ''
}

chatbot_goaloriented = {
    'socket_address': '127.0.0.1',
    'socket_port': 8889
}

ate = {
    'path': '',
    'python_env': ''
}

neuroate = {
    'path': '',
    'python_env': ''
}

ner = {
    'path': '',
    'python_env': ''
}

kpextract = {
    'path': '',
    'fetcher_path': '',
    'python_env': '',
    'api_emb_url':''
}

neural_programmer = {
    'socket_address': '',
    'socket_port': '',
    'mongo': False,
    'mongo_address': '',
    'mongo_port': '',
    'mongo_db': '',
    'mongo_feedback_coll': '',
    'mongo_use_coll': ''
}

gsw_translator = {
    'pbsmt_only_url': '',
    'pbsmt_phono_url': '',
    'pbsmt_ortho_url': '',
    'pbsmt_cbnmt_url': ''
}

machine_translation_stdlangs = {
    'base_url': ''
}

churn = {
    'path' : '',
    'python_env': '',
    'e_host':'',
    'e_port': ''
}
Add ports and fix bug
Add ports and fix bug
Python
mit
nachoaguadoc/aimlx-demos,nachoaguadoc/aimlx-demos,nachoaguadoc/aimlx-demos
chatbot_ubuntu = {
    'path': '',
    'model_id': '',
    'python_env': ''
}

chatbot_swisscom = {
    'path': '',
    'model_id': '',
    'python_env': ''
}

chatbot_ubuntu_seq2seq = {
    'socket_address': '',
    'socket_port': ''
}
+
+ chatbot_goaloriented = {
+     'socket_address': '127.0.0.1',
+     'socket_port': 8889
+ }
+
ate = {
    'path': '',
    'python_env': ''
}

neuroate = {
    'path': '',
    'python_env': ''
}

ner = {
    'path': '',
    'python_env': ''
}

kpextract = {
    'path': '',
    'fetcher_path': '',
    'python_env': '',
    'api_emb_url':''
}

neural_programmer = {
    'socket_address': '',
    'socket_port': '',
    'mongo': False,
    'mongo_address': '',
    'mongo_port': '',
    'mongo_db': '',
    'mongo_feedback_coll': '',
    'mongo_use_coll': ''
}

gsw_translator = {
    'pbsmt_only_url': '',
    'pbsmt_phono_url': '',
    'pbsmt_ortho_url': '',
    'pbsmt_cbnmt_url': ''
}

machine_translation_stdlangs = {
    'base_url': ''
}

churn = {
    'path' : '',
    'python_env': '',
    'e_host':'',
-     'e_port':
+     'e_port': ''
}
Add ports and fix bug
## Code Before:
chatbot_ubuntu = {
    'path': '',
    'model_id': '',
    'python_env': ''
}

chatbot_swisscom = {
    'path': '',
    'model_id': '',
    'python_env': ''
}

chatbot_ubuntu_seq2seq = {
    'socket_address': '',
    'socket_port': ''
}

ate = {
    'path': '',
    'python_env': ''
}

neuroate = {
    'path': '',
    'python_env': ''
}

ner = {
    'path': '',
    'python_env': ''
}

kpextract = {
    'path': '',
    'fetcher_path': '',
    'python_env': '',
    'api_emb_url':''
}

neural_programmer = {
    'socket_address': '',
    'socket_port': '',
    'mongo': False,
    'mongo_address': '',
    'mongo_port': '',
    'mongo_db': '',
    'mongo_feedback_coll': '',
    'mongo_use_coll': ''
}

gsw_translator = {
    'pbsmt_only_url': '',
    'pbsmt_phono_url': '',
    'pbsmt_ortho_url': '',
    'pbsmt_cbnmt_url': ''
}

machine_translation_stdlangs = {
    'base_url': ''
}

churn = {
    'path' : '',
    'python_env': '',
    'e_host':'',
    'e_port':
}

## Instruction:
Add ports and fix bug

## Code After:
chatbot_ubuntu = {
    'path': '',
    'model_id': '',
    'python_env': ''
}

chatbot_swisscom = {
    'path': '',
    'model_id': '',
    'python_env': ''
}

chatbot_ubuntu_seq2seq = {
    'socket_address': '',
    'socket_port': ''
}

chatbot_goaloriented = {
    'socket_address': '127.0.0.1',
    'socket_port': 8889
}

ate = {
    'path': '',
    'python_env': ''
}

neuroate = {
    'path': '',
    'python_env': ''
}

ner = {
    'path': '',
    'python_env': ''
}

kpextract = {
    'path': '',
    'fetcher_path': '',
    'python_env': '',
    'api_emb_url':''
}

neural_programmer = {
    'socket_address': '',
    'socket_port': '',
    'mongo': False,
    'mongo_address': '',
    'mongo_port': '',
    'mongo_db': '',
    'mongo_feedback_coll': '',
    'mongo_use_coll': ''
}

gsw_translator = {
    'pbsmt_only_url': '',
    'pbsmt_phono_url': '',
    'pbsmt_ortho_url': '',
    'pbsmt_cbnmt_url': ''
}

machine_translation_stdlangs = {
    'base_url': ''
}

churn = {
    'path' : '',
    'python_env': '',
    'e_host':'',
    'e_port': ''
}
chatbot_ubuntu = {
    'path': '',
    'model_id': '',
    'python_env': ''
}

chatbot_swisscom = {
    'path': '',
    'model_id': '',
    'python_env': ''
}

chatbot_ubuntu_seq2seq = {
    'socket_address': '',
    'socket_port': ''
}
+
+ chatbot_goaloriented = {
+     'socket_address': '127.0.0.1',
+     'socket_port': 8889
+ }
+
ate = {
    'path': '',
    'python_env': ''
}

neuroate = {
    'path': '',
    'python_env': ''
}

ner = {
    'path': '',
    'python_env': ''
}

kpextract = {
    'path': '',
    'fetcher_path': '',
    'python_env': '',
    'api_emb_url':''
}

neural_programmer = {
    'socket_address': '',
    'socket_port': '',
    'mongo': False,
    'mongo_address': '',
    'mongo_port': '',
    'mongo_db': '',
    'mongo_feedback_coll': '',
    'mongo_use_coll': ''
}

gsw_translator = {
    'pbsmt_only_url': '',
    'pbsmt_phono_url': '',
    'pbsmt_ortho_url': '',
    'pbsmt_cbnmt_url': ''
}

machine_translation_stdlangs = {
    'base_url': ''
}

churn = {
    'path' : '',
    'python_env': '',
    'e_host':'',
-     'e_port':
+     'e_port': ''
?               ++
}
8acb681ff8963621452f0e018781c76d4935cb84
projects/urls.py
projects/urls.py
from django.conf.urls import patterns, url

urlpatterns = patterns('projects.views',
    url(r'^add/$', 'add_project', name='add_project'),
    url(r'^edit/(?P<project_id>\d+)/$', 'edit_project', name='edit_project'),
    url(r'^status/(?P<project_id>\d+)/$', 'edit_status', name='edit_status'),
    url(r'^archive/$', 'projects_archive', name='projects_archive'),
    url(r'^archive/review/(?P<project_id>\d+)/$', 'show_project', name='show-project'),
)
from django.conf.urls import patterns, url

urlpatterns = patterns('projects.views',
    url(r'^add/$', 'add_project', name='add_project'),
    url(r'^edit/(?P<project_id>\d+)/$', 'edit_project', name='edit_project'),
    url(r'^edit_status/(?P<project_id>\d+)/$', 'edit_status', name='edit_status'),
    url(r'^status/(?P<project_id>\d+)/$', 'edit_status', name='edit_status'),
    url(r'^archive/$', 'projects_archive', name='projects_archive'),
    url(r'^archive/review/(?P<project_id>\d+)/$', 'show_project', name='show-project'),
)
Add url for project_status_edit option
Add url for project_status_edit option
Python
mit
Hackfmi/Diaphanum,Hackfmi/Diaphanum
from django.conf.urls import patterns, url

urlpatterns = patterns('projects.views',
    url(r'^add/$', 'add_project', name='add_project'),
    url(r'^edit/(?P<project_id>\d+)/$', 'edit_project', name='edit_project'),
+     url(r'^edit_status/(?P<project_id>\d+)/$', 'edit_status', name='edit_status'),
    url(r'^status/(?P<project_id>\d+)/$', 'edit_status', name='edit_status'),
    url(r'^archive/$', 'projects_archive', name='projects_archive'),
    url(r'^archive/review/(?P<project_id>\d+)/$', 'show_project', name='show-project'),
)
Add url for project_status_edit option
## Code Before:
from django.conf.urls import patterns, url

urlpatterns = patterns('projects.views',
    url(r'^add/$', 'add_project', name='add_project'),
    url(r'^edit/(?P<project_id>\d+)/$', 'edit_project', name='edit_project'),
    url(r'^status/(?P<project_id>\d+)/$', 'edit_status', name='edit_status'),
    url(r'^archive/$', 'projects_archive', name='projects_archive'),
    url(r'^archive/review/(?P<project_id>\d+)/$', 'show_project', name='show-project'),
)

## Instruction:
Add url for project_status_edit option

## Code After:
from django.conf.urls import patterns, url

urlpatterns = patterns('projects.views',
    url(r'^add/$', 'add_project', name='add_project'),
    url(r'^edit/(?P<project_id>\d+)/$', 'edit_project', name='edit_project'),
    url(r'^edit_status/(?P<project_id>\d+)/$', 'edit_status', name='edit_status'),
    url(r'^status/(?P<project_id>\d+)/$', 'edit_status', name='edit_status'),
    url(r'^archive/$', 'projects_archive', name='projects_archive'),
    url(r'^archive/review/(?P<project_id>\d+)/$', 'show_project', name='show-project'),
)
from django.conf.urls import patterns, url

urlpatterns = patterns('projects.views',
    url(r'^add/$', 'add_project', name='add_project'),
    url(r'^edit/(?P<project_id>\d+)/$', 'edit_project', name='edit_project'),
+     url(r'^edit_status/(?P<project_id>\d+)/$', 'edit_status', name='edit_status'),
    url(r'^status/(?P<project_id>\d+)/$', 'edit_status', name='edit_status'),
    url(r'^archive/$', 'projects_archive', name='projects_archive'),
    url(r'^archive/review/(?P<project_id>\d+)/$', 'show_project', name='show-project'),
)
8c8307ff5313b1f6c69d976853f763daf2aece0c
test.py
test.py
""" Functions to call the api and test it """ import sys import fenix api = fenix.FenixAPISingleton() print('Testing Fenix API SDK Python') auth_url = api.get_authentication_url() print(auth_url) api.set_code(sys.argv[1]) print('Access token: ' + api.get_access_token()) print('Refresh token: ' + api.get_refresh_token()) api._refresh_access_token() print('New access token: ' + api.get_access_token()) print(api.get_space('2465311230082'))
""" Functions to call the api and test it """ import sys import fenix api = fenix.FenixAPISingleton() print('Testing Fenix API SDK Python') auth_url = api.get_authentication_url() print(api.get_space('2465311230082')) print(auth_url) api.set_code(sys.argv[1]) print('Access token: ' + api.get_access_token()) print('Refresh token: ' + api.get_refresh_token()) api._refresh_access_token() print('New access token: ' + api.get_access_token())
Test now calls a public endpoint first
Test now calls a public endpoint first
Python
mit
samfcmc/fenixedu-python-sdk
""" Functions to call the api and test it """ import sys import fenix api = fenix.FenixAPISingleton() print('Testing Fenix API SDK Python') auth_url = api.get_authentication_url() + print(api.get_space('2465311230082')) print(auth_url) api.set_code(sys.argv[1]) print('Access token: ' + api.get_access_token()) print('Refresh token: ' + api.get_refresh_token()) api._refresh_access_token() print('New access token: ' + api.get_access_token()) - print(api.get_space('2465311230082'))
Test now calls a public endpoint first
## Code Before:
""" Functions to call the api and test it """

import sys
import fenix

api = fenix.FenixAPISingleton()
print('Testing Fenix API SDK Python')
auth_url = api.get_authentication_url()
print(auth_url)
api.set_code(sys.argv[1])
print('Access token: ' + api.get_access_token())
print('Refresh token: ' + api.get_refresh_token())
api._refresh_access_token()
print('New access token: ' + api.get_access_token())
print(api.get_space('2465311230082'))

## Instruction:
Test now calls a public endpoint first

## Code After:
""" Functions to call the api and test it """

import sys
import fenix

api = fenix.FenixAPISingleton()
print('Testing Fenix API SDK Python')
auth_url = api.get_authentication_url()
print(api.get_space('2465311230082'))
print(auth_url)
api.set_code(sys.argv[1])
print('Access token: ' + api.get_access_token())
print('Refresh token: ' + api.get_refresh_token())
api._refresh_access_token()
print('New access token: ' + api.get_access_token())
""" Functions to call the api and test it """ import sys import fenix api = fenix.FenixAPISingleton() print('Testing Fenix API SDK Python') auth_url = api.get_authentication_url() + print(api.get_space('2465311230082')) print(auth_url) api.set_code(sys.argv[1]) print('Access token: ' + api.get_access_token()) print('Refresh token: ' + api.get_refresh_token()) api._refresh_access_token() print('New access token: ' + api.get_access_token()) - print(api.get_space('2465311230082'))
d6edbc05f1d6f06848b78f131c975b3373b1179a
cpgintegrate/__init__.py
cpgintegrate/__init__.py
import pandas
import traceback
import typing


def process_files(file_iterator: typing.Iterator[typing.IO],
                  processor: typing.Callable) -> pandas.DataFrame:

    def get_frames():
        for file in file_iterator:
            try:
                df = processor(file)
            except Exception:
                df = pandas.DataFrame({"error": [traceback.format_exc()]})

            yield (df
                   .assign(Source=getattr(file, 'name', None),
                           SubjectID=getattr(file, 'cpgintegrate_subject_id', None),
                           FileSubjectID=df.index if df.index.name else None))

    return pandas.DataFrame(pandas.concat((frame for frame in get_frames()))).set_index("SubjectID")
import pandas
import traceback
import typing


def process_files(file_iterator: typing.Iterator[typing.IO],
                  processor: typing.Callable) -> pandas.DataFrame:

    def get_frames():
        for file in file_iterator:
            df = processor(file)
            yield (df
                   .assign(Source=getattr(file, 'name', None),
                           SubjectID=getattr(file, 'cpgintegrate_subject_id', None),
                           FileSubjectID=df.index if df.index.name else None))

    return pandas.DataFrame(pandas.concat((frame for frame in get_frames()))).set_index("SubjectID")
Raise exceptions rather than catching
Raise exceptions rather than catching
Python
agpl-3.0
PointyShinyBurning/cpgintegrate
import pandas
import traceback
import typing


def process_files(file_iterator: typing.Iterator[typing.IO],
                  processor: typing.Callable) -> pandas.DataFrame:

    def get_frames():
        for file in file_iterator:
-             try:
-                 df = processor(file)
+             df = processor(file)
-             except Exception:
-                 df = pandas.DataFrame({"error": [traceback.format_exc()]})
-
            yield (df
                   .assign(Source=getattr(file, 'name', None),
                           SubjectID=getattr(file, 'cpgintegrate_subject_id', None),
                           FileSubjectID=df.index if df.index.name else None))

    return pandas.DataFrame(pandas.concat((frame for frame in get_frames()))).set_index("SubjectID")
Raise exceptions rather than catching
## Code Before:
import pandas
import traceback
import typing


def process_files(file_iterator: typing.Iterator[typing.IO],
                  processor: typing.Callable) -> pandas.DataFrame:

    def get_frames():
        for file in file_iterator:
            try:
                df = processor(file)
            except Exception:
                df = pandas.DataFrame({"error": [traceback.format_exc()]})

            yield (df
                   .assign(Source=getattr(file, 'name', None),
                           SubjectID=getattr(file, 'cpgintegrate_subject_id', None),
                           FileSubjectID=df.index if df.index.name else None))

    return pandas.DataFrame(pandas.concat((frame for frame in get_frames()))).set_index("SubjectID")

## Instruction:
Raise exceptions rather than catching

## Code After:
import pandas
import traceback
import typing


def process_files(file_iterator: typing.Iterator[typing.IO],
                  processor: typing.Callable) -> pandas.DataFrame:

    def get_frames():
        for file in file_iterator:
            df = processor(file)
            yield (df
                   .assign(Source=getattr(file, 'name', None),
                           SubjectID=getattr(file, 'cpgintegrate_subject_id', None),
                           FileSubjectID=df.index if df.index.name else None))

    return pandas.DataFrame(pandas.concat((frame for frame in get_frames()))).set_index("SubjectID")
import pandas
import traceback
import typing


def process_files(file_iterator: typing.Iterator[typing.IO],
                  processor: typing.Callable) -> pandas.DataFrame:

    def get_frames():
        for file in file_iterator:
-             try:
-                 df = processor(file)
?     ----
+             df = processor(file)
-             except Exception:
-                 df = pandas.DataFrame({"error": [traceback.format_exc()]})
-
            yield (df
                   .assign(Source=getattr(file, 'name', None),
                           SubjectID=getattr(file, 'cpgintegrate_subject_id', None),
                           FileSubjectID=df.index if df.index.name else None))

    return pandas.DataFrame(pandas.concat((frame for frame in get_frames()))).set_index("SubjectID")
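The change above lets any processor exception propagate instead of being swallowed into an error DataFrame. The surrounding generator-plus-`pandas.concat` shape is a common way to build one frame from many sources; a small self-contained sketch of the same shape, with a trivial stand-in for the processor:

import pandas as pd


def get_frames(items):
    for item in items:
        df = pd.DataFrame({'value': [item]})  # stand-in for processor(file)
        yield df.assign(Source='demo')        # tag each frame, as the record does


# pandas.concat accepts any iterable of frames, including a generator.
combined = pd.concat(get_frames([1, 2, 3]), ignore_index=True)
print(combined)  # three rows, with columns: value, Source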
cb073dc49efffad56d880f63fd709e5a803e7cf6
blog/admin.py
blog/admin.py
from django.contrib import admin

# Register your models here.
from .models import Post
from .models import UserProfile
from .models import SocialMedia


class ArticleAdmin(admin.ModelAdmin):
    list_display = ("title", "category", "created", "updated", "status")
    search_fields = ("title", "category", "content")
    list_filter = ("created",)
    # raw_id_fields = ('tag',)
    date_hierarchy = "created"
    prepopulated_fields = {"slug": ("title",)}


class SocialMediaAdmin(admin.ModelAdmin):
    list_display = ("social", "url", "link")


class UserProfileAdmin(admin.ModelAdmin):
    list_display = ("name", "description")


admin.site.register(Post, ArticleAdmin)
admin.site.register(UserProfile, UserProfileAdmin)
admin.site.register(SocialMedia, SocialMediaAdmin)
from django.contrib import admin

# Register your models here.
from .models import Post
from .models import UserProfile
from .models import SocialMedia


class PostAdmin(admin.ModelAdmin):
    list_display = ("title", "category", "created", "updated", "status")
    search_fields = ("title", "category", "content")
    list_filter = ("created",)
    # raw_id_fields = ('tag',)
    date_hierarchy = "created"
    prepopulated_fields = {"slug": ("title",)}


class SocialMediaAdmin(admin.ModelAdmin):
    list_display = ("social", "url", "link")


class UserProfileAdmin(admin.ModelAdmin):
    list_display = ("name", "description")


admin.site.register(Post, PostAdmin)
admin.site.register(UserProfile, UserProfileAdmin)
admin.site.register(SocialMedia, SocialMediaAdmin)
Rename model Article to Post
Rename model Article to Post
Python
apache-2.0
andreztz/DjangoBlog,andreztz/DjangoBlog,andreztz/DjangoBlog
from django.contrib import admin

# Register your models here.
from .models import Post
from .models import UserProfile
from .models import SocialMedia


- class ArticleAdmin(admin.ModelAdmin):
+ class PostAdmin(admin.ModelAdmin):
    list_display = ("title", "category", "created", "updated", "status")
    search_fields = ("title", "category", "content")
    list_filter = ("created",)
    # raw_id_fields = ('tag',)
    date_hierarchy = "created"
    prepopulated_fields = {"slug": ("title",)}


class SocialMediaAdmin(admin.ModelAdmin):
    list_display = ("social", "url", "link")


class UserProfileAdmin(admin.ModelAdmin):
    list_display = ("name", "description")


- admin.site.register(Post, ArticleAdmin)
+ admin.site.register(Post, PostAdmin)
admin.site.register(UserProfile, UserProfileAdmin)
admin.site.register(SocialMedia, SocialMediaAdmin)
Rename model Article to Post
## Code Before:
from django.contrib import admin

# Register your models here.
from .models import Post
from .models import UserProfile
from .models import SocialMedia


class ArticleAdmin(admin.ModelAdmin):
    list_display = ("title", "category", "created", "updated", "status")
    search_fields = ("title", "category", "content")
    list_filter = ("created",)
    # raw_id_fields = ('tag',)
    date_hierarchy = "created"
    prepopulated_fields = {"slug": ("title",)}


class SocialMediaAdmin(admin.ModelAdmin):
    list_display = ("social", "url", "link")


class UserProfileAdmin(admin.ModelAdmin):
    list_display = ("name", "description")


admin.site.register(Post, ArticleAdmin)
admin.site.register(UserProfile, UserProfileAdmin)
admin.site.register(SocialMedia, SocialMediaAdmin)

## Instruction:
Rename model Article to Post

## Code After:
from django.contrib import admin

# Register your models here.
from .models import Post
from .models import UserProfile
from .models import SocialMedia


class PostAdmin(admin.ModelAdmin):
    list_display = ("title", "category", "created", "updated", "status")
    search_fields = ("title", "category", "content")
    list_filter = ("created",)
    # raw_id_fields = ('tag',)
    date_hierarchy = "created"
    prepopulated_fields = {"slug": ("title",)}


class SocialMediaAdmin(admin.ModelAdmin):
    list_display = ("social", "url", "link")


class UserProfileAdmin(admin.ModelAdmin):
    list_display = ("name", "description")


admin.site.register(Post, PostAdmin)
admin.site.register(UserProfile, UserProfileAdmin)
admin.site.register(SocialMedia, SocialMediaAdmin)
from django.contrib import admin

# Register your models here.
from .models import Post
from .models import UserProfile
from .models import SocialMedia


- class ArticleAdmin(admin.ModelAdmin):
?       ^^ ----
+ class PostAdmin(admin.ModelAdmin):
?       ^^^
    list_display = ("title", "category", "created", "updated", "status")
    search_fields = ("title", "category", "content")
    list_filter = ("created",)
    # raw_id_fields = ('tag',)
    date_hierarchy = "created"
    prepopulated_fields = {"slug": ("title",)}


class SocialMediaAdmin(admin.ModelAdmin):
    list_display = ("social", "url", "link")


class UserProfileAdmin(admin.ModelAdmin):
    list_display = ("name", "description")


- admin.site.register(Post, ArticleAdmin)
?                            ^^ ----
+ admin.site.register(Post, PostAdmin)
?                            ^^^
admin.site.register(UserProfile, UserProfileAdmin)
admin.site.register(SocialMedia, SocialMediaAdmin)
7d130a447786c61c7bfbe6bfe2d87b2c28e32eb6
shut-up-bird.py
shut-up-bird.py
from __future__ import print_function

import os
import sys
import argparse
import logging
from __future__ import print_function

import os
import sys
import argparse
import json
import tweepy
import pystache
import webbrowser

CONFIG_FILE = '.shut-up-bird.conf'


def tweep_login(consumer_key, consumer_secret, token='', secret=''):
    auth = tweepy.OAuthHandler(consumer_key, consumer_secret)

    if token and secret:
        auth.set_access_token(token, secret)
    else:
        try:
            print ("Authenticating ...please wait")
            redirect_url = auth.get_authorization_url()

            print ("Opening url - {0} ...".format(redirect_url))
            webbrowser.open(redirect_url)

            verify_code = raw_input("Verification PIN code: ".format(redirect_url))
            auth.get_access_token(verify_code)

        except tweepy.TweepError as e:
            raise Exception("Failed to get request token!", e)

    return auth


def tweep_getAPI(auth):
    api = tweepy.API(auth)
    print("Authenticated as: {0}".format(api.me().screen_name))
    return api


def tweep_delete(api):
    print ("TEST")


def config_load(config_path):
    if not os.path.exists(config_path):
        return False

    with open(config_path, 'r') as infile:
        return json.load(infile)


def config_save(config_path, consumer_key, consumer_secret, token, secret):
    data = {'ck': consumer_key, 'cs': consumer_secret, \
        't': token, 's': secret }

    with open(config_path, 'w') as outfile:
        json.dump(data, outfile, indent=2, ensure_ascii=False)


def get_input(message):
    return raw_input(message)


###########################
# Main
#
if __name__ == "__main__":
    try:
        home_dir = os.path.expanduser('~')
        config = config_load(os.path.join(home_dir, CONFIG_FILE))

        if (config and config['t'] and config['s']):
            auth = tweep_login(config['ck'], config['cs'], config['t'], config['s'])
        else:
            print ("Please provide your Twitter app access keys\n")
            consumer_key = get_input("Consumer Key (API Key): ")
            consumer_secret = get_input("Consumer Secret (API Secret): ")

            auth = tweep_login(consumer_key, consumer_secret)

            config_save(os.path.join(home_dir, CONFIG_FILE), consumer_key, \
                consumer_secret, auth.access_token, auth.access_token_secret)

        api = tweep_getAPI(auth)

    except Exception as e:
        print ("[ERROR] {0}".format(e))
Add OAuth authentication and config settings load/save
Add OAuth authentication and config settings load/save
Python
mit
petarov/shut-up-bird
+
from __future__ import print_function

import os
import sys
import argparse
- import logging
+ import json
+ import tweepy
+ import pystache
+ import webbrowser
+
+ CONFIG_FILE = '.shut-up-bird.conf'
+
+ def tweep_login(consumer_key, consumer_secret, token='', secret=''):
+     auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
+
+     if token and secret:
+         auth.set_access_token(token, secret)
+     else:
+         try:
+             print ("Authenticating ...please wait")
+             redirect_url = auth.get_authorization_url()
+
+             print ("Opening url - {0} ...".format(redirect_url))
+             webbrowser.open(redirect_url)
+
+             verify_code = raw_input("Verification PIN code: ".format(redirect_url))
+             auth.get_access_token(verify_code)
+
+         except tweepy.TweepError as e:
+             raise Exception("Failed to get request token!", e)
+
+     return auth
+
+ def tweep_getAPI(auth):
+     api = tweepy.API(auth)
+     print("Authenticated as: {0}".format(api.me().screen_name))
+     return api
+
+
+ def tweep_delete(api):
+     print ("TEST")
+
+ def config_load(config_path):
+     if not os.path.exists(config_path):
+         return False
+
+     with open(config_path, 'r') as infile:
+         return json.load(infile)
+
+ def config_save(config_path, consumer_key, consumer_secret, token, secret):
+     data = {'ck': consumer_key, 'cs': consumer_secret, \
+         't': token, 's': secret }
+
+     with open(config_path, 'w') as outfile:
+         json.dump(data, outfile, indent=2, ensure_ascii=False)
+
+ def get_input(message):
+     return raw_input(message)
+
+
+ ###########################
+ # Main
+ #
+ if __name__ == "__main__":
+     try:
+         home_dir = os.path.expanduser('~')
+         config = config_load(os.path.join(home_dir, CONFIG_FILE))
+
+         if (config and config['t'] and config['s']):
+             auth = tweep_login(config['ck'], config['cs'], config['t'], config['s'])
+         else:
+             print ("Please provide your Twitter app access keys\n")
+             consumer_key = get_input("Consumer Key (API Key): ")
+             consumer_secret = get_input("Consumer Secret (API Secret): ")
+
+             auth = tweep_login(consumer_key, consumer_secret)
+
+             config_save(os.path.join(home_dir, CONFIG_FILE), consumer_key, \
+                 consumer_secret, auth.access_token, auth.access_token_secret)
+
+         api = tweep_getAPI(auth)
+
+
+     except Exception as e:
+         print ("[ERROR] {0}".format(e))
+
Add OAuth authentication and config settings load/save
## Code Before:
from __future__ import print_function

import os
import sys
import argparse
import logging

## Instruction:
Add OAuth authentication and config settings load/save

## Code After:
from __future__ import print_function

import os
import sys
import argparse
import json
import tweepy
import pystache
import webbrowser

CONFIG_FILE = '.shut-up-bird.conf'


def tweep_login(consumer_key, consumer_secret, token='', secret=''):
    auth = tweepy.OAuthHandler(consumer_key, consumer_secret)

    if token and secret:
        auth.set_access_token(token, secret)
    else:
        try:
            print ("Authenticating ...please wait")
            redirect_url = auth.get_authorization_url()

            print ("Opening url - {0} ...".format(redirect_url))
            webbrowser.open(redirect_url)

            verify_code = raw_input("Verification PIN code: ".format(redirect_url))
            auth.get_access_token(verify_code)

        except tweepy.TweepError as e:
            raise Exception("Failed to get request token!", e)

    return auth


def tweep_getAPI(auth):
    api = tweepy.API(auth)
    print("Authenticated as: {0}".format(api.me().screen_name))
    return api


def tweep_delete(api):
    print ("TEST")


def config_load(config_path):
    if not os.path.exists(config_path):
        return False

    with open(config_path, 'r') as infile:
        return json.load(infile)


def config_save(config_path, consumer_key, consumer_secret, token, secret):
    data = {'ck': consumer_key, 'cs': consumer_secret, \
        't': token, 's': secret }

    with open(config_path, 'w') as outfile:
        json.dump(data, outfile, indent=2, ensure_ascii=False)


def get_input(message):
    return raw_input(message)


###########################
# Main
#
if __name__ == "__main__":
    try:
        home_dir = os.path.expanduser('~')
        config = config_load(os.path.join(home_dir, CONFIG_FILE))

        if (config and config['t'] and config['s']):
            auth = tweep_login(config['ck'], config['cs'], config['t'], config['s'])
        else:
            print ("Please provide your Twitter app access keys\n")
            consumer_key = get_input("Consumer Key (API Key): ")
            consumer_secret = get_input("Consumer Secret (API Secret): ")

            auth = tweep_login(consumer_key, consumer_secret)

            config_save(os.path.join(home_dir, CONFIG_FILE), consumer_key, \
                consumer_secret, auth.access_token, auth.access_token_secret)

        api = tweep_getAPI(auth)

    except Exception as e:
        print ("[ERROR] {0}".format(e))
+
from __future__ import print_function

import os
import sys
import argparse
- import logging
+ import json
+ import tweepy
+ import pystache
+ import webbrowser
+
+ CONFIG_FILE = '.shut-up-bird.conf'
+
+ def tweep_login(consumer_key, consumer_secret, token='', secret=''):
+     auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
+
+     if token and secret:
+         auth.set_access_token(token, secret)
+     else:
+         try:
+             print ("Authenticating ...please wait")
+             redirect_url = auth.get_authorization_url()
+
+             print ("Opening url - {0} ...".format(redirect_url))
+             webbrowser.open(redirect_url)
+
+             verify_code = raw_input("Verification PIN code: ".format(redirect_url))
+             auth.get_access_token(verify_code)
+
+         except tweepy.TweepError as e:
+             raise Exception("Failed to get request token!", e)
+
+     return auth
+
+ def tweep_getAPI(auth):
+     api = tweepy.API(auth)
+     print("Authenticated as: {0}".format(api.me().screen_name))
+     return api
+
+
+ def tweep_delete(api):
+     print ("TEST")
+
+ def config_load(config_path):
+     if not os.path.exists(config_path):
+         return False
+
+     with open(config_path, 'r') as infile:
+         return json.load(infile)
+
+ def config_save(config_path, consumer_key, consumer_secret, token, secret):
+     data = {'ck': consumer_key, 'cs': consumer_secret, \
+         't': token, 's': secret }
+
+     with open(config_path, 'w') as outfile:
+         json.dump(data, outfile, indent=2, ensure_ascii=False)
+
+ def get_input(message):
+     return raw_input(message)
+
+
+ ###########################
+ # Main
+ #
+ if __name__ == "__main__":
+     try:
+         home_dir = os.path.expanduser('~')
+         config = config_load(os.path.join(home_dir, CONFIG_FILE))
+
+         if (config and config['t'] and config['s']):
+             auth = tweep_login(config['ck'], config['cs'], config['t'], config['s'])
+         else:
+             print ("Please provide your Twitter app access keys\n")
+             consumer_key = get_input("Consumer Key (API Key): ")
+             consumer_secret = get_input("Consumer Secret (API Secret): ")
+
+             auth = tweep_login(consumer_key, consumer_secret)
+
+             config_save(os.path.join(home_dir, CONFIG_FILE), consumer_key, \
+                 consumer_secret, auth.access_token, auth.access_token_secret)
+
+         api = tweep_getAPI(auth)
+
+
+     except Exception as e:
+         print ("[ERROR] {0}".format(e))
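The `config_save`/`config_load` pair above is a plain JSON round-trip keyed by short field names. A minimal sketch of that round-trip using only the standard library (the path and values here are throwaway examples, not the real Twitter credentials the script expects):

import json
import os
import tempfile

path = os.path.join(tempfile.mkdtemp(), 'settings.json')

data = {'ck': 'consumer-key', 'cs': 'consumer-secret', 't': 'token', 's': 'secret'}
with open(path, 'w') as outfile:
    json.dump(data, outfile, indent=2, ensure_ascii=False)

with open(path) as infile:
    assert json.load(infile) == data  # the short keys round-trip unchanged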
b690b87094b4205b448ba1ea5dda546c3e7a976d
python/xi_plugin/style.py
python/xi_plugin/style.py
"""Provides convenience methods for styling text.""" BOLD = 1 UNDERLINE = 2 ITALIC = 4 def color_for_rgb_float(red, green, blue): if any(map(lambda x: x < 0 or x > 1, (red, green, blue))): raise ValueError("Values must be in the range 0..1 (inclusive)") red, green, blue = map(lambda c: int(0xFF * c), (red, green, blue)) return (0xFF << 24) | (red << 16) | (green << 8) | blue
"""Provides convenience methods for styling text.""" BOLD = 1 UNDERLINE = 2 ITALIC = 4 def color_for_rgba_float(red, green, blue, alpha=1): if any(map(lambda x: x < 0 or x > 1, (red, green, blue, alpha))): raise ValueError("Values must be in the range 0..1 (inclusive)") red, green, blue, alpha = map(lambda c: int(0xFF * c), (red, green, blue, alpha)) return (alpha << 24) | (red << 16) | (green << 8) | blue
Support setting alpha in colors
Support setting alpha in colors
Python
apache-2.0
google/xi-editor,fuchsia-mirror/third_party-xi-editor,modelorganism/xi-editor,fuchsia-mirror/third_party-xi-editor,modelorganism/xi-editor,google/xi-editor,modelorganism/xi-editor,fuchsia-mirror/third_party-xi-editor,google/xi-editor,fuchsia-mirror/third_party-xi-editor,google/xi-editor
"""Provides convenience methods for styling text.""" BOLD = 1 UNDERLINE = 2 ITALIC = 4 - def color_for_rgb_float(red, green, blue): + def color_for_rgba_float(red, green, blue, alpha=1): - if any(map(lambda x: x < 0 or x > 1, (red, green, blue))): + if any(map(lambda x: x < 0 or x > 1, (red, green, blue, alpha))): raise ValueError("Values must be in the range 0..1 (inclusive)") - red, green, blue = map(lambda c: int(0xFF * c), (red, green, blue)) + red, green, blue, alpha = map(lambda c: int(0xFF * c), (red, green, blue, alpha)) - return (0xFF << 24) | (red << 16) | (green << 8) | blue + return (alpha << 24) | (red << 16) | (green << 8) | blue
Support setting alpha in colors
## Code Before:
"""Provides convenience methods for styling text."""

BOLD = 1
UNDERLINE = 2
ITALIC = 4


def color_for_rgb_float(red, green, blue):
    if any(map(lambda x: x < 0 or x > 1, (red, green, blue))):
        raise ValueError("Values must be in the range 0..1 (inclusive)")
    red, green, blue = map(lambda c: int(0xFF * c), (red, green, blue))
    return (0xFF << 24) | (red << 16) | (green << 8) | blue

## Instruction:
Support setting alpha in colors

## Code After:
"""Provides convenience methods for styling text."""

BOLD = 1
UNDERLINE = 2
ITALIC = 4


def color_for_rgba_float(red, green, blue, alpha=1):
    if any(map(lambda x: x < 0 or x > 1, (red, green, blue, alpha))):
        raise ValueError("Values must be in the range 0..1 (inclusive)")
    red, green, blue, alpha = map(lambda c: int(0xFF * c), (red, green, blue, alpha))
    return (alpha << 24) | (red << 16) | (green << 8) | blue
"""Provides convenience methods for styling text.""" BOLD = 1 UNDERLINE = 2 ITALIC = 4 - def color_for_rgb_float(red, green, blue): + def color_for_rgba_float(red, green, blue, alpha=1): ? + +++++++++ - if any(map(lambda x: x < 0 or x > 1, (red, green, blue))): + if any(map(lambda x: x < 0 or x > 1, (red, green, blue, alpha))): ? +++++++ raise ValueError("Values must be in the range 0..1 (inclusive)") - red, green, blue = map(lambda c: int(0xFF * c), (red, green, blue)) + red, green, blue, alpha = map(lambda c: int(0xFF * c), (red, green, blue, alpha)) ? +++++++ +++++++ - return (0xFF << 24) | (red << 16) | (green << 8) | blue ? ^^^^ + return (alpha << 24) | (red << 16) | (green << 8) | blue ? ^^^^^
3a0b5bd923eff1fb143aa73fc54f735e7b330068
examples/plot_sphere_function.py
examples/plot_sphere_function.py
###############################################################################
# Import required packages

import numpy as np
import matplotlib.pyplot as plt

from ailib.utils.plot import plot_2d_contour_solution_space, plot_2d_solution_space
from ailib.optimize.functions.unconstrained import sphere

###############################################################################
# Plot the sphere function

plot_2d_solution_space(sphere,
                       xmin=-2*np.ones(2),
                       xmax=2*np.ones(2),
                       xstar=np.zeros(2),
                       angle_view=(55, 83),
                       title="Sphere function",
                       output_file_name="sphere_3d.png")

plt.tight_layout()
plt.show()

###############################################################################
# Plot the contours

plot_2d_contour_solution_space(sphere,
                               xmin=-10*np.ones(2),
                               xmax=10*np.ones(2),
                               xstar=np.zeros(2),
                               title="Sphere function",
                               output_file_name="sphere.png")

plt.tight_layout()
plt.show()
###############################################################################
# Import required packages

import numpy as np
import matplotlib.pyplot as plt

from ailib.utils.plot import plot_2d_contour_solution_space, plot_2d_solution_space
from ailib.optimize.functions.unconstrained import sphere

###############################################################################
# Plot the sphere function

plot_2d_solution_space(sphere,
                       xmin=-2*np.ones(2),
                       xmax=2*np.ones(2),
                       xstar=np.zeros(2),
                       angle_view=(55, 83),
                       title="Sphere function")

plt.tight_layout()
plt.show()

###############################################################################
# Plot the contours

plot_2d_contour_solution_space(sphere,
                               xmin=-10*np.ones(2),
                               xmax=10*np.ones(2),
                               xstar=np.zeros(2),
                               title="Sphere function")

plt.tight_layout()
plt.show()
Switch off the output file generation.
Switch off the output file generation.
Python
mit
jeremiedecock/pyai,jeremiedecock/pyai
###############################################################################
# Import required packages

import numpy as np
import matplotlib.pyplot as plt

from ailib.utils.plot import plot_2d_contour_solution_space, plot_2d_solution_space
from ailib.optimize.functions.unconstrained import sphere

###############################################################################
# Plot the sphere function

plot_2d_solution_space(sphere,
                       xmin=-2*np.ones(2),
                       xmax=2*np.ones(2),
                       xstar=np.zeros(2),
                       angle_view=(55, 83),
-                        title="Sphere function",
+                        title="Sphere function")
-                        output_file_name="sphere_3d.png")

plt.tight_layout()
plt.show()

###############################################################################
# Plot the contours

plot_2d_contour_solution_space(sphere,
                               xmin=-10*np.ones(2),
                               xmax=10*np.ones(2),
                               xstar=np.zeros(2),
-                                title="Sphere function",
+                                title="Sphere function")
-                                output_file_name="sphere.png")

plt.tight_layout()
plt.show()
Switch off the output file generation.
## Code Before:
###############################################################################
# Import required packages

import numpy as np
import matplotlib.pyplot as plt

from ailib.utils.plot import plot_2d_contour_solution_space, plot_2d_solution_space
from ailib.optimize.functions.unconstrained import sphere

###############################################################################
# Plot the sphere function

plot_2d_solution_space(sphere,
                       xmin=-2*np.ones(2),
                       xmax=2*np.ones(2),
                       xstar=np.zeros(2),
                       angle_view=(55, 83),
                       title="Sphere function",
                       output_file_name="sphere_3d.png")

plt.tight_layout()
plt.show()

###############################################################################
# Plot the contours

plot_2d_contour_solution_space(sphere,
                               xmin=-10*np.ones(2),
                               xmax=10*np.ones(2),
                               xstar=np.zeros(2),
                               title="Sphere function",
                               output_file_name="sphere.png")

plt.tight_layout()
plt.show()

## Instruction:
Switch off the output file generation.

## Code After:
###############################################################################
# Import required packages

import numpy as np
import matplotlib.pyplot as plt

from ailib.utils.plot import plot_2d_contour_solution_space, plot_2d_solution_space
from ailib.optimize.functions.unconstrained import sphere

###############################################################################
# Plot the sphere function

plot_2d_solution_space(sphere,
                       xmin=-2*np.ones(2),
                       xmax=2*np.ones(2),
                       xstar=np.zeros(2),
                       angle_view=(55, 83),
                       title="Sphere function")

plt.tight_layout()
plt.show()

###############################################################################
# Plot the contours

plot_2d_contour_solution_space(sphere,
                               xmin=-10*np.ones(2),
                               xmax=10*np.ones(2),
                               xstar=np.zeros(2),
                               title="Sphere function")

plt.tight_layout()
plt.show()
###############################################################################
# Import required packages

import numpy as np
import matplotlib.pyplot as plt

from ailib.utils.plot import plot_2d_contour_solution_space, plot_2d_solution_space
from ailib.optimize.functions.unconstrained import sphere

###############################################################################
# Plot the sphere function

plot_2d_solution_space(sphere,
                       xmin=-2*np.ones(2),
                       xmax=2*np.ones(2),
                       xstar=np.zeros(2),
                       angle_view=(55, 83),
-                        title="Sphere function",
?                                               ^
+                        title="Sphere function")
?                                               ^
-                        output_file_name="sphere_3d.png")

plt.tight_layout()
plt.show()

###############################################################################
# Plot the contours

plot_2d_contour_solution_space(sphere,
                               xmin=-10*np.ones(2),
                               xmax=10*np.ones(2),
                               xstar=np.zeros(2),
-                                title="Sphere function",
?                                                       ^
+                                title="Sphere function")
?                                                       ^
-                                output_file_name="sphere.png")

plt.tight_layout()
plt.show()
675364683c5415f1db7a5599d8ad97f72f69aaf0
buckets/utils.py
buckets/utils.py
import string
import random

from django.conf import settings


def validate_settings():
    assert settings.AWS, \
        "No AWS settings found"
    assert settings.AWS.get('ACCESS_KEY'), \
        "AWS access key is not set in settings"
    assert settings.AWS.get('SECRET_KEY'), \
        "AWS secret key is not set in settings"
    assert settings.AWS.get('BUCKET'), \
        "AWS bucket name is not set in settings"


ID_FIELD_LENGTH = 24

alphabet = string.ascii_lowercase + string.digits
for loser in 'l1o0':
    i = alphabet.index(loser)
    alphabet = alphabet[:i] + alphabet[i + 1:]


def byte_to_base32_chr(byte):
    return alphabet[byte & 31]


def random_id():
    rand_id = [random.randint(0, 0xFF) for i in range(ID_FIELD_LENGTH)]
    return ''.join(map(byte_to_base32_chr, rand_id))
import string
import random

from django.conf import settings


def validate_settings():
    assert settings.AWS, \
        "No AWS settings found"
    assert settings.AWS.get('ACCESS_KEY'), \
        "AWS access key is not set in settings"
    assert settings.AWS.get('SECRET_KEY'), \
        "AWS secret key is not set in settings"
    assert settings.AWS.get('BUCKET'), \
        "AWS bucket name is not set in settings"


ID_FIELD_LENGTH = 24

alphabet = string.ascii_lowercase + string.digits
alphabet0 = string.ascii_lowercase + string.ascii_lowercase
for loser in 'l1o0':
    i = alphabet.index(loser)
    alphabet = alphabet[:i] + alphabet[i + 1:]
for loser in 'lo':
    i = alphabet0.index(loser)
    alphabet0 = alphabet0[:i] + alphabet0[i + 1:]


def byte_to_base32_chr(byte):
    return alphabet[byte & 31]


def byte_to_letter(byte):
    return alphabet0[byte & 31]


def random_id():
    rand_id = [random.randint(0, 0xFF) for i in range(ID_FIELD_LENGTH)]
    return (byte_to_letter(rand_id[0]) +
            ''.join(map(byte_to_base32_chr, rand_id[1:])))
Make random IDs start with a letter
Make random IDs start with a letter
Python
agpl-3.0
Cadasta/django-buckets,Cadasta/django-buckets,Cadasta/django-buckets
import string
import random

from django.conf import settings


def validate_settings():
    assert settings.AWS, \
        "No AWS settings found"
    assert settings.AWS.get('ACCESS_KEY'), \
        "AWS access key is not set in settings"
    assert settings.AWS.get('SECRET_KEY'), \
        "AWS secret key is not set in settings"
    assert settings.AWS.get('BUCKET'), \
        "AWS bucket name is not set in settings"


ID_FIELD_LENGTH = 24

alphabet = string.ascii_lowercase + string.digits
+ alphabet0 = string.ascii_lowercase + string.ascii_lowercase
for loser in 'l1o0':
    i = alphabet.index(loser)
    alphabet = alphabet[:i] + alphabet[i + 1:]
+ for loser in 'lo':
+     i = alphabet0.index(loser)
+     alphabet0 = alphabet0[:i] + alphabet0[i + 1:]


def byte_to_base32_chr(byte):
    return alphabet[byte & 31]


+ def byte_to_letter(byte):
+     return alphabet0[byte & 31]
+
+
def random_id():
    rand_id = [random.randint(0, 0xFF) for i in range(ID_FIELD_LENGTH)]
+     return (byte_to_letter(rand_id[0]) +
-     return ''.join(map(byte_to_base32_chr, rand_id))
+             ''.join(map(byte_to_base32_chr, rand_id[1:])))
Make random IDs start with a letter
## Code Before:
import string
import random

from django.conf import settings


def validate_settings():
    assert settings.AWS, \
        "No AWS settings found"
    assert settings.AWS.get('ACCESS_KEY'), \
        "AWS access key is not set in settings"
    assert settings.AWS.get('SECRET_KEY'), \
        "AWS secret key is not set in settings"
    assert settings.AWS.get('BUCKET'), \
        "AWS bucket name is not set in settings"


ID_FIELD_LENGTH = 24

alphabet = string.ascii_lowercase + string.digits
for loser in 'l1o0':
    i = alphabet.index(loser)
    alphabet = alphabet[:i] + alphabet[i + 1:]


def byte_to_base32_chr(byte):
    return alphabet[byte & 31]


def random_id():
    rand_id = [random.randint(0, 0xFF) for i in range(ID_FIELD_LENGTH)]
    return ''.join(map(byte_to_base32_chr, rand_id))

## Instruction:
Make random IDs start with a letter

## Code After:
import string
import random

from django.conf import settings


def validate_settings():
    assert settings.AWS, \
        "No AWS settings found"
    assert settings.AWS.get('ACCESS_KEY'), \
        "AWS access key is not set in settings"
    assert settings.AWS.get('SECRET_KEY'), \
        "AWS secret key is not set in settings"
    assert settings.AWS.get('BUCKET'), \
        "AWS bucket name is not set in settings"


ID_FIELD_LENGTH = 24

alphabet = string.ascii_lowercase + string.digits
alphabet0 = string.ascii_lowercase + string.ascii_lowercase
for loser in 'l1o0':
    i = alphabet.index(loser)
    alphabet = alphabet[:i] + alphabet[i + 1:]
for loser in 'lo':
    i = alphabet0.index(loser)
    alphabet0 = alphabet0[:i] + alphabet0[i + 1:]


def byte_to_base32_chr(byte):
    return alphabet[byte & 31]


def byte_to_letter(byte):
    return alphabet0[byte & 31]


def random_id():
    rand_id = [random.randint(0, 0xFF) for i in range(ID_FIELD_LENGTH)]
    return (byte_to_letter(rand_id[0]) +
            ''.join(map(byte_to_base32_chr, rand_id[1:])))
import string
import random

from django.conf import settings


def validate_settings():
    assert settings.AWS, \
        "No AWS settings found"
    assert settings.AWS.get('ACCESS_KEY'), \
        "AWS access key is not set in settings"
    assert settings.AWS.get('SECRET_KEY'), \
        "AWS secret key is not set in settings"
    assert settings.AWS.get('BUCKET'), \
        "AWS bucket name is not set in settings"


ID_FIELD_LENGTH = 24

alphabet = string.ascii_lowercase + string.digits
+ alphabet0 = string.ascii_lowercase + string.ascii_lowercase
for loser in 'l1o0':
    i = alphabet.index(loser)
    alphabet = alphabet[:i] + alphabet[i + 1:]
+ for loser in 'lo':
+     i = alphabet0.index(loser)
+     alphabet0 = alphabet0[:i] + alphabet0[i + 1:]


def byte_to_base32_chr(byte):
    return alphabet[byte & 31]


+ def byte_to_letter(byte):
+     return alphabet0[byte & 31]
+
+
def random_id():
    rand_id = [random.randint(0, 0xFF) for i in range(ID_FIELD_LENGTH)]
+     return (byte_to_letter(rand_id[0]) +
-     return ''.join(map(byte_to_base32_chr, rand_id))
?     ^^^^^^
+             ''.join(map(byte_to_base32_chr, rand_id[1:])))
?     ^^^^^^^                                        ++++ +
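The fix above reserves an all-letters alphabet for the first character, so generated IDs never start with a digit. A condensed restatement of the record's helpers plus a property check (standalone, without the Django settings validation):

import random
import string

ID_FIELD_LENGTH = 24

alphabet = string.ascii_lowercase + string.digits
alphabet0 = string.ascii_lowercase + string.ascii_lowercase
for loser in 'l1o0':  # drop look-alike characters, first occurrence only
    i = alphabet.index(loser)
    alphabet = alphabet[:i] + alphabet[i + 1:]
for loser in 'lo':
    i = alphabet0.index(loser)
    alphabet0 = alphabet0[:i] + alphabet0[i + 1:]


def random_id():
    rand_id = [random.randint(0, 0xFF) for i in range(ID_FIELD_LENGTH)]
    return (alphabet0[rand_id[0] & 31] +            # first char: letters only
            ''.join(alphabet[b & 31] for b in rand_id[1:]))


# Every generated ID starts with a letter, never a digit:
assert all(random_id()[0].isalpha() for _ in range(1000))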
bc3aee78bb5be3afa639b8c572273b662aea1721
glue/tests/test_settings_helpers.py
glue/tests/test_settings_helpers.py
from mock import patch

import os

from glue.config import SettingRegistry
from glue._settings_helpers import load_settings, save_settings


def test_roundtrip(tmpdir):
    settings = SettingRegistry()
    settings.add('STRING', 'green', str)
    settings.add('INT', 3, int)
    settings.add('FLOAT', 5.5, float)
    settings.add('LIST', [1,2,3], list)

    with patch('glue.config.settings', settings):
        with patch('glue.config.CFG_DIR', tmpdir.strpath):

            settings.STRING = 'blue'
            settings.INT = 4
            settings.FLOAT = 3.5
            settings.LIST = ['A', 'BB', 'CCC']

            save_settings()

            assert os.path.exists(os.path.join(tmpdir.strpath, 'settings.cfg'))

            settings.STRING = 'red'
            settings.INT = 3
            settings.FLOAT = 4.5
            settings.LIST = ['DDD', 'EE', 'F']

            load_settings(force=True)

            assert settings.STRING == 'blue'
            assert settings.INT == 4
            assert settings.FLOAT == 3.5
            assert settings.LIST == ['A', 'BB', 'CCC']
from mock import patch

import os

from glue.config import SettingRegistry
from glue._settings_helpers import load_settings, save_settings


def test_roundtrip(tmpdir):
    settings = SettingRegistry()

    settings.add('STRING', 'green', str)
    settings.add('INT', 3, int)
    settings.add('FLOAT', 5.5, float)
    settings.add('LIST', [1,2,3], list)

    with patch('glue.config.settings', settings):
        with patch('glue.config.CFG_DIR', tmpdir.strpath):

            settings.STRING = 'blue'
            settings.INT = 4
            settings.FLOAT = 3.5
            settings.LIST = ['A', 'BB', 'CCC']

            settings.reset_defaults()

            assert settings.STRING == 'green'
            assert settings.INT == 3
            assert settings.FLOAT == 5.5
            assert settings.LIST == [1, 2, 3]

            settings.STRING = 'blue'
            settings.INT = 4
            settings.FLOAT = 3.5
            settings.LIST = ['A', 'BB', 'CCC']

            save_settings()

            assert os.path.exists(os.path.join(tmpdir.strpath, 'settings.cfg'))

            settings.reset_defaults()

            settings.STRING = 'red'
            settings.INT = 5

            # Loading settings will only change settings that have not been
            # changed from the defaults...
            load_settings()

            assert settings.STRING == 'red'
            assert settings.INT == 5
            assert settings.FLOAT == 3.5
            assert settings.LIST == ['A', 'BB', 'CCC']

            # ... unless the ``force=True`` option is passed
            load_settings(force=True)

            assert settings.STRING == 'blue'
            assert settings.INT == 4
            assert settings.FLOAT == 3.5
            assert settings.LIST == ['A', 'BB', 'CCC']
Improve unit test for settings helpers
Improve unit test for settings helpers
Python
bsd-3-clause
saimn/glue,stscieisenhamer/glue,stscieisenhamer/glue,saimn/glue
  from mock import patch

  import os

  from glue.config import SettingRegistry
  from glue._settings_helpers import load_settings, save_settings


  def test_roundtrip(tmpdir):

      settings = SettingRegistry()
+
      settings.add('STRING', 'green', str)
      settings.add('INT', 3, int)
      settings.add('FLOAT', 5.5, float)
      settings.add('LIST', [1,2,3], list)

      with patch('glue.config.settings', settings):
          with patch('glue.config.CFG_DIR', tmpdir.strpath):

              settings.STRING = 'blue'
              settings.INT = 4
              settings.FLOAT = 3.5
              settings.LIST = ['A', 'BB', 'CCC']

+             settings.reset_defaults()
+
+             assert settings.STRING == 'green'
+             assert settings.INT == 3
+             assert settings.FLOAT == 5.5
+             assert settings.LIST == [1, 2, 3]
+
+             settings.STRING = 'blue'
+             settings.INT = 4
+             settings.FLOAT = 3.5
+             settings.LIST = ['A', 'BB', 'CCC']
+
              save_settings()

              assert os.path.exists(os.path.join(tmpdir.strpath, 'settings.cfg'))

+             settings.reset_defaults()
+
              settings.STRING = 'red'
-             settings.INT = 3
+             settings.INT = 5
-             settings.FLOAT = 4.5
-             settings.LIST = ['DDD', 'EE', 'F']

+             # Loading settings will only change settings that have not been
+             # changed from the defaults...
+             load_settings()
+
+             assert settings.STRING == 'red'
+             assert settings.INT == 5
+             assert settings.FLOAT == 3.5
+             assert settings.LIST == ['A', 'BB', 'CCC']
+
+             # ... unless the ``force=True`` option is passed
              load_settings(force=True)

              assert settings.STRING == 'blue'
              assert settings.INT == 4
              assert settings.FLOAT == 3.5
              assert settings.LIST == ['A', 'BB', 'CCC']
Improve unit test for settings helpers
  from mock import patch

  import os

  from glue.config import SettingRegistry
  from glue._settings_helpers import load_settings, save_settings


  def test_roundtrip(tmpdir):

      settings = SettingRegistry()
+
      settings.add('STRING', 'green', str)
      settings.add('INT', 3, int)
      settings.add('FLOAT', 5.5, float)
      settings.add('LIST', [1,2,3], list)

      with patch('glue.config.settings', settings):
          with patch('glue.config.CFG_DIR', tmpdir.strpath):

              settings.STRING = 'blue'
              settings.INT = 4
              settings.FLOAT = 3.5
              settings.LIST = ['A', 'BB', 'CCC']

+             settings.reset_defaults()
+
+             assert settings.STRING == 'green'
+             assert settings.INT == 3
+             assert settings.FLOAT == 5.5
+             assert settings.LIST == [1, 2, 3]
+
+             settings.STRING = 'blue'
+             settings.INT = 4
+             settings.FLOAT = 3.5
+             settings.LIST = ['A', 'BB', 'CCC']
+
              save_settings()

              assert os.path.exists(os.path.join(tmpdir.strpath, 'settings.cfg'))

+             settings.reset_defaults()
+
              settings.STRING = 'red'
-             settings.INT = 3
?                            ^
+             settings.INT = 5
?                            ^
-             settings.FLOAT = 4.5
-             settings.LIST = ['DDD', 'EE', 'F']

+             # Loading settings will only change settings that have not been
+             # changed from the defaults...
+             load_settings()
+
+             assert settings.STRING == 'red'
+             assert settings.INT == 5
+             assert settings.FLOAT == 3.5
+             assert settings.LIST == ['A', 'BB', 'CCC']
+
+             # ... unless the ``force=True`` option is passed
              load_settings(force=True)

              assert settings.STRING == 'blue'
              assert settings.INT == 4
              assert settings.FLOAT == 3.5
              assert settings.LIST == ['A', 'BB', 'CCC']
22e0e894bdfb457f00bad1016ae28884ef94256c
okupy/otp/__init__.py
okupy/otp/__init__.py
from django_otp import login as otp_login
from django_otp.middleware import OTPMiddleware

from .sotp.models import SOTPDevice
from .totp.models import TOTPDevice


def init_otp(request):
    """ Initialize OTP after login.

    This sets up OTP devices for django_otp and calls the middleware
    to fill request.user.is_verified().
    """

    tdev, created = TOTPDevice.objects.get_or_create(
        user=request.user,
        defaults={
            'name': 'TOTP device with LDAP secret',
        })
    if created:
        tdev.save()

    sdev, created = SOTPDevice.objects.get_or_create(
        user=request.user,
        defaults={
            'name': 'SOTP device with LDAP passwords',
        })
    if created:
        sdev.save()

    # if OTP is disabled, it will match already
    if tdev.verify_token():
        otp_login(request, tdev)

    # add .is_verified()
    OTPMiddleware().process_request(request)
from django.db import IntegrityError
from django_otp import login as otp_login
from django_otp.middleware import OTPMiddleware

from .sotp.models import SOTPDevice
from .totp.models import TOTPDevice


def init_otp(request):
    """ Initialize OTP after login.

    This sets up OTP devices for django_otp and calls the middleware
    to fill request.user.is_verified().
    """

    tdev = TOTPDevice(user=request.user,
                      name='TOTP device with LDAP secret')
    try:
        tdev.save()
    except IntegrityError:
        tdev = TOTPDevice.objects.get(user=request.user)

    sdev = SOTPDevice(user=request.user,
                      name='SOTP device with LDAP secret')
    try:
        sdev.save()
    except IntegrityError:
        pass

    # if OTP is disabled, it will match already
    if tdev.verify_token():
        otp_login(request, tdev)

    # add .is_verified()
    OTPMiddleware().process_request(request)
Make otp_init() race condition safe.
Make otp_init() race condition safe. A race condition in get_or_create() may have resulted in two devices created per user. Now we guarantee only one. Not that it matters real much...
Python
agpl-3.0
gentoo/identity.gentoo.org,gentoo/identity.gentoo.org,dastergon/identity.gentoo.org,dastergon/identity.gentoo.org
+ from django.db import IntegrityError
  from django_otp import login as otp_login
  from django_otp.middleware import OTPMiddleware

  from .sotp.models import SOTPDevice
  from .totp.models import TOTPDevice


  def init_otp(request):
      """ Initialize OTP after login.

      This sets up OTP devices for django_otp and calls the middleware
      to fill request.user.is_verified().
      """

+     tdev = TOTPDevice(user=request.user,
-     tdev, created = TOTPDevice.objects.get_or_create(
-         user=request.user,
-         defaults={
-             'name': 'TOTP device with LDAP secret',
+                       name='TOTP device with LDAP secret')
+     try:
-         })
-     if created:
          tdev.save()
+     except IntegrityError:
+         tdev = TOTPDevice.objects.get(user=request.user)

+     sdev = SOTPDevice(user=request.user,
-     sdev, created = SOTPDevice.objects.get_or_create(
-         user=request.user,
-         defaults={
-             'name': 'SOTP device with LDAP passwords',
+                       name='SOTP device with LDAP secret')
+     try:
-         })
-     if created:
          sdev.save()
+     except IntegrityError:
+         pass

      # if OTP is disabled, it will match already
      if tdev.verify_token():
          otp_login(request, tdev)

      # add .is_verified()
      OTPMiddleware().process_request(request)
Make otp_init() race condition safe.
+ from django.db import IntegrityError
  from django_otp import login as otp_login
  from django_otp.middleware import OTPMiddleware

  from .sotp.models import SOTPDevice
  from .totp.models import TOTPDevice


  def init_otp(request):
      """ Initialize OTP after login.

      This sets up OTP devices for django_otp and calls the middleware
      to fill request.user.is_verified().
      """

+     tdev = TOTPDevice(user=request.user,
-     tdev, created = TOTPDevice.objects.get_or_create(
-         user=request.user,
-         defaults={
-             'name': 'TOTP device with LDAP secret',
?             ^    ^^^                              ^
+                       name='TOTP device with LDAP secret')
?             ^^^^^^^^^^    ^                             ^
+     try:
-         })
-     if created:
          tdev.save()
+     except IntegrityError:
+         tdev = TOTPDevice.objects.get(user=request.user)

+     sdev = SOTPDevice(user=request.user,
-     sdev, created = SOTPDevice.objects.get_or_create(
-         user=request.user,
-         defaults={
-             'name': 'SOTP device with LDAP passwords',
?             ^    ^^^                       --  ^^^ ^^ ^
+                       name='SOTP device with LDAP secret')
?             ^^^^^^^^^^    ^                        ^^ ^^ ^
+     try:
-         })
-     if created:
          sdev.save()
+     except IntegrityError:
+         pass

      # if OTP is disabled, it will match already
      if tdev.verify_token():
          otp_login(request, tdev)

      # add .is_verified()
      OTPMiddleware().process_request(request)
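Aside on the race the commit message describes: get_or_create() is a SELECT followed by an INSERT, so two concurrent requests can both miss the SELECT and both INSERT. The fix inverts the order — attempt the INSERT unconditionally and let the database's uniqueness constraint arbitrate. A self-contained sketch of that pattern (sqlite3 stands in for Django's ORM here, and the one-column table is invented for the demo):

import sqlite3

db = sqlite3.connect(':memory:')
db.execute('CREATE TABLE device (user TEXT UNIQUE)')  # plays the role of TOTPDevice

def init_device(user):
    try:
        with db:  # transaction scope
            db.execute('INSERT INTO device (user) VALUES (?)', (user,))
    except sqlite3.IntegrityError:
        pass  # somebody else created it first; exactly one row exists either way

init_device('alice')
init_device('alice')  # a second call is a harmless no-op, not a duplicate
assert db.execute('SELECT COUNT(*) FROM device').fetchone()[0] == 1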
b93644f758baa3f3a4e4ea506e8f0305d6503305
pathvalidate/_interface.py
pathvalidate/_interface.py
from __future__ import absolute_import, unicode_literals

import abc

from ._common import _validate_null_string, is_pathlike_obj
from ._six import add_metaclass, text_type


@add_metaclass(abc.ABCMeta)
class NameSanitizer(object):

    @abc.abstractproperty
    def reserved_keywords(self):  # pragma: no cover
        pass

    @abc.abstractmethod
    def validate(self):  # pragma: no cover
        pass

    @abc.abstractmethod
    def sanitize(self, replacement_text=""):  # pragma: no cover
        pass

    @property
    def _str(self):
        return text_type(self._value)

    def __init__(self, value):
        self._validate_null_string(value)

        if is_pathlike_obj(value):
            self._value = value
        else:
            self._value = value.strip()

    def _is_pathlike_obj(self):
        return is_pathlike_obj(self._value)

    def _is_reserved_keyword(self, value):
        return value in self.reserved_keywords

    @staticmethod
    def _validate_null_string(text, error_msg="null name"):
        _validate_null_string(text)
from __future__ import absolute_import, unicode_literals

import abc

from ._common import _validate_null_string, is_pathlike_obj
from ._six import add_metaclass, text_type


@add_metaclass(abc.ABCMeta)
class NameSanitizer(object):

    @abc.abstractproperty
    def reserved_keywords(self):  # pragma: no cover
        pass

    @abc.abstractmethod
    def validate(self):  # pragma: no cover
        pass

    @abc.abstractmethod
    def sanitize(self, replacement_text=""):  # pragma: no cover
        pass

    @property
    def _str(self):
        return text_type(self._value)

    def __init__(self, value):
        self._validate_null_string(value)

        if is_pathlike_obj(value):
            self._value = value
        else:
            self._value = value.strip()

    def _is_pathlike_obj(self):
        return is_pathlike_obj(self._value)

    def _is_reserved_keyword(self, value):
        return value in self.reserved_keywords

    @staticmethod
    def _validate_null_string(text):
        _validate_null_string(text, error_msg="null name")
Modify a function call argument
Modify a function call argument
Python
mit
thombashi/pathvalidate
  from __future__ import absolute_import, unicode_literals

  import abc

  from ._common import _validate_null_string, is_pathlike_obj
  from ._six import add_metaclass, text_type


  @add_metaclass(abc.ABCMeta)
  class NameSanitizer(object):

      @abc.abstractproperty
      def reserved_keywords(self):  # pragma: no cover
          pass

      @abc.abstractmethod
      def validate(self):  # pragma: no cover
          pass

      @abc.abstractmethod
      def sanitize(self, replacement_text=""):  # pragma: no cover
          pass

      @property
      def _str(self):
          return text_type(self._value)

      def __init__(self, value):
          self._validate_null_string(value)

          if is_pathlike_obj(value):
              self._value = value
          else:
              self._value = value.strip()

      def _is_pathlike_obj(self):
          return is_pathlike_obj(self._value)

      def _is_reserved_keyword(self, value):
          return value in self.reserved_keywords

      @staticmethod
+     def _validate_null_string(text):
-     def _validate_null_string(text, error_msg="null name"):
+         _validate_null_string(text, error_msg="null name")
-         _validate_null_string(text)
Modify a function call argument
  from __future__ import absolute_import, unicode_literals

  import abc

  from ._common import _validate_null_string, is_pathlike_obj
  from ._six import add_metaclass, text_type


  @add_metaclass(abc.ABCMeta)
  class NameSanitizer(object):

      @abc.abstractproperty
      def reserved_keywords(self):  # pragma: no cover
          pass

      @abc.abstractmethod
      def validate(self):  # pragma: no cover
          pass

      @abc.abstractmethod
      def sanitize(self, replacement_text=""):  # pragma: no cover
          pass

      @property
      def _str(self):
          return text_type(self._value)

      def __init__(self, value):
          self._validate_null_string(value)

          if is_pathlike_obj(value):
              self._value = value
          else:
              self._value = value.strip()

      def _is_pathlike_obj(self):
          return is_pathlike_obj(self._value)

      def _is_reserved_keyword(self, value):
          return value in self.reserved_keywords

      @staticmethod
+     def _validate_null_string(text):
-     def _validate_null_string(text, error_msg="null name"):
?     ^^^                                                   -
+         _validate_null_string(text, error_msg="null name")
?     ^^^
-         _validate_null_string(text)
22ce36b227c40ace101db5d9c4e30575adca5f36
tests/settings.py
tests/settings.py
import os

from settings import *

db = os.environ.get('DB')

SECRET_KEY = "This is a test, you don't need secrets"

if db == "sqlite":
    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.sqlite3',
            'NAME': ':memory:',
        },
    }
elif db == "postgres":
    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.postgresql_psycopg2',
            'NAME': 'inboxen',
            'USER': 'postgres',
        },
    }
else:
    raise NotImplementedError("Please check tests/settings.py for valid DB values")
from __future__ import absolute_import
import os

from settings import *

db = os.environ.get('DB')

SECRET_KEY = "This is a test, you don't need secrets"

if db == "sqlite":
    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.sqlite3',
            'NAME': ':memory:',
        },
    }
elif db == "postgres":
    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.postgresql_psycopg2',
            'NAME': 'inboxen',
            'USER': 'postgres',
        },
    }
else:
    raise NotImplementedError("Please check tests/settings.py for valid DB values")
Use absolute imports to avoid module naming issues
Use absolute imports to avoid module naming issues
Python
agpl-3.0
Inboxen/Inboxen,Inboxen/Inboxen,Inboxen/infrastructure,Inboxen/Inboxen,Inboxen/Inboxen
+ from __future__ import absolute_import
  import os

  from settings import *

  db = os.environ.get('DB')

  SECRET_KEY = "This is a test, you don't need secrets"

  if db == "sqlite":
      DATABASES = {
          'default': {
              'ENGINE': 'django.db.backends.sqlite3',
              'NAME': ':memory:',
          },
      }
  elif db == "postgres":
      DATABASES = {
          'default': {
              'ENGINE': 'django.db.backends.postgresql_psycopg2',
              'NAME': 'inboxen',
              'USER': 'postgres',
          },
      }
  else:
      raise NotImplementedError("Please check tests/settings.py for valid DB values")
Use absolute imports to avoid module naming issues
+ from __future__ import absolute_import
  import os

  from settings import *

  db = os.environ.get('DB')

  SECRET_KEY = "This is a test, you don't need secrets"

  if db == "sqlite":
      DATABASES = {
          'default': {
              'ENGINE': 'django.db.backends.sqlite3',
              'NAME': ':memory:',
          },
      }
  elif db == "postgres":
      DATABASES = {
          'default': {
              'ENGINE': 'django.db.backends.postgresql_psycopg2',
              'NAME': 'inboxen',
              'USER': 'postgres',
          },
      }
  else:
      raise NotImplementedError("Please check tests/settings.py for valid DB values")
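Aside: the naming issue this future-import avoids is specific to Python 2, where `from settings import *` executed inside tests/settings.py resolves `settings` relative to the tests package first — the module can import itself instead of the project-level settings. A throwaway reproduction of the lookup rule (the package layout is invented for the demo; the final assertion is what absolute imports guarantee):

import os
import sys
import tempfile
import textwrap

root = tempfile.mkdtemp()
with open(os.path.join(root, 'settings.py'), 'w') as fh:
    fh.write("WHO = 'top-level'\n")
os.mkdir(os.path.join(root, 'pkg'))
with open(os.path.join(root, 'pkg', '__init__.py'), 'w') as fh:
    fh.write('')
with open(os.path.join(root, 'pkg', 'settings.py'), 'w') as fh:
    fh.write(textwrap.dedent('''\
        from __future__ import absolute_import  # the fix under test
        import settings
        WHO = settings.WHO
    '''))

sys.path.insert(0, root)
import pkg.settings

# Under Python 2 *without* absolute_import, `import settings` inside
# pkg/settings.py finds the half-initialised module itself; with it, the
# top-level module always wins (the Python 3 behaviour).
assert pkg.settings.WHO == 'top-level'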
5156af5576d5663555bc04f5960e7e4cdd861166
objectrocket/util.py
objectrocket/util.py
"""Utility code for the objectrocket package.""" import types def register_extension_class(ext, base, *args, **kwargs): """Instantiate the given extension class and register as a public attribute of the given base. README: The expected protocol here to instantiate the given extension and pass the base object as the first positional argument, then unpack args and kwargs as additional arguments to the extension's constructor. """ ext_instance = ext.plugin(base, *args, **kwargs) setattr(base, ext.name.lstrip('_'), ext_instance) def register_extension_method(ext, base, *args, **kwargs): """Register the given extension method as a public attribute of the given base.""" bound_method = types.MethodType(ext.plugin, base, base.__class__) setattr(base, ext.name.lstrip('_'), bound_method)
"""Utility code for the objectrocket package.""" import types def register_extension_class(ext, base, *args, **kwargs): """Instantiate the given extension class and register as a public attribute of the given base. README: The expected protocol here is to instantiate the given extension and pass the base object as the first positional argument, then unpack args and kwargs as additional arguments to the extension's constructor. """ ext_instance = ext.plugin(base, *args, **kwargs) setattr(base, ext.name.lstrip('_'), ext_instance) def register_extension_method(ext, base, *args, **kwargs): """Register the given extension method as a public attribute of the given base. README: The expected protocol here is that the given extension method is an unbound function. It will be bound to the specified base as a method, and then set as a public attribute of that base. """ bound_method = types.MethodType(ext.plugin, base, base.__class__) setattr(base, ext.name.lstrip('_'), bound_method)
Clean up docs on extension protocols.
Clean up docs on extension protocols.
Python
mit
objectrocket/python-client,objectrocket/python-client
"""Utility code for the objectrocket package.""" import types def register_extension_class(ext, base, *args, **kwargs): """Instantiate the given extension class and register as a public attribute of the given base. - README: The expected protocol here to instantiate the given extension and pass the base object + README: The expected protocol here is to instantiate the given extension and pass the base - as the first positional argument, then unpack args and kwargs as additional arguments to the + object as the first positional argument, then unpack args and kwargs as additional arguments to - extension's constructor. + the extension's constructor. """ ext_instance = ext.plugin(base, *args, **kwargs) setattr(base, ext.name.lstrip('_'), ext_instance) def register_extension_method(ext, base, *args, **kwargs): - """Register the given extension method as a public attribute of the given base.""" + """Register the given extension method as a public attribute of the given base. + + README: The expected protocol here is that the given extension method is an unbound function. + It will be bound to the specified base as a method, and then set as a public attribute of that + base. + """ bound_method = types.MethodType(ext.plugin, base, base.__class__) setattr(base, ext.name.lstrip('_'), bound_method)
Clean up docs on extension protocols.
"""Utility code for the objectrocket package.""" import types def register_extension_class(ext, base, *args, **kwargs): """Instantiate the given extension class and register as a public attribute of the given base. - README: The expected protocol here to instantiate the given extension and pass the base object ? ------- + README: The expected protocol here is to instantiate the given extension and pass the base ? +++ - as the first positional argument, then unpack args and kwargs as additional arguments to the ? ---- + object as the first positional argument, then unpack args and kwargs as additional arguments to ? +++++++ - extension's constructor. + the extension's constructor. ? ++++ """ ext_instance = ext.plugin(base, *args, **kwargs) setattr(base, ext.name.lstrip('_'), ext_instance) def register_extension_method(ext, base, *args, **kwargs): - """Register the given extension method as a public attribute of the given base.""" ? --- + """Register the given extension method as a public attribute of the given base. + + README: The expected protocol here is that the given extension method is an unbound function. + It will be bound to the specified base as a method, and then set as a public attribute of that + base. + """ bound_method = types.MethodType(ext.plugin, base, base.__class__) setattr(base, ext.name.lstrip('_'), bound_method)
1b47c9eb39a2c5bbdf05397c949619d5a044f2ae
fabfile.py
fabfile.py
from fabric.api import env, run, local, sudo, settings
from fabric.contrib.console import confirm


def build_local():
    local('docker-compose run app go build -v')
    local('mv app/app ./application')


def copy_app():
    local('scp application {0}@{1}:/home/{0}'.format(env.user, env.hosts[0]))


def stop_service():
    with settings(warn_only=True):
        sudo('service pi-cloud stop')


def remove_old_app():
    run('rm pi-cloud')


def rename_new_app():
    run('mv application pi-cloud')


def start_service():
    sudo('service pi-cloud start')


def deploy():
    copy_app()
    stop_service()
    remove_old_app()
    rename_new_app()
    start_service()


def build_deploy():
    build_local()
    deploy()
import os
from fabric.api import env, run, local, sudo, settings


env.password = os.getenv('SUDO_PASSWORD', None)
assert env.password


def build_local():
    local('docker-compose run app go build -v')
    local('mv app/app ./application')


def copy_app():
    local('scp application {0}@{1}:/home/{0}'.format(env.user, env.hosts[0]))


def stop_service():
    with settings(warn_only=True):
        sudo('service pi-cloud stop')


def remove_old_app():
    run('rm pi-cloud')


def rename_new_app():
    run('mv application pi-cloud')


def start_service():
    sudo('service pi-cloud start')


def deploy():
    copy_app()
    stop_service()
    remove_old_app()
    rename_new_app()
    start_service()


def build_deploy():
    build_local()
    deploy()
Set password in env var
Set password in env var
Python
mit
exitcodezero/picloud,exitcodezero/pi-cloud-sockets
+ import os
  from fabric.api import env, run, local, sudo, settings
- from fabric.contrib.console import confirm
+
+
+ env.password = os.getenv('SUDO_PASSWORD', None)
+ assert env.password


  def build_local():
      local('docker-compose run app go build -v')
      local('mv app/app ./application')


  def copy_app():
      local('scp application {0}@{1}:/home/{0}'.format(env.user, env.hosts[0]))


  def stop_service():
      with settings(warn_only=True):
          sudo('service pi-cloud stop')


  def remove_old_app():
      run('rm pi-cloud')


  def rename_new_app():
      run('mv application pi-cloud')


  def start_service():
      sudo('service pi-cloud start')


  def deploy():
      copy_app()
      stop_service()
      remove_old_app()
      rename_new_app()
      start_service()


  def build_deploy():
      build_local()
      deploy()
Set password in env var
+ import os
  from fabric.api import env, run, local, sudo, settings
- from fabric.contrib.console import confirm
+
+
+ env.password = os.getenv('SUDO_PASSWORD', None)
+ assert env.password


  def build_local():
      local('docker-compose run app go build -v')
      local('mv app/app ./application')


  def copy_app():
      local('scp application {0}@{1}:/home/{0}'.format(env.user, env.hosts[0]))


  def stop_service():
      with settings(warn_only=True):
          sudo('service pi-cloud stop')


  def remove_old_app():
      run('rm pi-cloud')


  def rename_new_app():
      run('mv application pi-cloud')


  def start_service():
      sudo('service pi-cloud start')


  def deploy():
      copy_app()
      stop_service()
      remove_old_app()
      rename_new_app()
      start_service()


  def build_deploy():
      build_local()
      deploy()
c31bf6447f7b373ecbcc79e161dc406907887344
commandcenter/app_test.py
commandcenter/app_test.py
from tornado.testing import AsyncHTTPTestCase
from tornado.websocket import websocket_connect

from . import app


class TestMyApp(AsyncHTTPTestCase):
    def get_app(self):
        return app.get_app()

    def test_homepage(self):
        response = self.fetch('/')
        self.assertEqual(response.code, 200)
        body = response.body.decode('utf-8')

        # Check that title is there.
        self.assertIn('<h1>CHIRP Command Center</h1>', body)

        # Check that links are present.
        self.assertRegexpMatches(
            body,
            r'<a href="/new-artists/" class="[^"]+">See new artists</a>')
from tornado.testing import AsyncHTTPTestCase
from tornado.websocket import websocket_connect

from . import app


class TestMyApp(AsyncHTTPTestCase):
    def get_app(self):
        return app.get_app()

    def test_homepage(self):
        response = self.fetch('/')
        self.assertEqual(response.code, 200)
        body = response.body.decode('utf-8')

        # Check that title is there.
        self.assertIn('<h1>CHIRP Command Center</h1>', body)

        # Check that links are present.
        self.assertRegexpMatches(
            body,
            r'<a href="/new-artists/" class="[^"]+">See new artists</a>')

    def test_new_artists_page(self):
        response = self.fetch('/new-artists/')
        self.assertEqual(response.code, 200)
        body = response.body.decode('utf-8')

        self.assertIn('See new artists</h1>', body)
        self.assertIn(
            'This command will show you what artists are (supposedly) not yet in the database.',
            body)
Add another test to check that the "See new artists" page is basically rendered correctly
Add another test to check that the "See new artists" page is basically rendered correctly
Python
apache-2.0
chirpradio/command-center,chirpradio/command-center
  from tornado.testing import AsyncHTTPTestCase
  from tornado.websocket import websocket_connect

  from . import app


  class TestMyApp(AsyncHTTPTestCase):
      def get_app(self):
          return app.get_app()

      def test_homepage(self):
          response = self.fetch('/')
          self.assertEqual(response.code, 200)
          body = response.body.decode('utf-8')

          # Check that title is there.
          self.assertIn('<h1>CHIRP Command Center</h1>', body)

          # Check that links are present.
          self.assertRegexpMatches(
              body,
              r'<a href="/new-artists/" class="[^"]+">See new artists</a>')

+     def test_new_artists_page(self):
+         response = self.fetch('/new-artists/')
+         self.assertEqual(response.code, 200)
+         body = response.body.decode('utf-8')
+
+         self.assertIn('See new artists</h1>', body)
+         self.assertIn(
+             'This command will show you what artists are (supposedly) not yet in the database.',
+             body)
+
Add another test to check that the "See new artists" page is basically rendered correctly
  from tornado.testing import AsyncHTTPTestCase
  from tornado.websocket import websocket_connect

  from . import app


  class TestMyApp(AsyncHTTPTestCase):
      def get_app(self):
          return app.get_app()

      def test_homepage(self):
          response = self.fetch('/')
          self.assertEqual(response.code, 200)
          body = response.body.decode('utf-8')

          # Check that title is there.
          self.assertIn('<h1>CHIRP Command Center</h1>', body)

          # Check that links are present.
          self.assertRegexpMatches(
              body,
              r'<a href="/new-artists/" class="[^"]+">See new artists</a>')
+
+     def test_new_artists_page(self):
+         response = self.fetch('/new-artists/')
+         self.assertEqual(response.code, 200)
+         body = response.body.decode('utf-8')
+
+         self.assertIn('See new artists</h1>', body)
+         self.assertIn(
+             'This command will show you what artists are (supposedly) not yet in the database.',
+             body)
1469da25fec3e3e966d5a0b5fab11dd279bbe05a
blogsite/models.py
blogsite/models.py
"""Collection of Models used in blogsite.""" from . import db class Post(db.Model): """Model representing a blog post. Attributes ---------- id : db.Column Autogenerated primary key title : db.Column body : db.Column """ # Columns id = db.Column(db.Integer, primary_key=True, autoincrement=True) title = db.Column(db.String(128)) body = db.Column(db.String(4096)) def __init__(self, title, body): """Constructor for Post. Parameters ---------- title : String Title/Summary of post body : String Contents """ self.title = title self.body = body def __repr__(self): """Representation.""" return '<Post %r:%r>' % self.id, self.title
"""Collection of Models used in blogsite.""" from . import db class Post(db.Model): """Model representing a blog post. Attributes ---------- id : SQLAlchemy.Column Autogenerated primary key title : SQLAlchemy.Column body : SQLAlchemy.Column """ # Columns id = db.Column(db.Integer, primary_key=True, autoincrement=True) title = db.Column(db.String(128)) body = db.Column(db.String(4096)) def __init__(self, title, body): """Constructor for Post. Parameters ---------- title : String Title/Summary of post body : String Contents """ self.title = title self.body = body def __repr__(self): """Representation.""" return '<Post %r:%r>' % self.id, self.title
Correct type comment for table columns
Correct type comment for table columns
Python
mit
paulaylingdev/blogsite,paulaylingdev/blogsite
"""Collection of Models used in blogsite.""" from . import db class Post(db.Model): """Model representing a blog post. Attributes ---------- - id : db.Column + id : SQLAlchemy.Column Autogenerated primary key - title : db.Column + title : SQLAlchemy.Column - body : db.Column + body : SQLAlchemy.Column """ # Columns id = db.Column(db.Integer, primary_key=True, autoincrement=True) title = db.Column(db.String(128)) body = db.Column(db.String(4096)) def __init__(self, title, body): """Constructor for Post. Parameters ---------- title : String Title/Summary of post body : String Contents """ self.title = title self.body = body def __repr__(self): """Representation.""" return '<Post %r:%r>' % self.id, self.title
Correct type comment for table columns
"""Collection of Models used in blogsite.""" from . import db class Post(db.Model): """Model representing a blog post. Attributes ---------- - id : db.Column + id : SQLAlchemy.Column Autogenerated primary key - title : db.Column ? ^^ + title : SQLAlchemy.Column ? ^^^^^^^^^^ - body : db.Column ? ^^ + body : SQLAlchemy.Column ? ^^^^^^^^^^ """ # Columns id = db.Column(db.Integer, primary_key=True, autoincrement=True) title = db.Column(db.String(128)) body = db.Column(db.String(4096)) def __init__(self, title, body): """Constructor for Post. Parameters ---------- title : String Title/Summary of post body : String Contents """ self.title = title self.body = body def __repr__(self): """Representation.""" return '<Post %r:%r>' % self.id, self.title
fa6c2c43289eeee1c0efab45101149b49be1b5cb
scrapi/processing/osf/__init__.py
scrapi/processing/osf/__init__.py
from scrapi.processing.osf import crud
from scrapi.processing.osf import collision
from scrapi.processing.base import BaseProcessor


class OSFProcessor(BaseProcessor):
    NAME = 'osf'

    def process_normalized(self, raw_doc, normalized):
        if crud.is_event(normalized):
            crud.dump_metdata(normalized, {})
            return

        normalized['collisionCategory'] = crud.get_collision_cat(normalized['source'])

        report_norm = normalized
        resource_norm = crud.clean_report(normalized)

        report_hash = collision.generate_report_hash_list(report_norm)
        resource_hash = collision.generate_resource_hash_list(resource_norm)

        report = collision.detect_collisions(report_hash)
        resource = collision.detect_collisions(resource_hash, is_resource=True)

        if not resource:
            resource = crud.create_resource(resource_norm, resource_hash)
        else:
            crud.dump_metadata(resource_norm, {'nid': resource})

        if not report:
            report = crud.create_report(report_norm, resource, report_hash)
        else:
            crud.dump_metadata(report_norm, {'nid': report, 'pid': resource})

        crud.update_node(report, report_norm)
        if not crud.is_claimed(resource):
            crud.update_node(resource, resource_norm)
from scrapi.processing.osf import crud
from scrapi.processing.osf import collision
from scrapi.processing.base import BaseProcessor


class OSFProcessor(BaseProcessor):
    NAME = 'osf'

    def process_normalized(self, raw_doc, normalized):
        found, _hash = collision.already_processed(raw_doc)

        if found:
            return

        normalized['meta'] = {
            'docHash': _hash
        }

        if crud.is_event(normalized):
            crud.dump_metdata(normalized, {})
            return

        normalized['collisionCategory'] = crud.get_collision_cat(normalized['source'])

        report_norm = normalized
        resource_norm = crud.clean_report(normalized)

        report_hash = collision.generate_report_hash_list(report_norm)
        resource_hash = collision.generate_resource_hash_list(resource_norm)

        report = collision.detect_collisions(report_hash)
        resource = collision.detect_collisions(resource_hash, is_resource=True)

        report_norm['meta']['uids'] = report_hash
        resource_norm['meta']['uids'] = resource_hash

        if not resource:
            resource = crud.create_resource(resource_norm)
        else:
            crud.dump_metadata(resource_norm, {'nid': resource})

        crud.update_node(report, report_norm)
        if not report:
            report = crud.create_report(report_norm, resource)
        else:
            crud.dump_metadata(report_norm, {'nid': report, 'pid': resource})
            if not crud.is_claimed(resource):
                crud.update_node(resource, resource_norm)
Update dumping to osf logic
Update dumping to osf logic
Python
apache-2.0
ostwald/scrapi,mehanig/scrapi,felliott/scrapi,alexgarciac/scrapi,felliott/scrapi,CenterForOpenScience/scrapi,icereval/scrapi,erinspace/scrapi,CenterForOpenScience/scrapi,fabianvf/scrapi,jeffreyliu3230/scrapi,fabianvf/scrapi,erinspace/scrapi,mehanig/scrapi
  from scrapi.processing.osf import crud
  from scrapi.processing.osf import collision
  from scrapi.processing.base import BaseProcessor


  class OSFProcessor(BaseProcessor):
      NAME = 'osf'

      def process_normalized(self, raw_doc, normalized):
+         found, _hash = collision.already_processed(raw_doc)
+
+         if found:
+             return
+
+         normalized['meta'] = {
+             'docHash': _hash
+         }
+
          if crud.is_event(normalized):
              crud.dump_metdata(normalized, {})
              return

          normalized['collisionCategory'] = crud.get_collision_cat(normalized['source'])

          report_norm = normalized
          resource_norm = crud.clean_report(normalized)

          report_hash = collision.generate_report_hash_list(report_norm)
          resource_hash = collision.generate_resource_hash_list(resource_norm)

          report = collision.detect_collisions(report_hash)
          resource = collision.detect_collisions(resource_hash, is_resource=True)

+         report_norm['meta']['uids'] = report_hash
+         resource_norm['meta']['uids'] = resource_hash
+
          if not resource:
-             resource = crud.create_resource(resource_norm, resource_hash)
+             resource = crud.create_resource(resource_norm)
          else:
              crud.dump_metadata(resource_norm, {'nid': resource})

+         crud.update_node(report, report_norm)
          if not report:
-             report = crud.create_report(report_norm, resource, report_hash)
+             report = crud.create_report(report_norm, resource)
          else:
              crud.dump_metadata(report_norm, {'nid': report, 'pid': resource})
+             if not crud.is_claimed(resource):
+                 crud.update_node(resource, resource_norm)
-         crud.update_node(report, report_norm)
-         if not crud.is_claimed(resource):
-             crud.update_node(resource, resource_norm)
-
Update dumping to osf logic
  from scrapi.processing.osf import crud
  from scrapi.processing.osf import collision
  from scrapi.processing.base import BaseProcessor


  class OSFProcessor(BaseProcessor):
      NAME = 'osf'

      def process_normalized(self, raw_doc, normalized):
+         found, _hash = collision.already_processed(raw_doc)
+
+         if found:
+             return
+
+         normalized['meta'] = {
+             'docHash': _hash
+         }
+
          if crud.is_event(normalized):
              crud.dump_metdata(normalized, {})
              return

          normalized['collisionCategory'] = crud.get_collision_cat(normalized['source'])

          report_norm = normalized
          resource_norm = crud.clean_report(normalized)

          report_hash = collision.generate_report_hash_list(report_norm)
          resource_hash = collision.generate_resource_hash_list(resource_norm)

          report = collision.detect_collisions(report_hash)
          resource = collision.detect_collisions(resource_hash, is_resource=True)

+         report_norm['meta']['uids'] = report_hash
+         resource_norm['meta']['uids'] = resource_hash
+
          if not resource:
-             resource = crud.create_resource(resource_norm, resource_hash)
?                                                          ---------------
+             resource = crud.create_resource(resource_norm)
          else:
              crud.dump_metadata(resource_norm, {'nid': resource})

+         crud.update_node(report, report_norm)
          if not report:
-             report = crud.create_report(report_norm, resource, report_hash)
?                                                              -------------
+             report = crud.create_report(report_norm, resource)
          else:
              crud.dump_metadata(report_norm, {'nid': report, 'pid': resource})
-
-         crud.update_node(report, report_norm)
-         if not crud.is_claimed(resource):
+             if not crud.is_claimed(resource):
?         ++++
-             crud.update_node(resource, resource_norm)
+                 crud.update_node(resource, resource_norm)
?             ++++
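Aside: the new guard at the top of process_normalized() short-circuits on documents whose hash was seen before, but the collision module itself is not part of this record. A rough, self-contained model of the contract it appears to implement — the in-memory set and the SHA-1-over-JSON fingerprint are invented for illustration:

import hashlib
import json

_seen = set()  # the real service would persist this, e.g. via normalized['meta']['docHash']

def already_processed(raw_doc):
    _hash = hashlib.sha1(
        json.dumps(raw_doc, sort_keys=True).encode('utf-8')).hexdigest()
    return _hash in _seen, _hash

doc = {'doc': '<xml/>', 'source': 'osf'}
found, h = already_processed(doc)
assert not found
_seen.add(h)                       # what storing the docHash downstream achieves
found, h = already_processed(doc)
assert found                       # an identical payload is skipped the second time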
c9cb55a4a9f3409a3c22edd0d5a8b6bfbdca1208
mopidy_somafm/__init__.py
mopidy_somafm/__init__.py
from __future__ import unicode_literals

import os

from mopidy import config, exceptions, ext


__version__ = '0.3.0'

class Extension(ext.Extension):

    dist_name = 'Mopidy-SomaFM'
    ext_name = 'somafm'
    version = __version__

    def get_default_config(self):
        conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
        return config.read(conf_file)

    def get_config_schema(self):
        schema = super(Extension, self).get_config_schema()
        return schema

    def validate_environment(self):
        try:
            import requests
        except ImportError as e:
            raise ExtensionError('Library requests not found', e)

    def get_backend_classes(self):
        from .actor import SomaFMBackend

        return [SomaFMBackend]
from __future__ import unicode_literals

import os

from mopidy import config, ext


__version__ = '0.3.0'


class Extension(ext.Extension):

    dist_name = 'Mopidy-SomaFM'
    ext_name = 'somafm'
    version = __version__

    def get_default_config(self):
        conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
        return config.read(conf_file)

    def get_config_schema(self):
        schema = super(Extension, self).get_config_schema()
        return schema

    def get_backend_classes(self):
        from .actor import SomaFMBackend

        return [SomaFMBackend]
Remove dependency check done by Mopidy
Remove dependency check done by Mopidy
Python
mit
AlexandrePTJ/mopidy-somafm
  from __future__ import unicode_literals

  import os

- from mopidy import config, exceptions, ext
+ from mopidy import config, ext


  __version__ = '0.3.0'

+
  class Extension(ext.Extension):

      dist_name = 'Mopidy-SomaFM'
      ext_name = 'somafm'
      version = __version__

      def get_default_config(self):
          conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
          return config.read(conf_file)

      def get_config_schema(self):
          schema = super(Extension, self).get_config_schema()
          return schema

-     def validate_environment(self):
-         try:
-             import requests
-         except ImportError as e:
-             raise ExtensionError('Library requests not found', e)
-
      def get_backend_classes(self):
          from .actor import SomaFMBackend

          return [SomaFMBackend]
Remove dependency check done by Mopidy
  from __future__ import unicode_literals

  import os

- from mopidy import config, exceptions, ext
?                              ---       ---------
+ from mopidy import config, ext


  __version__ = '0.3.0'

+
  class Extension(ext.Extension):

      dist_name = 'Mopidy-SomaFM'
      ext_name = 'somafm'
      version = __version__

      def get_default_config(self):
          conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
          return config.read(conf_file)

      def get_config_schema(self):
          schema = super(Extension, self).get_config_schema()
          return schema

-     def validate_environment(self):
-         try:
-             import requests
-         except ImportError as e:
-             raise ExtensionError('Library requests not found', e)
-
      def get_backend_classes(self):
          from .actor import SomaFMBackend

          return [SomaFMBackend]
691bee381bda822a059c5d9fa790feabc7e00a8d
dnsimple2/tests/services/base.py
dnsimple2/tests/services/base.py
import os
from unittest import TestCase

from dnsimple2.client import DNSimple
from dnsimple2.resources import (
    AccountResource,
    DomainResource
)
from dnsimple2.tests.utils import get_test_domain_name


class BaseServiceTestCase(TestCase):
    @classmethod
    def setUpClass(cls):
        access_token = os.getenv('DNSIMPLE_V2_ACCESS_TOKEN')
        cls.client = DNSimple(access_token)
        cls.account = AccountResource(id=424)
        cls.domain = cls.client.domains.create(
            cls.account,
            DomainResource(name=get_test_domain_name(), account=cls.account)
        )

        cls.invalid_domain = DomainResource(
            id=1,
            name='invalid-domain',
            account=cls.account
        )
import os
from unittest import TestCase

from dnsimple2.client import DNSimple
from dnsimple2.resources import (
    AccountResource,
    DomainResource
)
from dnsimple2.tests.utils import get_test_domain_name


class BaseServiceTestCase(TestCase):
    @classmethod
    def setUpClass(cls):
        access_token = os.getenv('DNSIMPLE_V2_ACCESS_TOKEN')
        cls.client = DNSimple(access_token)

        account_id = os.getenv('DNSIMPLE_ACCOUNT_ID')
        cls.account = AccountResource(id=account_id)
        cls.domain = cls.client.domains.create(
            cls.account,
            DomainResource(name=get_test_domain_name(), account=cls.account)
        )

        cls.invalid_domain = DomainResource(
            id=1,
            name='invalid-domain',
            account=cls.account
        )
Use env variable for account id in tests.
Use env variable for account id in tests.
Python
mit
indradhanush/dnsimple2-python
  import os
  from unittest import TestCase

  from dnsimple2.client import DNSimple
  from dnsimple2.resources import (
      AccountResource,
      DomainResource
  )
  from dnsimple2.tests.utils import get_test_domain_name


  class BaseServiceTestCase(TestCase):
      @classmethod
      def setUpClass(cls):
          access_token = os.getenv('DNSIMPLE_V2_ACCESS_TOKEN')
          cls.client = DNSimple(access_token)
+
+         account_id = os.getenv('DNSIMPLE_ACCOUNT_ID')
-         cls.account = AccountResource(id=424)
+         cls.account = AccountResource(id=account_id)
          cls.domain = cls.client.domains.create(
              cls.account,
              DomainResource(name=get_test_domain_name(), account=cls.account)
          )

          cls.invalid_domain = DomainResource(
              id=1,
              name='invalid-domain',
              account=cls.account
          )
Use env variable for account id in tests.
import os from unittest import TestCase from dnsimple2.client import DNSimple from dnsimple2.resources import ( AccountResource, DomainResource ) from dnsimple2.tests.utils import get_test_domain_name class BaseServiceTestCase(TestCase): @classmethod def setUpClass(cls): access_token = os.getenv('DNSIMPLE_V2_ACCESS_TOKEN') cls.client = DNSimple(access_token) + + account_id = os.getenv('DNSIMPLE_ACCOUNT_ID') - cls.account = AccountResource(id=424) ? ^^^ + cls.account = AccountResource(id=account_id) ? ^^^^^^^^^^ cls.domain = cls.client.domains.create( cls.account, DomainResource(name=get_test_domain_name(), account=cls.account) ) cls.invalid_domain = DomainResource( id=1, name='invalid-domain', account=cls.account )
2c502a77ad18d34470e2be89ed1c7a38e6f3799d
tests/test_drogher.py
tests/test_drogher.py
import pytest

import drogher
from drogher.exceptions import InvalidBarcode


class TestDrogher:
    def test_barcode(self):
        shipper = drogher.barcode('1Z999AA10123456784')
        assert shipper.shipper == 'UPS'

    def test_invalid_barcode(self):
        with pytest.raises(InvalidBarcode):
            drogher.barcode('1234')

import pytest

import drogher
from drogher.exceptions import InvalidBarcode


class TestDrogher:
    def test_dhl_barcode(self):
        shipper = drogher.barcode('1656740256')
        assert shipper.shipper == 'DHL'

    def test_fedex_express_barcode(self):
        shipper = drogher.barcode('9632001960000000000400152152152158')
        assert shipper.shipper == 'FedEx'

    def test_fedex_ground_barcode(self):
        shipper = drogher.barcode('9611019012345612345671')
        assert shipper.shipper == 'FedEx'

    def test_ups_barcode(self):
        shipper = drogher.barcode('1Z999AA10123456784')
        assert shipper.shipper == 'UPS'

    def test_usps_barcode(self):
        shipper = drogher.barcode('420221539101026837331000039521')
        assert shipper.shipper == 'USPS'

    def test_invalid_barcode(self):
        with pytest.raises(InvalidBarcode):
            drogher.barcode('1234')
Test barcode function with all shippers
Test barcode function with all shippers
Python
bsd-3-clause
jbittel/drogher
import pytest

import drogher
from drogher.exceptions import InvalidBarcode


class TestDrogher:
-    def test_barcode(self):
+    def test_dhl_barcode(self):
+        shipper = drogher.barcode('1656740256')
+        assert shipper.shipper == 'DHL'
+
+    def test_fedex_express_barcode(self):
+        shipper = drogher.barcode('9632001960000000000400152152152158')
+        assert shipper.shipper == 'FedEx'
+
+    def test_fedex_ground_barcode(self):
+        shipper = drogher.barcode('9611019012345612345671')
+        assert shipper.shipper == 'FedEx'
+
+    def test_ups_barcode(self):
        shipper = drogher.barcode('1Z999AA10123456784')
        assert shipper.shipper == 'UPS'
+
+    def test_usps_barcode(self):
+        shipper = drogher.barcode('420221539101026837331000039521')
+        assert shipper.shipper == 'USPS'

    def test_invalid_barcode(self):
        with pytest.raises(InvalidBarcode):
            drogher.barcode('1234')
Test barcode function with all shippers
## Code Before:
import pytest

import drogher
from drogher.exceptions import InvalidBarcode


class TestDrogher:
    def test_barcode(self):
        shipper = drogher.barcode('1Z999AA10123456784')
        assert shipper.shipper == 'UPS'

    def test_invalid_barcode(self):
        with pytest.raises(InvalidBarcode):
            drogher.barcode('1234')

## Instruction:
Test barcode function with all shippers

## Code After:
import pytest

import drogher
from drogher.exceptions import InvalidBarcode


class TestDrogher:
    def test_dhl_barcode(self):
        shipper = drogher.barcode('1656740256')
        assert shipper.shipper == 'DHL'

    def test_fedex_express_barcode(self):
        shipper = drogher.barcode('9632001960000000000400152152152158')
        assert shipper.shipper == 'FedEx'

    def test_fedex_ground_barcode(self):
        shipper = drogher.barcode('9611019012345612345671')
        assert shipper.shipper == 'FedEx'

    def test_ups_barcode(self):
        shipper = drogher.barcode('1Z999AA10123456784')
        assert shipper.shipper == 'UPS'

    def test_usps_barcode(self):
        shipper = drogher.barcode('420221539101026837331000039521')
        assert shipper.shipper == 'USPS'

    def test_invalid_barcode(self):
        with pytest.raises(InvalidBarcode):
            drogher.barcode('1234')

import pytest

import drogher
from drogher.exceptions import InvalidBarcode


class TestDrogher:
-    def test_barcode(self):
+    def test_dhl_barcode(self):
?             ++++
+        shipper = drogher.barcode('1656740256')
+        assert shipper.shipper == 'DHL'
+
+    def test_fedex_express_barcode(self):
+        shipper = drogher.barcode('9632001960000000000400152152152158')
+        assert shipper.shipper == 'FedEx'
+
+    def test_fedex_ground_barcode(self):
+        shipper = drogher.barcode('9611019012345612345671')
+        assert shipper.shipper == 'FedEx'
+
+    def test_ups_barcode(self):
        shipper = drogher.barcode('1Z999AA10123456784')
        assert shipper.shipper == 'UPS'
+
+    def test_usps_barcode(self):
+        shipper = drogher.barcode('420221539101026837331000039521')
+        assert shipper.shipper == 'USPS'

    def test_invalid_barcode(self):
        with pytest.raises(InvalidBarcode):
            drogher.barcode('1234')
bfc7a13439114313897526ea461f404539cc3fe5
tests/test_publisher.py
tests/test_publisher.py
import gc
import sys
import warnings
import weakref

from lektor.publisher import Command


def test_Command_triggers_no_warnings():
    # This excercises the issue where publishing via rsync resulted
    # in ResourceWarnings about unclosed streams.
    with warnings.catch_warnings():
        warnings.simplefilter("error")

        # This is essentially how RsyncPublisher runs rsync.
        with Command([sys.executable, "-c", "print()"]) as client:
            for _ in client:
                pass

    # The ResourceWarnings regarding unclosed files we are checking for
    # are issued during finalization. Without this extra effort,
    # finalization wouldn't happen until after the test completes.
    client_is_alive = weakref.ref(client)
    del client
    if client_is_alive():
        gc.collect()
    if client_is_alive():
        warnings.warn(
            "Unable to trigger garbage collection of Command instance, "
            "so unable to check for warnings issued during finalization."
        )

import gc
import os
import sys
import warnings
import weakref

import pytest

from lektor.publisher import Command
from lektor.publisher import publish


def test_Command_triggers_no_warnings():
    # This excercises the issue where publishing via rsync resulted
    # in ResourceWarnings about unclosed streams.
    with warnings.catch_warnings():
        warnings.simplefilter("error")

        # This is essentially how RsyncPublisher runs rsync.
        with Command([sys.executable, "-c", "print()"]) as client:
            for _ in client:
                pass

    # The ResourceWarnings regarding unclosed files we are checking for
    # are issued during finalization. Without this extra effort,
    # finalization wouldn't happen until after the test completes.
    client_is_alive = weakref.ref(client)
    del client
    if client_is_alive():
        gc.collect()
    if client_is_alive():
        warnings.warn(
            "Unable to trigger garbage collection of Command instance, "
            "so unable to check for warnings issued during finalization."
        )


@pytest.mark.parametrize("delete", ["yes", "no"])
def test_RsyncPublisher_integration(env, tmp_path, delete):
    # Integration test of local rsync deployment
    # Ensures that RsyncPublisher can successfully invoke rsync
    files = {"file.txt": "content\n"}
    output = tmp_path / "output"
    output.mkdir()
    for path, content in files.items():
        output.joinpath(path).write_text(content)

    target_path = tmp_path / "target"
    target_path.mkdir()
    target = f"rsync://{target_path.resolve()}?delete={delete}"

    event_iter = publish(env, target, output)
    for line in event_iter:
        print(line)

    target_files = {
        os.fspath(_.relative_to(target_path)): _.read_text()
        for _ in target_path.iterdir()
    }
    assert target_files == files
Test that local rsync publishing works (with and w/o delete option)
Test that local rsync publishing works (with and w/o delete option) This excercises #946
Python
bsd-3-clause
lektor/lektor,lektor/lektor,lektor/lektor,lektor/lektor
import gc
+import os
import sys
import warnings
import weakref

+import pytest
+
from lektor.publisher import Command
+from lektor.publisher import publish


def test_Command_triggers_no_warnings():
    # This excercises the issue where publishing via rsync resulted
    # in ResourceWarnings about unclosed streams.
    with warnings.catch_warnings():
        warnings.simplefilter("error")

        # This is essentially how RsyncPublisher runs rsync.
        with Command([sys.executable, "-c", "print()"]) as client:
            for _ in client:
                pass

    # The ResourceWarnings regarding unclosed files we are checking for
    # are issued during finalization. Without this extra effort,
    # finalization wouldn't happen until after the test completes.
    client_is_alive = weakref.ref(client)
    del client
    if client_is_alive():
        gc.collect()
    if client_is_alive():
        warnings.warn(
            "Unable to trigger garbage collection of Command instance, "
            "so unable to check for warnings issued during finalization."
        )
+
+@pytest.mark.parametrize("delete", ["yes", "no"])
+def test_RsyncPublisher_integration(env, tmp_path, delete):
+    # Integration test of local rsync deployment
+    # Ensures that RsyncPublisher can successfully invoke rsync
+    files = {"file.txt": "content\n"}
+    output = tmp_path / "output"
+    output.mkdir()
+    for path, content in files.items():
+        output.joinpath(path).write_text(content)
+
+    target_path = tmp_path / "target"
+    target_path.mkdir()
+    target = f"rsync://{target_path.resolve()}?delete={delete}"
+
+    event_iter = publish(env, target, output)
+    for line in event_iter:
+        print(line)
+
+    target_files = {
+        os.fspath(_.relative_to(target_path)): _.read_text()
+        for _ in target_path.iterdir()
+    }
+    assert target_files == files
+
Test that local rsync publishing works (with and w/o delete option)
## Code Before:
import gc
import sys
import warnings
import weakref

from lektor.publisher import Command


def test_Command_triggers_no_warnings():
    # This excercises the issue where publishing via rsync resulted
    # in ResourceWarnings about unclosed streams.
    with warnings.catch_warnings():
        warnings.simplefilter("error")

        # This is essentially how RsyncPublisher runs rsync.
        with Command([sys.executable, "-c", "print()"]) as client:
            for _ in client:
                pass

    # The ResourceWarnings regarding unclosed files we are checking for
    # are issued during finalization. Without this extra effort,
    # finalization wouldn't happen until after the test completes.
    client_is_alive = weakref.ref(client)
    del client
    if client_is_alive():
        gc.collect()
    if client_is_alive():
        warnings.warn(
            "Unable to trigger garbage collection of Command instance, "
            "so unable to check for warnings issued during finalization."
        )

## Instruction:
Test that local rsync publishing works (with and w/o delete option)

## Code After:
import gc
import os
import sys
import warnings
import weakref

import pytest

from lektor.publisher import Command
from lektor.publisher import publish


def test_Command_triggers_no_warnings():
    # This excercises the issue where publishing via rsync resulted
    # in ResourceWarnings about unclosed streams.
    with warnings.catch_warnings():
        warnings.simplefilter("error")

        # This is essentially how RsyncPublisher runs rsync.
        with Command([sys.executable, "-c", "print()"]) as client:
            for _ in client:
                pass

    # The ResourceWarnings regarding unclosed files we are checking for
    # are issued during finalization. Without this extra effort,
    # finalization wouldn't happen until after the test completes.
    client_is_alive = weakref.ref(client)
    del client
    if client_is_alive():
        gc.collect()
    if client_is_alive():
        warnings.warn(
            "Unable to trigger garbage collection of Command instance, "
            "so unable to check for warnings issued during finalization."
        )


@pytest.mark.parametrize("delete", ["yes", "no"])
def test_RsyncPublisher_integration(env, tmp_path, delete):
    # Integration test of local rsync deployment
    # Ensures that RsyncPublisher can successfully invoke rsync
    files = {"file.txt": "content\n"}
    output = tmp_path / "output"
    output.mkdir()
    for path, content in files.items():
        output.joinpath(path).write_text(content)

    target_path = tmp_path / "target"
    target_path.mkdir()
    target = f"rsync://{target_path.resolve()}?delete={delete}"

    event_iter = publish(env, target, output)
    for line in event_iter:
        print(line)

    target_files = {
        os.fspath(_.relative_to(target_path)): _.read_text()
        for _ in target_path.iterdir()
    }
    assert target_files == files

import gc
+import os
import sys
import warnings
import weakref

+import pytest
+
from lektor.publisher import Command
+from lektor.publisher import publish


def test_Command_triggers_no_warnings():
    # This excercises the issue where publishing via rsync resulted
    # in ResourceWarnings about unclosed streams.
    with warnings.catch_warnings():
        warnings.simplefilter("error")

        # This is essentially how RsyncPublisher runs rsync.
        with Command([sys.executable, "-c", "print()"]) as client:
            for _ in client:
                pass

    # The ResourceWarnings regarding unclosed files we are checking for
    # are issued during finalization. Without this extra effort,
    # finalization wouldn't happen until after the test completes.
    client_is_alive = weakref.ref(client)
    del client
    if client_is_alive():
        gc.collect()
    if client_is_alive():
        warnings.warn(
            "Unable to trigger garbage collection of Command instance, "
            "so unable to check for warnings issued during finalization."
        )
+
+
+@pytest.mark.parametrize("delete", ["yes", "no"])
+def test_RsyncPublisher_integration(env, tmp_path, delete):
+    # Integration test of local rsync deployment
+    # Ensures that RsyncPublisher can successfully invoke rsync
+    files = {"file.txt": "content\n"}
+    output = tmp_path / "output"
+    output.mkdir()
+    for path, content in files.items():
+        output.joinpath(path).write_text(content)
+
+    target_path = tmp_path / "target"
+    target_path.mkdir()
+    target = f"rsync://{target_path.resolve()}?delete={delete}"
+
+    event_iter = publish(env, target, output)
+    for line in event_iter:
+        print(line)
+
+    target_files = {
+        os.fspath(_.relative_to(target_path)): _.read_text()
+        for _ in target_path.iterdir()
+    }
+    assert target_files == files
c691c256682bec5f9a242ab71ab42d296bbf88a9
nightreads/posts/admin.py
nightreads/posts/admin.py
from django.contrib import admin

# Create your models here.

from django.contrib import admin

from .models import Post, Tag

admin.site.register(Post)
admin.site.register(Tag)
Add `Post`, `Tag` models to Admin
Add `Post`, `Tag` models to Admin
Python
mit
avinassh/nightreads,avinassh/nightreads
from django.contrib import admin

-# Create your models here.
+from .models import Post, Tag
+admin.site.register(Post)
+admin.site.register(Tag)
+
Add `Post`, `Tag` models to Admin
## Code Before:
from django.contrib import admin

# Create your models here.

## Instruction:
Add `Post`, `Tag` models to Admin

## Code After:
from django.contrib import admin

from .models import Post, Tag

admin.site.register(Post)
admin.site.register(Tag)

from django.contrib import admin

-# Create your models here.
+from .models import Post, Tag
+
+admin.site.register(Post)
+admin.site.register(Tag)
89e49a69e700f49fa70391b02c839e3a0a4a1c7f
server/accounts/views.py
server/accounts/views.py
from django.shortcuts import render

# Create your views here.

from django.shortcuts import render
from django.contrib.auth import login, logout
from django.contrib.auth.models import User
from rest_framework import viewsets
from rest_framework.permissions import AllowAny
from rest_framework.response import Response
from rest_framework.views import APIView

# Create your views here.
from . import serializers, permissions, authenticators, models

class UserView(viewsets.ModelViewSet):
    serializer_class = serializers.UserSerializer
    model = User

    def get_permissions(self):
        # allow non-authenticated user to create
        return (AllowAny() if self.request.method == 'POST'
                else permissions.IsStaffOrTargetUser()),

class AuthView(APIView):
    authentication_classes = (authenticators.QuietBasicAuthentication,)

    def post(self, request, *args, **kwargs):
        login(request, request.user)
        return Response(serializers.UserSerializer(request.user).data)

    def delete(self, request, *args, **kwargs):
        logout(request)
        return Response()
Update the models for Auth and User.
Update the models for Auth and User.
Python
agpl-3.0
TomDataworks/angular-inventory,TomDataworks/angular-inventory
from django.shortcuts import render
+from django.contrib.auth import login, logout
+from django.contrib.auth.models import User
+from rest_framework import viewsets
+from rest_framework.permissions import AllowAny
+from rest_framework.response import Response
+from rest_framework.views import APIView

# Create your views here.
+from . import serializers, permissions, authenticators, models
+
+class UserView(viewsets.ModelViewSet):
+    serializer_class = serializers.UserSerializer
+    model = User
+
+    def get_permissions(self):
+        # allow non-authenticated user to create
+        return (AllowAny() if self.request.method == 'POST'
+                else permissions.IsStaffOrTargetUser()),
+
+class AuthView(APIView):
+    authentication_classes = (authenticators.QuietBasicAuthentication,)
+
+    def post(self, request, *args, **kwargs):
+        login(request, request.user)
+        return Response(serializers.UserSerializer(request.user).data)
+
+    def delete(self, request, *args, **kwargs):
+        logout(request)
+        return Response()
+
Update the models for Auth and User.
## Code Before:
from django.shortcuts import render

# Create your views here.

## Instruction:
Update the models for Auth and User.

## Code After:
from django.shortcuts import render
from django.contrib.auth import login, logout
from django.contrib.auth.models import User
from rest_framework import viewsets
from rest_framework.permissions import AllowAny
from rest_framework.response import Response
from rest_framework.views import APIView

# Create your views here.
from . import serializers, permissions, authenticators, models

class UserView(viewsets.ModelViewSet):
    serializer_class = serializers.UserSerializer
    model = User

    def get_permissions(self):
        # allow non-authenticated user to create
        return (AllowAny() if self.request.method == 'POST'
                else permissions.IsStaffOrTargetUser()),

class AuthView(APIView):
    authentication_classes = (authenticators.QuietBasicAuthentication,)

    def post(self, request, *args, **kwargs):
        login(request, request.user)
        return Response(serializers.UserSerializer(request.user).data)

    def delete(self, request, *args, **kwargs):
        logout(request)
        return Response()

from django.shortcuts import render
+from django.contrib.auth import login, logout
+from django.contrib.auth.models import User
+from rest_framework import viewsets
+from rest_framework.permissions import AllowAny
+from rest_framework.response import Response
+from rest_framework.views import APIView

# Create your views here.
+
+from . import serializers, permissions, authenticators, models
+
+class UserView(viewsets.ModelViewSet):
+    serializer_class = serializers.UserSerializer
+    model = User
+
+    def get_permissions(self):
+        # allow non-authenticated user to create
+        return (AllowAny() if self.request.method == 'POST'
+                else permissions.IsStaffOrTargetUser()),
+
+class AuthView(APIView):
+    authentication_classes = (authenticators.QuietBasicAuthentication,)
+
+    def post(self, request, *args, **kwargs):
+        login(request, request.user)
+        return Response(serializers.UserSerializer(request.user).data)
+
+    def delete(self, request, *args, **kwargs):
+        logout(request)
+        return Response()
6454372da6550455735cbcb3a86a966e61c134a1
elasticsearch/__init__.py
elasticsearch/__init__.py
from __future__ import absolute_import

VERSION = (0, 4, 3)
__version__ = VERSION
__versionstr__ = '.'.join(map(str, VERSION))

from elasticsearch.client import Elasticsearch
from elasticsearch.transport import Transport
from elasticsearch.connection_pool import ConnectionPool, ConnectionSelector, \
    RoundRobinSelector
from elasticsearch.serializer import JSONSerializer
from elasticsearch.connection import Connection, RequestsHttpConnection, \
    Urllib3HttpConnection, MemcachedConnection
from elasticsearch.exceptions import *

from __future__ import absolute_import

VERSION = (0, 4, 3)
__version__ = VERSION
__versionstr__ = '.'.join(map(str, VERSION))

from elasticsearch.client import Elasticsearch
from elasticsearch.transport import Transport
from elasticsearch.connection_pool import ConnectionPool, ConnectionSelector, \
    RoundRobinSelector
from elasticsearch.serializer import JSONSerializer
from elasticsearch.connection import Connection, RequestsHttpConnection, \
    Urllib3HttpConnection, MemcachedConnection, ThriftConnection
from elasticsearch.exceptions import *
Allow people to import ThriftConnection from elasticsearch package itself
Allow people to import ThriftConnection from elasticsearch package itself
Python
apache-2.0
veatch/elasticsearch-py,chrisseto/elasticsearch-py,Garrett-R/elasticsearch-py,brunobell/elasticsearch-py,tailhook/elasticsearch-py,AlexMaskovyak/elasticsearch-py,brunobell/elasticsearch-py,mjhennig/elasticsearch-py,thomdixon/elasticsearch-py,kelp404/elasticsearch-py,gardsted/elasticsearch-py,elastic/elasticsearch-py,elastic/elasticsearch-py,konradkonrad/elasticsearch-py,liuyi1112/elasticsearch-py,prinsherbert/elasticsearch-py
from __future__ import absolute_import

VERSION = (0, 4, 3)
__version__ = VERSION
__versionstr__ = '.'.join(map(str, VERSION))

from elasticsearch.client import Elasticsearch
from elasticsearch.transport import Transport
from elasticsearch.connection_pool import ConnectionPool, ConnectionSelector, \
    RoundRobinSelector
from elasticsearch.serializer import JSONSerializer
from elasticsearch.connection import Connection, RequestsHttpConnection, \
-    Urllib3HttpConnection, MemcachedConnection
+    Urllib3HttpConnection, MemcachedConnection, ThriftConnection
from elasticsearch.exceptions import *
Allow people to import ThriftConnection from elasticsearch package itself
## Code Before:
from __future__ import absolute_import

VERSION = (0, 4, 3)
__version__ = VERSION
__versionstr__ = '.'.join(map(str, VERSION))

from elasticsearch.client import Elasticsearch
from elasticsearch.transport import Transport
from elasticsearch.connection_pool import ConnectionPool, ConnectionSelector, \
    RoundRobinSelector
from elasticsearch.serializer import JSONSerializer
from elasticsearch.connection import Connection, RequestsHttpConnection, \
    Urllib3HttpConnection, MemcachedConnection
from elasticsearch.exceptions import *

## Instruction:
Allow people to import ThriftConnection from elasticsearch package itself

## Code After:
from __future__ import absolute_import

VERSION = (0, 4, 3)
__version__ = VERSION
__versionstr__ = '.'.join(map(str, VERSION))

from elasticsearch.client import Elasticsearch
from elasticsearch.transport import Transport
from elasticsearch.connection_pool import ConnectionPool, ConnectionSelector, \
    RoundRobinSelector
from elasticsearch.serializer import JSONSerializer
from elasticsearch.connection import Connection, RequestsHttpConnection, \
    Urllib3HttpConnection, MemcachedConnection, ThriftConnection
from elasticsearch.exceptions import *

from __future__ import absolute_import

VERSION = (0, 4, 3)
__version__ = VERSION
__versionstr__ = '.'.join(map(str, VERSION))

from elasticsearch.client import Elasticsearch
from elasticsearch.transport import Transport
from elasticsearch.connection_pool import ConnectionPool, ConnectionSelector, \
    RoundRobinSelector
from elasticsearch.serializer import JSONSerializer
from elasticsearch.connection import Connection, RequestsHttpConnection, \
-    Urllib3HttpConnection, MemcachedConnection
+    Urllib3HttpConnection, MemcachedConnection, ThriftConnection
?                                              ++++++++++++++++++
from elasticsearch.exceptions import *
ce4a588f0104498f5cd2491d85ef39806eb2ba7f
tests/filter_integration_tests/test_filters_with_mongo_storage.py
tests/filter_integration_tests/test_filters_with_mongo_storage.py
from tests.base_case import ChatBotMongoTestCase


class RepetitiveResponseFilterTestCase(ChatBotMongoTestCase):
    """
    Test case for the RepetitiveResponseFilter class.
    """

    def test_filter_selection(self):
        """
        Test that repetitive responses are filtered out of the results.
        """
        from chatterbot.filters import RepetitiveResponseFilter
        from chatterbot.trainers import ListTrainer

        self.chatbot.filters = (RepetitiveResponseFilter(), )
        self.chatbot.set_trainer(ListTrainer, **self.get_kwargs())

        self.chatbot.train([
            'Hello',
            'Hi',
            'Hello',
            'Hi',
            'Hello',
            'Hi, how are you?',
            'I am good.'
        ])

        first_response = self.chatbot.get_response('Hello')
        second_response = self.chatbot.get_response('Hello')

        self.assertEqual(first_response.text, 'Hi')
        self.assertEqual(second_response.text, 'Hi, how are you?')

from tests.base_case import ChatBotMongoTestCase


class RepetitiveResponseFilterTestCase(ChatBotMongoTestCase):
    """
    Test case for the RepetitiveResponseFilter class.
    """

    def test_filter_selection(self):
        """
        Test that repetitive responses are filtered out of the results.
        """
        from chatterbot.filters import RepetitiveResponseFilter
        from chatterbot.trainers import ListTrainer

        self.chatbot.filters = (RepetitiveResponseFilter(), )
        self.chatbot.set_trainer(ListTrainer, **self.get_kwargs())

        self.chatbot.train([
            'Hello',
            'Hi',
            'Hello',
            'Hi',
            'Hello',
            'Hi, how are you?',
            'I am good.'
        ])

        first_response = self.chatbot.get_response('Hello')
        second_response = self.chatbot.get_response('Hello')

        self.assertEqual('Hi', first_response.text)
        self.assertEqual('Hi, how are you?', second_response.text)
Put the calculated value on the right
Put the calculated value on the right
Python
bsd-3-clause
gunthercox/ChatterBot,vkosuri/ChatterBot
from tests.base_case import ChatBotMongoTestCase


class RepetitiveResponseFilterTestCase(ChatBotMongoTestCase):
    """
    Test case for the RepetitiveResponseFilter class.
    """

    def test_filter_selection(self):
        """
        Test that repetitive responses are filtered out of the results.
        """
        from chatterbot.filters import RepetitiveResponseFilter
        from chatterbot.trainers import ListTrainer

        self.chatbot.filters = (RepetitiveResponseFilter(), )
        self.chatbot.set_trainer(ListTrainer, **self.get_kwargs())

        self.chatbot.train([
            'Hello',
            'Hi',
            'Hello',
            'Hi',
            'Hello',
            'Hi, how are you?',
            'I am good.'
        ])

        first_response = self.chatbot.get_response('Hello')
        second_response = self.chatbot.get_response('Hello')

-        self.assertEqual(first_response.text, 'Hi')
+        self.assertEqual('Hi', first_response.text)
-        self.assertEqual(second_response.text, 'Hi, how are you?')
+        self.assertEqual('Hi, how are you?', second_response.text)
Put the calculated value on the right
## Code Before:
from tests.base_case import ChatBotMongoTestCase


class RepetitiveResponseFilterTestCase(ChatBotMongoTestCase):
    """
    Test case for the RepetitiveResponseFilter class.
    """

    def test_filter_selection(self):
        """
        Test that repetitive responses are filtered out of the results.
        """
        from chatterbot.filters import RepetitiveResponseFilter
        from chatterbot.trainers import ListTrainer

        self.chatbot.filters = (RepetitiveResponseFilter(), )
        self.chatbot.set_trainer(ListTrainer, **self.get_kwargs())

        self.chatbot.train([
            'Hello',
            'Hi',
            'Hello',
            'Hi',
            'Hello',
            'Hi, how are you?',
            'I am good.'
        ])

        first_response = self.chatbot.get_response('Hello')
        second_response = self.chatbot.get_response('Hello')

        self.assertEqual(first_response.text, 'Hi')
        self.assertEqual(second_response.text, 'Hi, how are you?')

## Instruction:
Put the calculated value on the right

## Code After:
from tests.base_case import ChatBotMongoTestCase


class RepetitiveResponseFilterTestCase(ChatBotMongoTestCase):
    """
    Test case for the RepetitiveResponseFilter class.
    """

    def test_filter_selection(self):
        """
        Test that repetitive responses are filtered out of the results.
        """
        from chatterbot.filters import RepetitiveResponseFilter
        from chatterbot.trainers import ListTrainer

        self.chatbot.filters = (RepetitiveResponseFilter(), )
        self.chatbot.set_trainer(ListTrainer, **self.get_kwargs())

        self.chatbot.train([
            'Hello',
            'Hi',
            'Hello',
            'Hi',
            'Hello',
            'Hi, how are you?',
            'I am good.'
        ])

        first_response = self.chatbot.get_response('Hello')
        second_response = self.chatbot.get_response('Hello')

        self.assertEqual('Hi', first_response.text)
        self.assertEqual('Hi, how are you?', second_response.text)

from tests.base_case import ChatBotMongoTestCase


class RepetitiveResponseFilterTestCase(ChatBotMongoTestCase):
    """
    Test case for the RepetitiveResponseFilter class.
    """

    def test_filter_selection(self):
        """
        Test that repetitive responses are filtered out of the results.
        """
        from chatterbot.filters import RepetitiveResponseFilter
        from chatterbot.trainers import ListTrainer

        self.chatbot.filters = (RepetitiveResponseFilter(), )
        self.chatbot.set_trainer(ListTrainer, **self.get_kwargs())

        self.chatbot.train([
            'Hello',
            'Hi',
            'Hello',
            'Hi',
            'Hello',
            'Hi, how are you?',
            'I am good.'
        ])

        first_response = self.chatbot.get_response('Hello')
        second_response = self.chatbot.get_response('Hello')

-        self.assertEqual(first_response.text, 'Hi')
?                                            ------
+        self.assertEqual('Hi', first_response.text)
?                         ++++++
-        self.assertEqual(second_response.text, 'Hi, how are you?')
+        self.assertEqual('Hi, how are you?', second_response.text)
50621ef5b141470879a786088391a516b4f63d52
note/models.py
note/models.py
from django.db import models
from django.contrib.auth.models import User

# Create your models here.

# Create your models here.
class Note(models.Model):
    # always reference the User class using setting conf
    author = models.ForeignKey(User)
    value = models.IntegerField(max_length=255)

    def __str__(self):
        return "your note is %s" % self.value

from django.db import models
from django.conf import settings

# Create your models here.

# Create your models here.
class Note(models.Model):
    # always reference the User class using setting conf
    author = models.ForeignKey(settings.AUTH_USER_MODEL)
    value = models.IntegerField(max_length=255)

    def __str__(self):
        return "your note is %s" % self.value
Migrate to a custom User class.
Migrate to a custom User class. Step1: reference the User class, using the AUTH_USER_MODEL setting.
Python
bsd-2-clause
LeMeteore/boomer2
from django.db import models
-from django.contrib.auth.models import User
+from django.conf import settings

# Create your models here.

# Create your models here.
class Note(models.Model):
    # always reference the User class using setting conf
-    author = models.ForeignKey(User)
+    author = models.ForeignKey(settings.AUTH_USER_MODEL)
    value = models.IntegerField(max_length=255)

    def __str__(self):
        return "your note is %s" % self.value
Migrate to a custom User class.
## Code Before:
from django.db import models
from django.contrib.auth.models import User

# Create your models here.

# Create your models here.
class Note(models.Model):
    # always reference the User class using setting conf
    author = models.ForeignKey(User)
    value = models.IntegerField(max_length=255)

    def __str__(self):
        return "your note is %s" % self.value

## Instruction:
Migrate to a custom User class.

## Code After:
from django.db import models
from django.conf import settings

# Create your models here.

# Create your models here.
class Note(models.Model):
    # always reference the User class using setting conf
    author = models.ForeignKey(settings.AUTH_USER_MODEL)
    value = models.IntegerField(max_length=255)

    def __str__(self):
        return "your note is %s" % self.value

from django.db import models
-from django.contrib.auth.models import User
+from django.conf import settings

# Create your models here.

# Create your models here.
class Note(models.Model):
    # always reference the User class using setting conf
-    author = models.ForeignKey(User)
+    author = models.ForeignKey(settings.AUTH_USER_MODEL)
    value = models.IntegerField(max_length=255)

    def __str__(self):
        return "your note is %s" % self.value
69a94a60d04991ba5f8c25276455dedc3a0b898c
setup.py
setup.py
from distutils.core import setup

setup(
    name='pypicache',
    version='0.1',
    description='PyPI caching and proxying server',
    author='Michael Twomey',
    author_email='[email protected]',
    url='http://readthedocs.org/projects/pypicache/',
    packages=['pypicache'],
)

from distutils.core import setup

setup(
    name='pypicache',
    version='0.1',
    description='PyPI caching and proxying server',
    author='Michael Twomey',
    author_email='[email protected]',
    url='http://readthedocs.org/projects/pypicache/',
    packages=['pypicache'],
    package_data={
        'pypicache': [
            'static/*/*',
            'templates/*.html',
        ]
    }
)
Install assets when installing the package.
Install assets when installing the package.
Python
bsd-2-clause
micktwomey/pypicache
from distutils.core import setup

setup(
    name='pypicache',
    version='0.1',
    description='PyPI caching and proxying server',
    author='Michael Twomey',
    author_email='[email protected]',
    url='http://readthedocs.org/projects/pypicache/',
    packages=['pypicache'],
+    package_data={
+        'pypicache': [
+            'static/*/*',
+            'templates/*.html',
+        ]
+    }
)
Install assets when installing the package.
## Code Before:
from distutils.core import setup

setup(
    name='pypicache',
    version='0.1',
    description='PyPI caching and proxying server',
    author='Michael Twomey',
    author_email='[email protected]',
    url='http://readthedocs.org/projects/pypicache/',
    packages=['pypicache'],
)

## Instruction:
Install assets when installing the package.

## Code After:
from distutils.core import setup

setup(
    name='pypicache',
    version='0.1',
    description='PyPI caching and proxying server',
    author='Michael Twomey',
    author_email='[email protected]',
    url='http://readthedocs.org/projects/pypicache/',
    packages=['pypicache'],
    package_data={
        'pypicache': [
            'static/*/*',
            'templates/*.html',
        ]
    }
)

from distutils.core import setup

setup(
    name='pypicache',
    version='0.1',
    description='PyPI caching and proxying server',
    author='Michael Twomey',
    author_email='[email protected]',
    url='http://readthedocs.org/projects/pypicache/',
    packages=['pypicache'],
+    package_data={
+        'pypicache': [
+            'static/*/*',
+            'templates/*.html',
+        ]
+    }
)
155fca9e7e2c8cfee8d2600268ebae8d94b2e7fe
wagtail/search/apps.py
wagtail/search/apps.py
from django.apps import AppConfig
from django.core.checks import Tags, Warning, register
from django.db import connection
from django.utils.translation import gettext_lazy as _

from wagtail.search.signal_handlers import register_signal_handlers

from . import checks  # NOQA


class WagtailSearchAppConfig(AppConfig):
    name = 'wagtail.search'
    label = 'wagtailsearch'
    verbose_name = _("Wagtail search")
    default_auto_field = 'django.db.models.AutoField'

    def ready(self):
        register_signal_handlers()

        if connection.vendor == 'postgresql':
            # Only PostgreSQL has support for tsvector weights
            from wagtail.search.backends.database.postgres.weights import set_weights
            set_weights()

        from wagtail.search.models import IndexEntry
        IndexEntry.add_generic_relations()


@register(Tags.compatibility, Tags.database)
def check_if_sqlite_version_is_supported(app_configs, **kwargs):
    if connection.vendor == 'sqlite':
        import sqlite3
        if sqlite3.sqlite_version_info < (3, 19, 0):
            return [Warning('Your SQLite version is older than 3.19.0. A fallback search backend will be used instead.', hint='Upgrade your SQLite version to at least 3.19.0', id='wagtailsearch.W002', obj=WagtailSearchAppConfig)]
    return []

from django.apps import AppConfig
from django.core.checks import Tags, Warning, register
from django.db import connection
from django.utils.translation import gettext_lazy as _

from wagtail.search.signal_handlers import register_signal_handlers

from . import checks  # NOQA


class WagtailSearchAppConfig(AppConfig):
    name = 'wagtail.search'
    label = 'wagtailsearch'
    verbose_name = _("Wagtail search")
    default_auto_field = 'django.db.models.AutoField'

    def ready(self):
        register_signal_handlers()

        if connection.vendor == 'postgresql':
            # Only PostgreSQL has support for tsvector weights
            from wagtail.search.backends.database.postgres.weights import set_weights
            set_weights()

        from wagtail.search.models import IndexEntry
        IndexEntry.add_generic_relations()


@register(Tags.compatibility, Tags.database)
def check_if_sqlite_version_is_supported(app_configs, **kwargs):
    if connection.vendor == 'sqlite':
        import sqlite3

        from wagtail.search.backends.database.sqlite.utils import fts5_available

        if sqlite3.sqlite_version_info < (3, 19, 0):
            return [Warning('Your SQLite version is older than 3.19.0. A fallback search backend will be used instead.', hint='Upgrade your SQLite version to at least 3.19.0', id='wagtailsearch.W002', obj=WagtailSearchAppConfig)]
        elif not fts5_available():
            return [Warning('Your SQLite installation is missing the fts5 extension. A fallback search backend will be used instead.', hint='Upgrade your SQLite installation to a version with fts5 enabled', id='wagtailsearch.W003', obj=WagtailSearchAppConfig)]
    return []
Add alternative warning if sqlite is >=3.19 but is missing fts5 support
Add alternative warning if sqlite is >=3.19 but is missing fts5 support
Python
bsd-3-clause
wagtail/wagtail,thenewguy/wagtail,mixxorz/wagtail,rsalmaso/wagtail,zerolab/wagtail,wagtail/wagtail,mixxorz/wagtail,mixxorz/wagtail,zerolab/wagtail,thenewguy/wagtail,jnns/wagtail,jnns/wagtail,zerolab/wagtail,zerolab/wagtail,thenewguy/wagtail,wagtail/wagtail,jnns/wagtail,rsalmaso/wagtail,mixxorz/wagtail,rsalmaso/wagtail,jnns/wagtail,rsalmaso/wagtail,thenewguy/wagtail,mixxorz/wagtail,thenewguy/wagtail,rsalmaso/wagtail,wagtail/wagtail,wagtail/wagtail,zerolab/wagtail
from django.apps import AppConfig
from django.core.checks import Tags, Warning, register
from django.db import connection
from django.utils.translation import gettext_lazy as _

from wagtail.search.signal_handlers import register_signal_handlers

from . import checks  # NOQA


class WagtailSearchAppConfig(AppConfig):
    name = 'wagtail.search'
    label = 'wagtailsearch'
    verbose_name = _("Wagtail search")
    default_auto_field = 'django.db.models.AutoField'

    def ready(self):
        register_signal_handlers()

        if connection.vendor == 'postgresql':
            # Only PostgreSQL has support for tsvector weights
            from wagtail.search.backends.database.postgres.weights import set_weights
            set_weights()

        from wagtail.search.models import IndexEntry
        IndexEntry.add_generic_relations()


@register(Tags.compatibility, Tags.database)
def check_if_sqlite_version_is_supported(app_configs, **kwargs):
    if connection.vendor == 'sqlite':
        import sqlite3
+
+        from wagtail.search.backends.database.sqlite.utils import fts5_available
+
        if sqlite3.sqlite_version_info < (3, 19, 0):
            return [Warning('Your SQLite version is older than 3.19.0. A fallback search backend will be used instead.', hint='Upgrade your SQLite version to at least 3.19.0', id='wagtailsearch.W002', obj=WagtailSearchAppConfig)]
+        elif not fts5_available():
+            return [Warning('Your SQLite installation is missing the fts5 extension. A fallback search backend will be used instead.', hint='Upgrade your SQLite installation to a version with fts5 enabled', id='wagtailsearch.W003', obj=WagtailSearchAppConfig)]
    return []
Add alternative warning if sqlite is >=3.19 but is missing fts5 support
## Code Before:
from django.apps import AppConfig
from django.core.checks import Tags, Warning, register
from django.db import connection
from django.utils.translation import gettext_lazy as _

from wagtail.search.signal_handlers import register_signal_handlers

from . import checks  # NOQA


class WagtailSearchAppConfig(AppConfig):
    name = 'wagtail.search'
    label = 'wagtailsearch'
    verbose_name = _("Wagtail search")
    default_auto_field = 'django.db.models.AutoField'

    def ready(self):
        register_signal_handlers()

        if connection.vendor == 'postgresql':
            # Only PostgreSQL has support for tsvector weights
            from wagtail.search.backends.database.postgres.weights import set_weights
            set_weights()

        from wagtail.search.models import IndexEntry
        IndexEntry.add_generic_relations()


@register(Tags.compatibility, Tags.database)
def check_if_sqlite_version_is_supported(app_configs, **kwargs):
    if connection.vendor == 'sqlite':
        import sqlite3
        if sqlite3.sqlite_version_info < (3, 19, 0):
            return [Warning('Your SQLite version is older than 3.19.0. A fallback search backend will be used instead.', hint='Upgrade your SQLite version to at least 3.19.0', id='wagtailsearch.W002', obj=WagtailSearchAppConfig)]
    return []

## Instruction:
Add alternative warning if sqlite is >=3.19 but is missing fts5 support

## Code After:
from django.apps import AppConfig
from django.core.checks import Tags, Warning, register
from django.db import connection
from django.utils.translation import gettext_lazy as _

from wagtail.search.signal_handlers import register_signal_handlers

from . import checks  # NOQA


class WagtailSearchAppConfig(AppConfig):
    name = 'wagtail.search'
    label = 'wagtailsearch'
    verbose_name = _("Wagtail search")
    default_auto_field = 'django.db.models.AutoField'

    def ready(self):
        register_signal_handlers()

        if connection.vendor == 'postgresql':
            # Only PostgreSQL has support for tsvector weights
            from wagtail.search.backends.database.postgres.weights import set_weights
            set_weights()

        from wagtail.search.models import IndexEntry
        IndexEntry.add_generic_relations()


@register(Tags.compatibility, Tags.database)
def check_if_sqlite_version_is_supported(app_configs, **kwargs):
    if connection.vendor == 'sqlite':
        import sqlite3

        from wagtail.search.backends.database.sqlite.utils import fts5_available

        if sqlite3.sqlite_version_info < (3, 19, 0):
            return [Warning('Your SQLite version is older than 3.19.0. A fallback search backend will be used instead.', hint='Upgrade your SQLite version to at least 3.19.0', id='wagtailsearch.W002', obj=WagtailSearchAppConfig)]
        elif not fts5_available():
            return [Warning('Your SQLite installation is missing the fts5 extension. A fallback search backend will be used instead.', hint='Upgrade your SQLite installation to a version with fts5 enabled', id='wagtailsearch.W003', obj=WagtailSearchAppConfig)]
    return []

from django.apps import AppConfig
from django.core.checks import Tags, Warning, register
from django.db import connection
from django.utils.translation import gettext_lazy as _

from wagtail.search.signal_handlers import register_signal_handlers

from . import checks  # NOQA


class WagtailSearchAppConfig(AppConfig):
    name = 'wagtail.search'
    label = 'wagtailsearch'
    verbose_name = _("Wagtail search")
    default_auto_field = 'django.db.models.AutoField'

    def ready(self):
        register_signal_handlers()

        if connection.vendor == 'postgresql':
            # Only PostgreSQL has support for tsvector weights
            from wagtail.search.backends.database.postgres.weights import set_weights
            set_weights()

        from wagtail.search.models import IndexEntry
        IndexEntry.add_generic_relations()


@register(Tags.compatibility, Tags.database)
def check_if_sqlite_version_is_supported(app_configs, **kwargs):
    if connection.vendor == 'sqlite':
        import sqlite3
+
+        from wagtail.search.backends.database.sqlite.utils import fts5_available
+
        if sqlite3.sqlite_version_info < (3, 19, 0):
            return [Warning('Your SQLite version is older than 3.19.0. A fallback search backend will be used instead.', hint='Upgrade your SQLite version to at least 3.19.0', id='wagtailsearch.W002', obj=WagtailSearchAppConfig)]
+        elif not fts5_available():
+            return [Warning('Your SQLite installation is missing the fts5 extension. A fallback search backend will be used instead.', hint='Upgrade your SQLite installation to a version with fts5 enabled', id='wagtailsearch.W003', obj=WagtailSearchAppConfig)]
    return []
8f0956313b140d7a0d51510cd9b4a5eec7d54570
plugins/holland.lib.lvm/tests/test_util.py
plugins/holland.lib.lvm/tests/test_util.py
import os
import signal

from nose.tools import *

from holland.lib.lvm.util import *

def test_format_bytes():
    assert_equals(format_bytes(1024), '1.00KB')
    assert_equals(format_bytes(0), '0.00Bytes')

def test_getmount():
    assert_equals(getmount('/'), '/')
    assert_equals(getmount('/foobarbaz'), '/')

def test_getdevice():
    # XXX: bad hack
    dev = open('/etc/mtab', 'r').readline().split()[0].strip()
    assert_equals(getdevice('/'), dev)
    assert_equals(getdevice('/foobarbaz'), None)

def test_relpath():
    assert_raises(ValueError, relpath, '')
    assert_equals(relpath('/foo/bar/baz', '/foo/bar'), 'baz')
    assert_equals(relpath('/foo/bar/', '/foo/bar/'), os.curdir)

def test_signalmanager():
    sigmgr = SignalManager()
    sigmgr.trap(signal.SIGINT)
    os.kill(os.getpid(), signal.SIGINT)
    ok_(sigmgr.pending)
    assert_equals(sigmgr.pending[0], signal.SIGINT)
    sigmgr.restore()
    assert_raises(KeyboardInterrupt, os.kill, os.getpid(), signal.SIGINT)

import os
import signal

from nose.tools import *

from holland.lib.lvm.util import *

def test_format_bytes():
    assert_equals(format_bytes(1024), '1.00KB')
    assert_equals(format_bytes(0), '0.00Bytes')

def test_getmount():
    assert_equals(getmount('/'), '/')
    assert_equals(getmount('/foobarbaz'), '/')

def test_getdevice():
    # XXX: bad hack
    dev = open('/etc/mtab', 'r').readline().split()[0].strip()
    assert_equals(getdevice('/'), dev)
    assert_equals(getdevice('/foobarbaz'), None)

def test_relpath():
    assert_raises(ValueError, relpath, '')
    assert_equals(relpath('/foo/bar/baz', '/foo/bar'), 'baz')
    assert_equals(relpath('/foo/bar/', '/foo/bar/'), os.curdir)

def test_signalmanager():
    sigmgr = SignalManager()
    sigmgr.trap(signal.SIGINT)
    os.kill(os.getpid(), signal.SIGINT)
    ok_(sigmgr.pending)
    assert_equals(sigmgr.pending[0], signal.SIGINT)
    sigmgr.restore()
    assert_raises(KeyboardInterrupt, os.kill, os.getpid(), signal.SIGINT)

def test_parsebytes():
    # bytes without units should be interpretted as MB
    bytes = parse_bytes('1024')
    assert_equals(bytes, 1024**3)
    # this should not be bytes
    ok_(bytes > 1024)

    bytes = parse_bytes('1024G')
    assert_equals(bytes, 1024**4)
Add test case to holland.lib.lvm for parsing snapshot-size without units
Add test case to holland.lib.lvm for parsing snapshot-size without units
Python
bsd-3-clause
m00dawg/holland,m00dawg/holland
import os
import signal

from nose.tools import *

from holland.lib.lvm.util import *

def test_format_bytes():
    assert_equals(format_bytes(1024), '1.00KB')
    assert_equals(format_bytes(0), '0.00Bytes')

def test_getmount():
    assert_equals(getmount('/'), '/')
    assert_equals(getmount('/foobarbaz'), '/')

def test_getdevice():
    # XXX: bad hack
    dev = open('/etc/mtab', 'r').readline().split()[0].strip()
    assert_equals(getdevice('/'), dev)
    assert_equals(getdevice('/foobarbaz'), None)

def test_relpath():
    assert_raises(ValueError, relpath, '')
    assert_equals(relpath('/foo/bar/baz', '/foo/bar'), 'baz')
    assert_equals(relpath('/foo/bar/', '/foo/bar/'), os.curdir)

def test_signalmanager():
    sigmgr = SignalManager()
    sigmgr.trap(signal.SIGINT)
    os.kill(os.getpid(), signal.SIGINT)
    ok_(sigmgr.pending)
    assert_equals(sigmgr.pending[0], signal.SIGINT)
    sigmgr.restore()
    assert_raises(KeyboardInterrupt, os.kill, os.getpid(), signal.SIGINT)
+def test_parsebytes():
+    # bytes without units should be interpretted as MB
+    bytes = parse_bytes('1024')
+    assert_equals(bytes, 1024**3)
+    # this should not be bytes
+    ok_(bytes > 1024)
+
+    bytes = parse_bytes('1024G')
+    assert_equals(bytes, 1024**4)
+
Add test case to holland.lib.lvm for parsing snapshot-size without units
## Code Before:
import os
import signal

from nose.tools import *

from holland.lib.lvm.util import *

def test_format_bytes():
    assert_equals(format_bytes(1024), '1.00KB')
    assert_equals(format_bytes(0), '0.00Bytes')

def test_getmount():
    assert_equals(getmount('/'), '/')
    assert_equals(getmount('/foobarbaz'), '/')

def test_getdevice():
    # XXX: bad hack
    dev = open('/etc/mtab', 'r').readline().split()[0].strip()
    assert_equals(getdevice('/'), dev)
    assert_equals(getdevice('/foobarbaz'), None)

def test_relpath():
    assert_raises(ValueError, relpath, '')
    assert_equals(relpath('/foo/bar/baz', '/foo/bar'), 'baz')
    assert_equals(relpath('/foo/bar/', '/foo/bar/'), os.curdir)

def test_signalmanager():
    sigmgr = SignalManager()
    sigmgr.trap(signal.SIGINT)
    os.kill(os.getpid(), signal.SIGINT)
    ok_(sigmgr.pending)
    assert_equals(sigmgr.pending[0], signal.SIGINT)
    sigmgr.restore()
    assert_raises(KeyboardInterrupt, os.kill, os.getpid(), signal.SIGINT)

## Instruction:
Add test case to holland.lib.lvm for parsing snapshot-size without units

## Code After:
import os
import signal

from nose.tools import *

from holland.lib.lvm.util import *

def test_format_bytes():
    assert_equals(format_bytes(1024), '1.00KB')
    assert_equals(format_bytes(0), '0.00Bytes')

def test_getmount():
    assert_equals(getmount('/'), '/')
    assert_equals(getmount('/foobarbaz'), '/')

def test_getdevice():
    # XXX: bad hack
    dev = open('/etc/mtab', 'r').readline().split()[0].strip()
    assert_equals(getdevice('/'), dev)
    assert_equals(getdevice('/foobarbaz'), None)

def test_relpath():
    assert_raises(ValueError, relpath, '')
    assert_equals(relpath('/foo/bar/baz', '/foo/bar'), 'baz')
    assert_equals(relpath('/foo/bar/', '/foo/bar/'), os.curdir)

def test_signalmanager():
    sigmgr = SignalManager()
    sigmgr.trap(signal.SIGINT)
    os.kill(os.getpid(), signal.SIGINT)
    ok_(sigmgr.pending)
    assert_equals(sigmgr.pending[0], signal.SIGINT)
    sigmgr.restore()
    assert_raises(KeyboardInterrupt, os.kill, os.getpid(), signal.SIGINT)

def test_parsebytes():
    # bytes without units should be interpretted as MB
    bytes = parse_bytes('1024')
    assert_equals(bytes, 1024**3)
    # this should not be bytes
    ok_(bytes > 1024)

    bytes = parse_bytes('1024G')
    assert_equals(bytes, 1024**4)

import os
import signal

from nose.tools import *

from holland.lib.lvm.util import *

def test_format_bytes():
    assert_equals(format_bytes(1024), '1.00KB')
    assert_equals(format_bytes(0), '0.00Bytes')

def test_getmount():
    assert_equals(getmount('/'), '/')
    assert_equals(getmount('/foobarbaz'), '/')

def test_getdevice():
    # XXX: bad hack
    dev = open('/etc/mtab', 'r').readline().split()[0].strip()
    assert_equals(getdevice('/'), dev)
    assert_equals(getdevice('/foobarbaz'), None)

def test_relpath():
    assert_raises(ValueError, relpath, '')
    assert_equals(relpath('/foo/bar/baz', '/foo/bar'), 'baz')
    assert_equals(relpath('/foo/bar/', '/foo/bar/'), os.curdir)

def test_signalmanager():
    sigmgr = SignalManager()
    sigmgr.trap(signal.SIGINT)
    os.kill(os.getpid(), signal.SIGINT)
    ok_(sigmgr.pending)
    assert_equals(sigmgr.pending[0], signal.SIGINT)
    sigmgr.restore()
    assert_raises(KeyboardInterrupt, os.kill, os.getpid(), signal.SIGINT)
+
+def test_parsebytes():
+    # bytes without units should be interpretted as MB
+    bytes = parse_bytes('1024')
+    assert_equals(bytes, 1024**3)
+    # this should not be bytes
+    ok_(bytes > 1024)
+
+    bytes = parse_bytes('1024G')
+    assert_equals(bytes, 1024**4)
dd9161c772e3c345fd21f742b09a62d43f7fa069
scripts/c19.py
scripts/c19.py
from __future__ import print_function

import sys
from pyspark.sql import SparkSession
from pyspark.sql.functions import lit, col

if __name__ == "__main__":
    if len(sys.argv) != 3:
        print("Usage: c19.py <input> <output>", file=sys.stderr)
        exit(-1)

    spark = SparkSession.builder.appName('Select c19').getOrCreate()

    raw = spark.read.option('mergeSchema','true').load(sys.argv[1])

    df = raw.filter(col('date') < '1900')

    opens = df.filter(col('open') == 'true')\
              .select('series', 'date', lit(1).alias('inopen')).distinct()

    df.join(opens, ['series', 'date'], 'left_outer')\
      .filter((col('open') == 'true') | col('inopen').isNull())\
      .drop('inopen')\
      .dropDuplicates(['id'])\
      .write.save(sys.argv[2])

    spark.stop()

from __future__ import print_function

import sys
from pyspark.sql import SparkSession
from pyspark.sql.functions import lit, col, datediff

if __name__ == "__main__":
    if len(sys.argv) != 3:
        print("Usage: c19.py <input> <output>", file=sys.stderr)
        exit(-1)

    spark = SparkSession.builder.appName('Select c19').getOrCreate()

    raw = spark.read.option('mergeSchema','true').load(sys.argv[1])

    df = raw.filter(col('date') < '1900')

    opens = df.filter(col('open') == 'true')\
              .select('series', 'date', lit(1).alias('inopen')).distinct()

    df.join(opens, ['series', 'date'], 'left_outer')\
      .filter((col('open') == 'true') | col('inopen').isNull())\
      .drop('inopen')\
      .dropDuplicates(['id'])\
      .withColumn('ednum', col('ed').cast('int')).na.fill(1, ['ednum']) \
      .withColumn('hour', datediff(col('date'), lit('1970-01-01')) * 24 + col('ednum') - 1) \
      .drop('ednum') \
      .write.save(sys.argv[2])

    spark.stop()
Add numeric time column (hour) that respects editions.
Add numeric time column (hour) that respects editions.
Python
apache-2.0
ViralTexts/vt-passim,ViralTexts/vt-passim,ViralTexts/vt-passim
from __future__ import print_function

import sys
from pyspark.sql import SparkSession
-from pyspark.sql.functions import lit, col
+from pyspark.sql.functions import lit, col, datediff

if __name__ == "__main__":
    if len(sys.argv) != 3:
        print("Usage: c19.py <input> <output>", file=sys.stderr)
        exit(-1)

    spark = SparkSession.builder.appName('Select c19').getOrCreate()

    raw = spark.read.option('mergeSchema','true').load(sys.argv[1])

    df = raw.filter(col('date') < '1900')

    opens = df.filter(col('open') == 'true')\
              .select('series', 'date', lit(1).alias('inopen')).distinct()

    df.join(opens, ['series', 'date'], 'left_outer')\
      .filter((col('open') == 'true') | col('inopen').isNull())\
      .drop('inopen')\
      .dropDuplicates(['id'])\
+      .withColumn('ednum', col('ed').cast('int')).na.fill(1, ['ednum']) \
+      .withColumn('hour', datediff(col('date'), lit('1970-01-01')) * 24 + col('ednum') - 1) \
+      .drop('ednum') \
      .write.save(sys.argv[2])

    spark.stop()
Add numeric time column (hour) that respects editions.
## Code Before:
from __future__ import print_function

import sys
from pyspark.sql import SparkSession
from pyspark.sql.functions import lit, col

if __name__ == "__main__":
    if len(sys.argv) != 3:
        print("Usage: c19.py <input> <output>", file=sys.stderr)
        exit(-1)

    spark = SparkSession.builder.appName('Select c19').getOrCreate()

    raw = spark.read.option('mergeSchema','true').load(sys.argv[1])

    df = raw.filter(col('date') < '1900')

    opens = df.filter(col('open') == 'true')\
              .select('series', 'date', lit(1).alias('inopen')).distinct()

    df.join(opens, ['series', 'date'], 'left_outer')\
      .filter((col('open') == 'true') | col('inopen').isNull())\
      .drop('inopen')\
      .dropDuplicates(['id'])\
      .write.save(sys.argv[2])

    spark.stop()

## Instruction:
Add numeric time column (hour) that respects editions.

## Code After:
from __future__ import print_function

import sys
from pyspark.sql import SparkSession
from pyspark.sql.functions import lit, col, datediff

if __name__ == "__main__":
    if len(sys.argv) != 3:
        print("Usage: c19.py <input> <output>", file=sys.stderr)
        exit(-1)

    spark = SparkSession.builder.appName('Select c19').getOrCreate()

    raw = spark.read.option('mergeSchema','true').load(sys.argv[1])

    df = raw.filter(col('date') < '1900')

    opens = df.filter(col('open') == 'true')\
              .select('series', 'date', lit(1).alias('inopen')).distinct()

    df.join(opens, ['series', 'date'], 'left_outer')\
      .filter((col('open') == 'true') | col('inopen').isNull())\
      .drop('inopen')\
      .dropDuplicates(['id'])\
      .withColumn('ednum', col('ed').cast('int')).na.fill(1, ['ednum']) \
      .withColumn('hour', datediff(col('date'), lit('1970-01-01')) * 24 + col('ednum') - 1) \
      .drop('ednum') \
      .write.save(sys.argv[2])

    spark.stop()

from __future__ import print_function

import sys
from pyspark.sql import SparkSession
-from pyspark.sql.functions import lit, col
+from pyspark.sql.functions import lit, col, datediff
?                                           ++++++++++

if __name__ == "__main__":
    if len(sys.argv) != 3:
        print("Usage: c19.py <input> <output>", file=sys.stderr)
        exit(-1)

    spark = SparkSession.builder.appName('Select c19').getOrCreate()

    raw = spark.read.option('mergeSchema','true').load(sys.argv[1])

    df = raw.filter(col('date') < '1900')

    opens = df.filter(col('open') == 'true')\
              .select('series', 'date', lit(1).alias('inopen')).distinct()

    df.join(opens, ['series', 'date'], 'left_outer')\
      .filter((col('open') == 'true') | col('inopen').isNull())\
      .drop('inopen')\
      .dropDuplicates(['id'])\
+      .withColumn('ednum', col('ed').cast('int')).na.fill(1, ['ednum']) \
+      .withColumn('hour', datediff(col('date'), lit('1970-01-01')) * 24 + col('ednum') - 1) \
+      .drop('ednum') \
      .write.save(sys.argv[2])

    spark.stop()
0cb295e80fbf8d08276166d8005722918012ca83
wafer/kv/models.py
wafer/kv/models.py
from django.contrib.auth.models import Group
from django.db import models

from jsonfield import JSONField


class KeyValue(models.Model):
    group = models.ForeignKey(Group, on_delete=models.CASCADE)
    key = models.CharField(max_length=64, db_index=True)
    value = JSONField()

    def __unicode__(self):
        return u'KV(%s, %s, %r)' % (self.group.name, self.key, self.value)

from django.contrib.auth.models import Group
from django.db import models

from jsonfield import JSONField


class KeyValue(models.Model):
    group = models.ForeignKey(Group, on_delete=models.CASCADE)
    key = models.CharField(max_length=64, db_index=True)
    value = JSONField()

    def __unicode__(self):
        return u'KV(%s, %s, %r)' % (self.group.name, self.key, self.value)

    def __str__(self):
        return 'KV(%s, %s, %r)' % (self.group.name, self.key, self.value)
Add KeyValue.__str__ for python 3
Add KeyValue.__str__ for python 3
Python
isc
CTPUG/wafer,CTPUG/wafer,CTPUG/wafer,CTPUG/wafer
from django.contrib.auth.models import Group
from django.db import models

from jsonfield import JSONField


class KeyValue(models.Model):
    group = models.ForeignKey(Group, on_delete=models.CASCADE)
    key = models.CharField(max_length=64, db_index=True)
    value = JSONField()

    def __unicode__(self):
        return u'KV(%s, %s, %r)' % (self.group.name, self.key, self.value)
+    def __str__(self):
+        return 'KV(%s, %s, %r)' % (self.group.name, self.key, self.value)
+
Add KeyValue.__str__ for python 3
## Code Before: from django.contrib.auth.models import Group from django.db import models from jsonfield import JSONField class KeyValue(models.Model): group = models.ForeignKey(Group, on_delete=models.CASCADE) key = models.CharField(max_length=64, db_index=True) value = JSONField() def __unicode__(self): return u'KV(%s, %s, %r)' % (self.group.name, self.key, self.value) ## Instruction: Add KeyValue.__str__ for python 3 ## Code After: from django.contrib.auth.models import Group from django.db import models from jsonfield import JSONField class KeyValue(models.Model): group = models.ForeignKey(Group, on_delete=models.CASCADE) key = models.CharField(max_length=64, db_index=True) value = JSONField() def __unicode__(self): return u'KV(%s, %s, %r)' % (self.group.name, self.key, self.value) def __str__(self): return 'KV(%s, %s, %r)' % (self.group.name, self.key, self.value)
from django.contrib.auth.models import Group from django.db import models from jsonfield import JSONField class KeyValue(models.Model): group = models.ForeignKey(Group, on_delete=models.CASCADE) key = models.CharField(max_length=64, db_index=True) value = JSONField() def __unicode__(self): return u'KV(%s, %s, %r)' % (self.group.name, self.key, self.value) + + def __str__(self): + return 'KV(%s, %s, %r)' % (self.group.name, self.key, self.value)
ae2dd4b9fe3686aca44a21ff72a4226c6110f2ee
presentation/views.py
presentation/views.py
from django.views.generic import DetailView from django.views.generic import ListView from django.shortcuts import redirect, render from .forms import PresentationCreateForm from .models import Presentation, Slide class PresentationList(ListView): model = Presentation paginate_by = 9 context_object_name = 'presentations' class PresentationDetail(DetailView): model = Presentation context_object_name = 'presentation' template_name = 'presentation/presentation_list.html' def presentation_create(request): form = PresentationCreateForm(request.POST) if request.method == 'POST': if form.is_valid(): presentation = Presentation.objects.create( subject=form.cleaned_data.get('subject'), author=request.user, is_public=form.cleaned_data.get('is_public') ) slide_list = request.POST.getlist('slide_list[]', []) for slide in slide_list: Slide.objects.create( presentation=presentation, slide_order=slide['slide_order'], markdown=slide['markdown'], html=slide['html'], ) return redirect('presentation:list') context = {'form': form} return render(request, 'presentation/presentation_create.html', context)
from django.views.generic import DetailView from django.views.generic import ListView from django.shortcuts import redirect, render from pure_pagination import PaginationMixin from .forms import PresentationCreateForm from .models import Presentation, Slide class PresentationList(PaginationMixin, ListView): model = Presentation paginate_by = 9 context_object_name = 'presentations' ordering = ['-pk'] class PresentationDetail(DetailView): model = Presentation context_object_name = 'presentation' template_name = 'presentation/presentation_list.html' def presentation_create(request): form = PresentationCreateForm(request.POST) if request.method == 'POST': if form.is_valid(): presentation = Presentation.objects.create( subject=form.cleaned_data.get('subject'), author=request.user, is_public=form.cleaned_data.get('is_public') ) slide_list = request.POST.getlist('slide_list[]', []) for slide in slide_list: Slide.objects.create( presentation=presentation, slide_order=slide['slide_order'], markdown=slide['markdown'], html=slide['html'], ) return redirect('presentation:list') context = {'form': form} return render(request, 'presentation/presentation_create.html', context)
Add Ordering and PaginationMixin to ListView
Add Ordering and PaginationMixin to ListView
Python
mit
SaturDJang/warp,SaturDJang/warp,SaturDJang/warp,SaturDJang/warp
from django.views.generic import DetailView from django.views.generic import ListView from django.shortcuts import redirect, render + + from pure_pagination import PaginationMixin from .forms import PresentationCreateForm from .models import Presentation, Slide - class PresentationList(ListView): + class PresentationList(PaginationMixin, ListView): model = Presentation paginate_by = 9 context_object_name = 'presentations' + ordering = ['-pk'] class PresentationDetail(DetailView): model = Presentation context_object_name = 'presentation' template_name = 'presentation/presentation_list.html' def presentation_create(request): form = PresentationCreateForm(request.POST) if request.method == 'POST': if form.is_valid(): presentation = Presentation.objects.create( subject=form.cleaned_data.get('subject'), author=request.user, is_public=form.cleaned_data.get('is_public') ) slide_list = request.POST.getlist('slide_list[]', []) for slide in slide_list: Slide.objects.create( presentation=presentation, slide_order=slide['slide_order'], markdown=slide['markdown'], html=slide['html'], ) return redirect('presentation:list') context = {'form': form} return render(request, 'presentation/presentation_create.html', context)
Add Ordering and PaginationMixin to ListView
## Code Before: from django.views.generic import DetailView from django.views.generic import ListView from django.shortcuts import redirect, render from .forms import PresentationCreateForm from .models import Presentation, Slide class PresentationList(ListView): model = Presentation paginate_by = 9 context_object_name = 'presentations' class PresentationDetail(DetailView): model = Presentation context_object_name = 'presentation' template_name = 'presentation/presentation_list.html' def presentation_create(request): form = PresentationCreateForm(request.POST) if request.method == 'POST': if form.is_valid(): presentation = Presentation.objects.create( subject=form.cleaned_data.get('subject'), author=request.user, is_public=form.cleaned_data.get('is_public') ) slide_list = request.POST.getlist('slide_list[]', []) for slide in slide_list: Slide.objects.create( presentation=presentation, slide_order=slide['slide_order'], markdown=slide['markdown'], html=slide['html'], ) return redirect('presentation:list') context = {'form': form} return render(request, 'presentation/presentation_create.html', context) ## Instruction: Add Ordering and PaginationMixin to ListView ## Code After: from django.views.generic import DetailView from django.views.generic import ListView from django.shortcuts import redirect, render from pure_pagination import PaginationMixin from .forms import PresentationCreateForm from .models import Presentation, Slide class PresentationList(PaginationMixin, ListView): model = Presentation paginate_by = 9 context_object_name = 'presentations' ordering = ['-pk'] class PresentationDetail(DetailView): model = Presentation context_object_name = 'presentation' template_name = 'presentation/presentation_list.html' def presentation_create(request): form = PresentationCreateForm(request.POST) if request.method == 'POST': if form.is_valid(): presentation = Presentation.objects.create( subject=form.cleaned_data.get('subject'), author=request.user, is_public=form.cleaned_data.get('is_public') ) slide_list = request.POST.getlist('slide_list[]', []) for slide in slide_list: Slide.objects.create( presentation=presentation, slide_order=slide['slide_order'], markdown=slide['markdown'], html=slide['html'], ) return redirect('presentation:list') context = {'form': form} return render(request, 'presentation/presentation_create.html', context)
from django.views.generic import DetailView from django.views.generic import ListView from django.shortcuts import redirect, render + + from pure_pagination import PaginationMixin from .forms import PresentationCreateForm from .models import Presentation, Slide - class PresentationList(ListView): + class PresentationList(PaginationMixin, ListView): ? +++++++++++++++++ model = Presentation paginate_by = 9 context_object_name = 'presentations' + ordering = ['-pk'] class PresentationDetail(DetailView): model = Presentation context_object_name = 'presentation' template_name = 'presentation/presentation_list.html' def presentation_create(request): form = PresentationCreateForm(request.POST) if request.method == 'POST': if form.is_valid(): presentation = Presentation.objects.create( subject=form.cleaned_data.get('subject'), author=request.user, is_public=form.cleaned_data.get('is_public') ) slide_list = request.POST.getlist('slide_list[]', []) for slide in slide_list: Slide.objects.create( presentation=presentation, slide_order=slide['slide_order'], markdown=slide['markdown'], html=slide['html'], ) return redirect('presentation:list') context = {'form': form} return render(request, 'presentation/presentation_create.html', context)
5d5f8e02efa6854bef0813e0e8383a3760cf93d2
os_brick/privileged/__init__.py
os_brick/privileged/__init__.py
from oslo_privsep import capabilities as c from oslo_privsep import priv_context # It is expected that most (if not all) os-brick operations can be # executed with these privileges. default = priv_context.PrivContext( __name__, cfg_section='privsep_osbrick', pypath=__name__ + '.default', capabilities=[c.CAP_SYS_ADMIN], )
import os from oslo_privsep import capabilities as c from oslo_privsep import priv_context capabilities = [c.CAP_SYS_ADMIN] # On virtual environments libraries are not owned by the Daemon user (root), so # the Daemon needs the capability to bypass file read permission checks in # order to dynamically load the code to run. if os.environ.get('VIRTUAL_ENV'): capabilities.append(c.CAP_DAC_READ_SEARCH) # It is expected that most (if not all) os-brick operations can be # executed with these privileges. default = priv_context.PrivContext( __name__, cfg_section='privsep_osbrick', pypath=__name__ + '.default', capabilities=capabilities, )
Fix os-brick in virtual environments
Fix os-brick in virtual environments When running os-brick in a virtual environment created by a non-root user, we get the following error: ModuleNotFoundError: No module named 'os_brick.privileged.rootwrap' This happens because the privsep daemon drops all the privileges except those defined in the context, and our current context doesn't bypass file read permission checks, so the Daemon cannot read the file with the code it was asked to run, because it belongs to a different user. This patch adds the CAP_DAC_READ_SEARCH capability to our privsep context so we can load the libraries, but only when we are running in a virtual environment, to follow the principle of least privilege. This bug doesn't affect system-wide installations because the files installed under /sys/python*/site-packages belong to the Daemon user (root), so no special capabilities are necessary. Change-Id: Ib191c075ad1250822f6ac842f39214af8f3a02f0 Closes-Bug: #1884059
Python
apache-2.0
openstack/os-brick,openstack/os-brick
+ + import os from oslo_privsep import capabilities as c from oslo_privsep import priv_context + + + capabilities = [c.CAP_SYS_ADMIN] + + # On virtual environments libraries are not owned by the Daemon user (root), so + # the Daemon needs the capability to bypass file read permission checks in + # order to dynamically load the code to run. + if os.environ.get('VIRTUAL_ENV'): + capabilities.append(c.CAP_DAC_READ_SEARCH) # It is expected that most (if not all) os-brick operations can be # executed with these privileges. default = priv_context.PrivContext( __name__, cfg_section='privsep_osbrick', pypath=__name__ + '.default', - capabilities=[c.CAP_SYS_ADMIN], + capabilities=capabilities, )
Fix os-brick in virtual environments
## Code Before: from oslo_privsep import capabilities as c from oslo_privsep import priv_context # It is expected that most (if not all) os-brick operations can be # executed with these privileges. default = priv_context.PrivContext( __name__, cfg_section='privsep_osbrick', pypath=__name__ + '.default', capabilities=[c.CAP_SYS_ADMIN], ) ## Instruction: Fix os-brick in virtual environments ## Code After: import os from oslo_privsep import capabilities as c from oslo_privsep import priv_context capabilities = [c.CAP_SYS_ADMIN] # On virtual environments libraries are not owned by the Daemon user (root), so # the Daemon needs the capability to bypass file read permission checks in # order to dynamically load the code to run. if os.environ.get('VIRTUAL_ENV'): capabilities.append(c.CAP_DAC_READ_SEARCH) # It is expected that most (if not all) os-brick operations can be # executed with these privileges. default = priv_context.PrivContext( __name__, cfg_section='privsep_osbrick', pypath=__name__ + '.default', capabilities=capabilities, )
+ + import os from oslo_privsep import capabilities as c from oslo_privsep import priv_context + + + capabilities = [c.CAP_SYS_ADMIN] + + # On virtual environments libraries are not owned by the Daemon user (root), so + # the Daemon needs the capability to bypass file read permission checks in + # order to dynamically load the code to run. + if os.environ.get('VIRTUAL_ENV'): + capabilities.append(c.CAP_DAC_READ_SEARCH) # It is expected that most (if not all) os-brick operations can be # executed with these privileges. default = priv_context.PrivContext( __name__, cfg_section='privsep_osbrick', pypath=__name__ + '.default', - capabilities=[c.CAP_SYS_ADMIN], + capabilities=capabilities, )
0c56e276aa1963ec35d744f61cecbb9368f115be
admin_tools/theming/templatetags/theming_tags.py
admin_tools/theming/templatetags/theming_tags.py
from django import template from django.conf import settings from admin_tools.utils import get_media_url register = template.Library() def render_theming_css(): """ Template tag that renders the needed css files for the theming app. """ css = getattr(settings, 'ADMIN_TOOLS_THEMING_CSS', False) if css: css = '/'.join([get_media_url(), css]) else: css = '/'.join([get_media_url(), 'admin_tools', 'css', 'theming.css']) return '<link rel="stylesheet" type="text/css" media="screen" href="%s" />' % css register.simple_tag(render_theming_css)
from django import template from django.conf import settings from admin_tools.utils import get_media_url register = template.Library() def render_theming_css(): """ Template tag that renders the needed css files for the theming app. If ADMIN_TOOLS_THEMING_CSS is explicitly defined to None, don't render anything. """ rval = '' try: css_path = getattr(settings, 'ADMIN_TOOLS_THEMING_CSS') except AttributeError: css_path = 'admin_tools/css/theming.css' if css_path is not None: css_url = '%s/%s' % (get_media_url(), css_path) rval = '<link rel="stylesheet" type="text/css" media="screen" href="%s" />' % css_url return rval register.simple_tag(render_theming_css)
Enable not loading theming CSS by explicitly setting ADMIN_TOOLS_THEMING_CSS to None
Enable not loading theming CSS by explicitly setting ADMIN_TOOLS_THEMING_CSS to None
Python
mit
liberation/django-admin-tools,liberation/django-admin-tools,liberation/django-admin-tools,liberation/django-admin-tools
from django import template from django.conf import settings from admin_tools.utils import get_media_url register = template.Library() def render_theming_css(): """ Template tag that renders the needed css files for the theming app. + + If ADMIN_TOOLS_THEMING_CSS is explicitly defined to None, don't render + anything. """ + rval = '' + try: - css = getattr(settings, 'ADMIN_TOOLS_THEMING_CSS', False) + css_path = getattr(settings, 'ADMIN_TOOLS_THEMING_CSS') - if css: + except AttributeError: + css_path = 'admin_tools/css/theming.css' + + if css_path is not None: - css = '/'.join([get_media_url(), css]) + css_url = '%s/%s' % (get_media_url(), css_path) - else: - css = '/'.join([get_media_url(), 'admin_tools', 'css', 'theming.css']) - return '<link rel="stylesheet" type="text/css" media="screen" href="%s" />' % css + rval = '<link rel="stylesheet" type="text/css" media="screen" href="%s" />' % css_url + + return rval + register.simple_tag(render_theming_css)
Enable not loading theming CSS by explicitly setting ADMIN_TOOLS_THEMING_CSS to None
## Code Before: from django import template from django.conf import settings from admin_tools.utils import get_media_url register = template.Library() def render_theming_css(): """ Template tag that renders the needed css files for the theming app. """ css = getattr(settings, 'ADMIN_TOOLS_THEMING_CSS', False) if css: css = '/'.join([get_media_url(), css]) else: css = '/'.join([get_media_url(), 'admin_tools', 'css', 'theming.css']) return '<link rel="stylesheet" type="text/css" media="screen" href="%s" />' % css register.simple_tag(render_theming_css) ## Instruction: Enable not loading theming CSS by explicitly setting ADMIN_TOOLS_THEMING_CSS to None ## Code After: from django import template from django.conf import settings from admin_tools.utils import get_media_url register = template.Library() def render_theming_css(): """ Template tag that renders the needed css files for the theming app. If ADMIN_TOOLS_THEMING_CSS is explicitly defined to None, don't render anything. """ rval = '' try: css_path = getattr(settings, 'ADMIN_TOOLS_THEMING_CSS') except AttributeError: css_path = 'admin_tools/css/theming.css' if css_path is not None: css_url = '%s/%s' % (get_media_url(), css_path) rval = '<link rel="stylesheet" type="text/css" media="screen" href="%s" />' % css_url return rval register.simple_tag(render_theming_css)
from django import template from django.conf import settings from admin_tools.utils import get_media_url register = template.Library() def render_theming_css(): """ Template tag that renders the needed css files for the theming app. + + If ADMIN_TOOLS_THEMING_CSS is explicitly defined to None, don't render + anything. """ + rval = '' + try: - css = getattr(settings, 'ADMIN_TOOLS_THEMING_CSS', False) ? ------- + css_path = getattr(settings, 'ADMIN_TOOLS_THEMING_CSS') ? ++++ +++++ - if css: + except AttributeError: + css_path = 'admin_tools/css/theming.css' + + if css_path is not None: - css = '/'.join([get_media_url(), css]) ? ^^^^^ - ^ + css_url = '%s/%s' % (get_media_url(), css_path) ? ++++ ++ ++ ^^^ ^^^^^ - else: - css = '/'.join([get_media_url(), 'admin_tools', 'css', 'theming.css']) - return '<link rel="stylesheet" type="text/css" media="screen" href="%s" />' % css ? ^^^^^ + rval = '<link rel="stylesheet" type="text/css" media="screen" href="%s" />' % css_url ? ++++ ^^^^^ ++++ + + return rval + register.simple_tag(render_theming_css)
ff0ae66ee16bc3ac07cb88ddacb52ffa41779757
tests/test_func.py
tests/test_func.py
from .utils import assert_eval def test_simple_func(): assert_eval('(def @a $a 8) (@a)', 1, 8) def test_simple_func_args(): assert_eval( '(def @a $a $a)' '(@a 1)' '(@a 2)' '(@a 5)', 1, 1, 2, 5) def test_func_args_overwrite_globals(): assert_eval( '(def @a $a 3)' '(set $a 10)' '$a' '(@a 8)' '$a', 1, 10, 10, 3, 8, ) def test_func_args_with_offset(): assert_eval( '(def @a $d (+ $d $i))' '(def @b $i (+ $i $j))' '(@a 1 2 3)' '(@b 8 9 10)' '$a\n$b\n$c\n$d\n$e\n$i\n$j\n$k\n', 1, 1, 4, 17, 0, 0, 0, 1, 2, 8, 9, 10, )
from .utils import assert_eval def test_simple_func(): assert_eval('(def @a $a 8) (@a)', 1, 8) def test_simple_func_args(): assert_eval( '(def @a $a $a)' '(@a 1)' '(@a 2)' '(@a 5)', 1, 1, 2, 5) def test_func_args_overwrite_globals(): assert_eval( '(def @a $a 3)' '(set $a 10)' '$a' '(@a 8)' '$a', 1, 10, 10, 3, 8, ) def test_func_args_with_offset(): assert_eval( '(def @a $d (+ $d $i))' '(def @b $i (+ $i $j))' '(@a 1 2 3)' '(@b 8 9 10)' '$a\n$b\n$c\n$d\n$e\n$i\n$j\n$k\n', 1, 1, 4, 17, 0, 0, 0, 1, 2, 8, 9, 10, ) def test_no_args(): assert_eval( '(def @a (+ 0 5))' '(@a)', 1, 5, ) def test_nested_calls(): assert_eval( '(def @a $a (+ $a $b))' '(def @b (+ (@a 1 2) (@a 3 4)))' '(@b)', 1, 1, 1 + 2 + 3 + 4, )
Add some more function tests.
Add some more function tests.
Python
bsd-3-clause
sapir/tinywhat,sapir/tinywhat,sapir/tinywhat
from .utils import assert_eval def test_simple_func(): assert_eval('(def @a $a 8) (@a)', 1, 8) def test_simple_func_args(): assert_eval( '(def @a $a $a)' '(@a 1)' '(@a 2)' '(@a 5)', 1, 1, 2, 5) def test_func_args_overwrite_globals(): assert_eval( '(def @a $a 3)' '(set $a 10)' '$a' '(@a 8)' '$a', 1, 10, 10, 3, 8, ) def test_func_args_with_offset(): assert_eval( '(def @a $d (+ $d $i))' '(def @b $i (+ $i $j))' '(@a 1 2 3)' '(@b 8 9 10)' '$a\n$b\n$c\n$d\n$e\n$i\n$j\n$k\n', 1, 1, 4, 17, 0, 0, 0, 1, 2, 8, 9, 10, ) + + def test_no_args(): + assert_eval( + '(def @a (+ 0 5))' + '(@a)', + 1, + 5, + ) + + + def test_nested_calls(): + assert_eval( + '(def @a $a (+ $a $b))' + '(def @b (+ (@a 1 2) (@a 3 4)))' + '(@b)', + 1, + 1, + 1 + 2 + 3 + 4, + ) +
Add some more function tests.
## Code Before: from .utils import assert_eval def test_simple_func(): assert_eval('(def @a $a 8) (@a)', 1, 8) def test_simple_func_args(): assert_eval( '(def @a $a $a)' '(@a 1)' '(@a 2)' '(@a 5)', 1, 1, 2, 5) def test_func_args_overwrite_globals(): assert_eval( '(def @a $a 3)' '(set $a 10)' '$a' '(@a 8)' '$a', 1, 10, 10, 3, 8, ) def test_func_args_with_offset(): assert_eval( '(def @a $d (+ $d $i))' '(def @b $i (+ $i $j))' '(@a 1 2 3)' '(@b 8 9 10)' '$a\n$b\n$c\n$d\n$e\n$i\n$j\n$k\n', 1, 1, 4, 17, 0, 0, 0, 1, 2, 8, 9, 10, ) ## Instruction: Add some more function tests. ## Code After: from .utils import assert_eval def test_simple_func(): assert_eval('(def @a $a 8) (@a)', 1, 8) def test_simple_func_args(): assert_eval( '(def @a $a $a)' '(@a 1)' '(@a 2)' '(@a 5)', 1, 1, 2, 5) def test_func_args_overwrite_globals(): assert_eval( '(def @a $a 3)' '(set $a 10)' '$a' '(@a 8)' '$a', 1, 10, 10, 3, 8, ) def test_func_args_with_offset(): assert_eval( '(def @a $d (+ $d $i))' '(def @b $i (+ $i $j))' '(@a 1 2 3)' '(@b 8 9 10)' '$a\n$b\n$c\n$d\n$e\n$i\n$j\n$k\n', 1, 1, 4, 17, 0, 0, 0, 1, 2, 8, 9, 10, ) def test_no_args(): assert_eval( '(def @a (+ 0 5))' '(@a)', 1, 5, ) def test_nested_calls(): assert_eval( '(def @a $a (+ $a $b))' '(def @b (+ (@a 1 2) (@a 3 4)))' '(@b)', 1, 1, 1 + 2 + 3 + 4, )
from .utils import assert_eval def test_simple_func(): assert_eval('(def @a $a 8) (@a)', 1, 8) def test_simple_func_args(): assert_eval( '(def @a $a $a)' '(@a 1)' '(@a 2)' '(@a 5)', 1, 1, 2, 5) def test_func_args_overwrite_globals(): assert_eval( '(def @a $a 3)' '(set $a 10)' '$a' '(@a 8)' '$a', 1, 10, 10, 3, 8, ) def test_func_args_with_offset(): assert_eval( '(def @a $d (+ $d $i))' '(def @b $i (+ $i $j))' '(@a 1 2 3)' '(@b 8 9 10)' '$a\n$b\n$c\n$d\n$e\n$i\n$j\n$k\n', 1, 1, 4, 17, 0, 0, 0, 1, 2, 8, 9, 10, ) + + + def test_no_args(): + assert_eval( + '(def @a (+ 0 5))' + '(@a)', + 1, + 5, + ) + + + def test_nested_calls(): + assert_eval( + '(def @a $a (+ $a $b))' + '(def @b (+ (@a 1 2) (@a 3 4)))' + '(@b)', + 1, + 1, + 1 + 2 + 3 + 4, + )
41c6a71e2a9e013966df06e3b5f458aa9a902bc8
tests/test_core.py
tests/test_core.py
import pytest from mock import Mock from saleor.core.utils import ( Country, get_country_by_ip, get_currency_for_country) @pytest.mark.parametrize('ip_data, expected_country', [ ({'country': {'iso_code': 'PL'}}, Country('PL')), ({'country': {'iso_code': 'UNKNOWN'}}, None), (None, None), ({}, None), ({'country': {}}, None)]) def test_get_country_by_ip(ip_data, expected_country, monkeypatch): monkeypatch.setattr( 'saleor.core.utils.georeader.get', Mock(return_value=ip_data)) country = get_country_by_ip('127.0.0.1') assert country == expected_country @pytest.mark.parametrize('country, expected_currency', [ (Country('PL'), 'PLN'), (Country('US'), 'USD'), (Country('GB'), 'GBP')]) def test_get_currency_for_country(country, expected_currency, monkeypatch): currency = get_currency_for_country(country) assert currency == expected_currency
import pytest from mock import Mock from saleor.core.utils import ( Country, get_country_by_ip, get_currency_for_country, create_superuser) from saleor.userprofile.models import User @pytest.mark.parametrize('ip_data, expected_country', [ ({'country': {'iso_code': 'PL'}}, Country('PL')), ({'country': {'iso_code': 'UNKNOWN'}}, None), (None, None), ({}, None), ({'country': {}}, None)]) def test_get_country_by_ip(ip_data, expected_country, monkeypatch): monkeypatch.setattr( 'saleor.core.utils.georeader.get', Mock(return_value=ip_data)) country = get_country_by_ip('127.0.0.1') assert country == expected_country @pytest.mark.parametrize('country, expected_currency', [ (Country('PL'), 'PLN'), (Country('US'), 'USD'), (Country('GB'), 'GBP')]) def test_get_currency_for_country(country, expected_currency, monkeypatch): currency = get_currency_for_country(country) assert currency == expected_currency def test_create_superuser(db, client): credentials = {'email': '[email protected]', 'password': 'admin'} # Test admin creation assert User.objects.all().count() == 0 create_superuser(credentials) assert User.objects.all().count() == 1 admin = User.objects.all().first() assert admin.is_superuser # Test duplicating create_superuser(credentials) assert User.objects.all().count() == 1 # Test logging in response = client.post('/account/login/', {'login': credentials['email'], 'password': credentials['password']}, follow=True) assert response.context['request'].user == admin
Add populatedb admin creation test
Add populatedb admin creation test
Python
bsd-3-clause
car3oon/saleor,mociepka/saleor,jreigel/saleor,mociepka/saleor,tfroehlich82/saleor,maferelo/saleor,itbabu/saleor,itbabu/saleor,HyperManTT/ECommerceSaleor,KenMutemi/saleor,UITools/saleor,tfroehlich82/saleor,KenMutemi/saleor,maferelo/saleor,maferelo/saleor,UITools/saleor,mociepka/saleor,jreigel/saleor,jreigel/saleor,UITools/saleor,UITools/saleor,car3oon/saleor,HyperManTT/ECommerceSaleor,KenMutemi/saleor,itbabu/saleor,car3oon/saleor,tfroehlich82/saleor,HyperManTT/ECommerceSaleor,UITools/saleor
import pytest from mock import Mock from saleor.core.utils import ( - Country, get_country_by_ip, get_currency_for_country) + Country, get_country_by_ip, get_currency_for_country, create_superuser) + from saleor.userprofile.models import User @pytest.mark.parametrize('ip_data, expected_country', [ ({'country': {'iso_code': 'PL'}}, Country('PL')), ({'country': {'iso_code': 'UNKNOWN'}}, None), (None, None), ({}, None), ({'country': {}}, None)]) def test_get_country_by_ip(ip_data, expected_country, monkeypatch): monkeypatch.setattr( 'saleor.core.utils.georeader.get', Mock(return_value=ip_data)) country = get_country_by_ip('127.0.0.1') assert country == expected_country @pytest.mark.parametrize('country, expected_currency', [ (Country('PL'), 'PLN'), (Country('US'), 'USD'), (Country('GB'), 'GBP')]) def test_get_currency_for_country(country, expected_currency, monkeypatch): currency = get_currency_for_country(country) assert currency == expected_currency + + def test_create_superuser(db, client): + credentials = {'email': '[email protected]', 'password': 'admin'} + # Test admin creation + assert User.objects.all().count() == 0 + create_superuser(credentials) + assert User.objects.all().count() == 1 + admin = User.objects.all().first() + assert admin.is_superuser + # Test duplicating + create_superuser(credentials) + assert User.objects.all().count() == 1 + # Test logging in + response = client.post('/account/login/', + {'login': credentials['email'], + 'password': credentials['password']}, + follow=True) + assert response.context['request'].user == admin +
Add populatedb admin creation test
## Code Before: import pytest from mock import Mock from saleor.core.utils import ( Country, get_country_by_ip, get_currency_for_country) @pytest.mark.parametrize('ip_data, expected_country', [ ({'country': {'iso_code': 'PL'}}, Country('PL')), ({'country': {'iso_code': 'UNKNOWN'}}, None), (None, None), ({}, None), ({'country': {}}, None)]) def test_get_country_by_ip(ip_data, expected_country, monkeypatch): monkeypatch.setattr( 'saleor.core.utils.georeader.get', Mock(return_value=ip_data)) country = get_country_by_ip('127.0.0.1') assert country == expected_country @pytest.mark.parametrize('country, expected_currency', [ (Country('PL'), 'PLN'), (Country('US'), 'USD'), (Country('GB'), 'GBP')]) def test_get_currency_for_country(country, expected_currency, monkeypatch): currency = get_currency_for_country(country) assert currency == expected_currency ## Instruction: Add populatedb admin creation test ## Code After: import pytest from mock import Mock from saleor.core.utils import ( Country, get_country_by_ip, get_currency_for_country, create_superuser) from saleor.userprofile.models import User @pytest.mark.parametrize('ip_data, expected_country', [ ({'country': {'iso_code': 'PL'}}, Country('PL')), ({'country': {'iso_code': 'UNKNOWN'}}, None), (None, None), ({}, None), ({'country': {}}, None)]) def test_get_country_by_ip(ip_data, expected_country, monkeypatch): monkeypatch.setattr( 'saleor.core.utils.georeader.get', Mock(return_value=ip_data)) country = get_country_by_ip('127.0.0.1') assert country == expected_country @pytest.mark.parametrize('country, expected_currency', [ (Country('PL'), 'PLN'), (Country('US'), 'USD'), (Country('GB'), 'GBP')]) def test_get_currency_for_country(country, expected_currency, monkeypatch): currency = get_currency_for_country(country) assert currency == expected_currency def test_create_superuser(db, client): credentials = {'email': '[email protected]', 'password': 'admin'} # Test admin creation assert User.objects.all().count() == 0 create_superuser(credentials) assert User.objects.all().count() == 1 admin = User.objects.all().first() assert admin.is_superuser # Test duplicating create_superuser(credentials) assert User.objects.all().count() == 1 # Test logging in response = client.post('/account/login/', {'login': credentials['email'], 'password': credentials['password']}, follow=True) assert response.context['request'].user == admin
import pytest from mock import Mock from saleor.core.utils import ( - Country, get_country_by_ip, get_currency_for_country) + Country, get_country_by_ip, get_currency_for_country, create_superuser) ? ++++++++++++++++++ + from saleor.userprofile.models import User @pytest.mark.parametrize('ip_data, expected_country', [ ({'country': {'iso_code': 'PL'}}, Country('PL')), ({'country': {'iso_code': 'UNKNOWN'}}, None), (None, None), ({}, None), ({'country': {}}, None)]) def test_get_country_by_ip(ip_data, expected_country, monkeypatch): monkeypatch.setattr( 'saleor.core.utils.georeader.get', Mock(return_value=ip_data)) country = get_country_by_ip('127.0.0.1') assert country == expected_country @pytest.mark.parametrize('country, expected_currency', [ (Country('PL'), 'PLN'), (Country('US'), 'USD'), (Country('GB'), 'GBP')]) def test_get_currency_for_country(country, expected_currency, monkeypatch): currency = get_currency_for_country(country) assert currency == expected_currency + + + def test_create_superuser(db, client): + credentials = {'email': '[email protected]', 'password': 'admin'} + # Test admin creation + assert User.objects.all().count() == 0 + create_superuser(credentials) + assert User.objects.all().count() == 1 + admin = User.objects.all().first() + assert admin.is_superuser + # Test duplicating + create_superuser(credentials) + assert User.objects.all().count() == 1 + # Test logging in + response = client.post('/account/login/', + {'login': credentials['email'], + 'password': credentials['password']}, + follow=True) + assert response.context['request'].user == admin
6cd2a721d90c991b7c7dde221affd6ebecf70e95
setup.py
setup.py
from distutils.core import setup, Extension setup(name="fastcache", version="0.1", packages = ["fastcache", "fastcache.tests"], ext_modules= [Extension("fastcache._lrucache", ["src/_lrucache.c"], ), ] )
from distutils.core import setup, Extension setup(name="fastcache", version="0.1", packages = ["fastcache", "fastcache.tests"], ext_modules= [Extension("fastcache._lrucache", ["src/_lrucache.c"], extra_compile_args=['-std=c99']), ] )
Fix travis build error by specifying compiler arg -std=c99
Fix travis build error by specifying compiler arg -std=c99
Python
mit
pbrady/fastcache,pbrady/fastcache,pbrady/fastcache
from distutils.core import setup, Extension setup(name="fastcache", version="0.1", packages = ["fastcache", "fastcache.tests"], ext_modules= [Extension("fastcache._lrucache", ["src/_lrucache.c"], - ), + extra_compile_args=['-std=c99']), ] )
Fix travis build error by specifying compiler arg -std=c99
## Code Before: from distutils.core import setup, Extension setup(name="fastcache", version="0.1", packages = ["fastcache", "fastcache.tests"], ext_modules= [Extension("fastcache._lrucache", ["src/_lrucache.c"], ), ] ) ## Instruction: Fix travis build error by specifying compiler arg -std=c99 ## Code After: from distutils.core import setup, Extension setup(name="fastcache", version="0.1", packages = ["fastcache", "fastcache.tests"], ext_modules= [Extension("fastcache._lrucache", ["src/_lrucache.c"], extra_compile_args=['-std=c99']), ] )
from distutils.core import setup, Extension setup(name="fastcache", version="0.1", packages = ["fastcache", "fastcache.tests"], ext_modules= [Extension("fastcache._lrucache", ["src/_lrucache.c"], - ), + extra_compile_args=['-std=c99']), ] )
269474608221e35907896f5f618e69d6e5136388
facepy/exceptions.py
facepy/exceptions.py
class FacepyError(Exception): """Base class for exceptions raised by Facepy.""" def __init__(self, message): self.message = message def _get_message(self): return self._message def _set_message(self, message): self._message = message message = property(_get_message, _set_message)
class FacepyError(Exception): """Base class for exceptions raised by Facepy.""" def __init__(self, message): self.message = message
Remove unnecessary getter and setter
Remove unnecessary getter and setter
Python
mit
merwok-forks/facepy,jwjohns/facepy,jgorset/facepy,Spockuto/facepy,liorshahverdi/facepy,buzzfeed/facepy,jwjohns/facepy
class FacepyError(Exception): """Base class for exceptions raised by Facepy.""" def __init__(self, message): self.message = message - def _get_message(self): - return self._message - - def _set_message(self, message): - self._message = message - - message = property(_get_message, _set_message) -
Remove unnecessary getter and setter
## Code Before: class FacepyError(Exception): """Base class for exceptions raised by Facepy.""" def __init__(self, message): self.message = message def _get_message(self): return self._message def _set_message(self, message): self._message = message message = property(_get_message, _set_message) ## Instruction: Remove unnecessary getter and setter ## Code After: class FacepyError(Exception): """Base class for exceptions raised by Facepy.""" def __init__(self, message): self.message = message
class FacepyError(Exception): """Base class for exceptions raised by Facepy.""" def __init__(self, message): self.message = message - - def _get_message(self): - return self._message - - def _set_message(self, message): - self._message = message - - message = property(_get_message, _set_message)
aaba085cd2e97c8c23e6724da3313d42d12798f0
app/grandchallenge/annotations/validators.py
app/grandchallenge/annotations/validators.py
from rest_framework import serializers from django.conf import settings def validate_grader_is_current_retina_user(grader, context): """ This method checks if the passed grader equals the request.user that is passed in the context. Only applies to users that are in the retina_graders group. """ request = context.get("request") if request and request.user.is_authenticated: user = request.user if user.groups.filter( name=settings.RETINA_GRADERS_GROUP_NAME ).exists(): if grader != user: raise serializers.ValidationError( "User is not allowed to create annotation for other grader" )
from rest_framework import serializers from django.conf import settings def validate_grader_is_current_retina_user(grader, context): """ This method checks if the passed grader equals the request.user that is passed in the context. Only applies to users that are in the retina_graders group. """ request = context.get("request") if ( request is not None and request.user is not None and request.user.is_authenticated ): user = request.user if user.groups.filter( name=settings.RETINA_GRADERS_GROUP_NAME ).exists(): if grader != user: raise serializers.ValidationError( "User is not allowed to create annotation for other grader" )
Make sure request.user is a user
Make sure request.user is a user
Python
apache-2.0
comic/comic-django,comic/comic-django,comic/comic-django,comic/comic-django,comic/comic-django
from rest_framework import serializers from django.conf import settings def validate_grader_is_current_retina_user(grader, context): """ This method checks if the passed grader equals the request.user that is passed in the context. Only applies to users that are in the retina_graders group. """ request = context.get("request") + if ( + request is not None + and request.user is not None - if request and request.user.is_authenticated: + and request.user.is_authenticated + ): user = request.user if user.groups.filter( name=settings.RETINA_GRADERS_GROUP_NAME ).exists(): if grader != user: raise serializers.ValidationError( "User is not allowed to create annotation for other grader" )
Make sure request.user is a user
## Code Before: from rest_framework import serializers from django.conf import settings def validate_grader_is_current_retina_user(grader, context): """ This method checks if the passed grader equals the request.user that is passed in the context. Only applies to users that are in the retina_graders group. """ request = context.get("request") if request and request.user.is_authenticated: user = request.user if user.groups.filter( name=settings.RETINA_GRADERS_GROUP_NAME ).exists(): if grader != user: raise serializers.ValidationError( "User is not allowed to create annotation for other grader" ) ## Instruction: Make sure request.user is a user ## Code After: from rest_framework import serializers from django.conf import settings def validate_grader_is_current_retina_user(grader, context): """ This method checks if the passed grader equals the request.user that is passed in the context. Only applies to users that are in the retina_graders group. """ request = context.get("request") if ( request is not None and request.user is not None and request.user.is_authenticated ): user = request.user if user.groups.filter( name=settings.RETINA_GRADERS_GROUP_NAME ).exists(): if grader != user: raise serializers.ValidationError( "User is not allowed to create annotation for other grader" )
from rest_framework import serializers from django.conf import settings def validate_grader_is_current_retina_user(grader, context): """ This method checks if the passed grader equals the request.user that is passed in the context. Only applies to users that are in the retina_graders group. """ request = context.get("request") + if ( + request is not None + and request.user is not None - if request and request.user.is_authenticated: ? -- ^^^^^^^ - + and request.user.is_authenticated ? ^^ + ): user = request.user if user.groups.filter( name=settings.RETINA_GRADERS_GROUP_NAME ).exists(): if grader != user: raise serializers.ValidationError( "User is not allowed to create annotation for other grader" )
2e042201d6c0e0709d7056d399052389d1ea54b0
shopify_auth/__init__.py
shopify_auth/__init__.py
import shopify from django.conf import settings from django.core.exceptions import ImproperlyConfigured VERSION = (0, 1, 5) __version__ = '.'.join(map(str, VERSION)) __author__ = 'Gavin Ballard' def initialize(): if not settings.SHOPIFY_APP_API_KEY or not settings.SHOPIFY_APP_API_SECRET: raise ImproperlyConfigured("SHOPIFY_APP_API_KEY and SHOPIFY_APP_API_SECRET must be set in settings") shopify.Session.setup(api_key = settings.SHOPIFY_APP_API_KEY, secret = settings.SHOPIFY_APP_API_SECRET)
VERSION = (0, 1, 6) __version__ = '.'.join(map(str, VERSION)) __author__ = 'Gavin Ballard' def initialize(): import shopify from django.conf import settings from django.core.exceptions import ImproperlyConfigured if not settings.SHOPIFY_APP_API_KEY or not settings.SHOPIFY_APP_API_SECRET: raise ImproperlyConfigured("SHOPIFY_APP_API_KEY and SHOPIFY_APP_API_SECRET must be set in settings") shopify.Session.setup(api_key = settings.SHOPIFY_APP_API_KEY, secret = settings.SHOPIFY_APP_API_SECRET)
Move imports inside initialize() method so that we don’t break things on initial setup.
Move imports inside initialize() method so that we don’t break things on initial setup.
Python
mit
RafaAguilar/django-shopify-auth,discolabs/django-shopify-auth,RafaAguilar/django-shopify-auth,funkybob/django-shopify-auth,funkybob/django-shopify-auth,discolabs/django-shopify-auth
- import shopify - - from django.conf import settings - from django.core.exceptions import ImproperlyConfigured - - - VERSION = (0, 1, 5) + VERSION = (0, 1, 6) __version__ = '.'.join(map(str, VERSION)) __author__ = 'Gavin Ballard' def initialize(): + import shopify + + from django.conf import settings + from django.core.exceptions import ImproperlyConfigured + if not settings.SHOPIFY_APP_API_KEY or not settings.SHOPIFY_APP_API_SECRET: raise ImproperlyConfigured("SHOPIFY_APP_API_KEY and SHOPIFY_APP_API_SECRET must be set in settings") shopify.Session.setup(api_key = settings.SHOPIFY_APP_API_KEY, secret = settings.SHOPIFY_APP_API_SECRET)
Move imports inside initialize() method so that we don’t break things on initial setup.
## Code Before: import shopify from django.conf import settings from django.core.exceptions import ImproperlyConfigured VERSION = (0, 1, 5) __version__ = '.'.join(map(str, VERSION)) __author__ = 'Gavin Ballard' def initialize(): if not settings.SHOPIFY_APP_API_KEY or not settings.SHOPIFY_APP_API_SECRET: raise ImproperlyConfigured("SHOPIFY_APP_API_KEY and SHOPIFY_APP_API_SECRET must be set in settings") shopify.Session.setup(api_key = settings.SHOPIFY_APP_API_KEY, secret = settings.SHOPIFY_APP_API_SECRET) ## Instruction: Move imports inside initialize() method so that we don’t break things on initial setup. ## Code After: VERSION = (0, 1, 6) __version__ = '.'.join(map(str, VERSION)) __author__ = 'Gavin Ballard' def initialize(): import shopify from django.conf import settings from django.core.exceptions import ImproperlyConfigured if not settings.SHOPIFY_APP_API_KEY or not settings.SHOPIFY_APP_API_SECRET: raise ImproperlyConfigured("SHOPIFY_APP_API_KEY and SHOPIFY_APP_API_SECRET must be set in settings") shopify.Session.setup(api_key = settings.SHOPIFY_APP_API_KEY, secret = settings.SHOPIFY_APP_API_SECRET)
- import shopify - - from django.conf import settings - from django.core.exceptions import ImproperlyConfigured - - - VERSION = (0, 1, 5) ? ^ + VERSION = (0, 1, 6) ? ^ __version__ = '.'.join(map(str, VERSION)) __author__ = 'Gavin Ballard' def initialize(): + import shopify + + from django.conf import settings + from django.core.exceptions import ImproperlyConfigured + if not settings.SHOPIFY_APP_API_KEY or not settings.SHOPIFY_APP_API_SECRET: raise ImproperlyConfigured("SHOPIFY_APP_API_KEY and SHOPIFY_APP_API_SECRET must be set in settings") shopify.Session.setup(api_key = settings.SHOPIFY_APP_API_KEY, secret = settings.SHOPIFY_APP_API_SECRET)
c290c132368a93856066513d474078c2a2b22e39
polyaxon/libs/paths.py
polyaxon/libs/paths.py
import logging import os import shutil logger = logging.getLogger('polyaxon.libs.paths') def delete_path(path): if not os.path.exists(path): return try: if os.path.isfile(path): os.remove(path) else: shutil.rmtree(path) except OSError: logger.warning('Could not delete path `%s`', path) def create_path(path): try: os.mkdir(path) except FileExistsError: pass except OSError as e: logger.warning('Could not create path `%s`, exception %s', path, e) def get_tmp_path(path): return os.path.join('/tmp', path) def create_tmp_dir(dir_name): create_path(get_tmp_path(dir_name)) def delete_tmp_dir(dir_name): delete_path(get_tmp_path(dir_name)) def copy_to_tmp_dir(path, dir_name): tmp_path = get_tmp_path(dir_name) if os.path.exists(tmp_path): return tmp_path shutil.copytree(path, tmp_path) return tmp_path
import logging import os import shutil logger = logging.getLogger('polyaxon.libs.paths') def delete_path(path): if not os.path.exists(path): return try: if os.path.isfile(path): os.remove(path) else: shutil.rmtree(path) except OSError: logger.warning('Could not delete path `%s`', path) def create_path(path): try: os.mkdir(path) except FileExistsError: pass except OSError as e: logger.warning('Could not create path `%s`, exception %s', path, e) def get_tmp_path(path): return os.path.join('/tmp', path) def create_tmp_dir(dir_name): create_path(get_tmp_path(dir_name)) def delete_tmp_dir(dir_name): delete_path(get_tmp_path(dir_name)) def copy_to_tmp_dir(path, dir_name): tmp_path = get_tmp_path(dir_name) if os.path.exists(tmp_path): return tmp_path try: shutil.copytree(path, tmp_path) except FileExistsError as e: logger.warning('Path already exists `%s`, exception %s', path, e) return tmp_path
Add exception handling for FileExistsError
Add exception handling for FileExistsError
Python
apache-2.0
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
import logging import os import shutil logger = logging.getLogger('polyaxon.libs.paths') def delete_path(path): if not os.path.exists(path): return try: if os.path.isfile(path): os.remove(path) else: shutil.rmtree(path) except OSError: logger.warning('Could not delete path `%s`', path) def create_path(path): try: os.mkdir(path) except FileExistsError: pass except OSError as e: logger.warning('Could not create path `%s`, exception %s', path, e) def get_tmp_path(path): return os.path.join('/tmp', path) def create_tmp_dir(dir_name): create_path(get_tmp_path(dir_name)) def delete_tmp_dir(dir_name): delete_path(get_tmp_path(dir_name)) def copy_to_tmp_dir(path, dir_name): tmp_path = get_tmp_path(dir_name) if os.path.exists(tmp_path): return tmp_path + try: - shutil.copytree(path, tmp_path) + shutil.copytree(path, tmp_path) + except FileExistsError as e: + logger.warning('Path already exists `%s`, exception %s', path, e) return tmp_path
Add exception handling for FileExistsError
## Code Before: import logging import os import shutil logger = logging.getLogger('polyaxon.libs.paths') def delete_path(path): if not os.path.exists(path): return try: if os.path.isfile(path): os.remove(path) else: shutil.rmtree(path) except OSError: logger.warning('Could not delete path `%s`', path) def create_path(path): try: os.mkdir(path) except FileExistsError: pass except OSError as e: logger.warning('Could not create path `%s`, exception %s', path, e) def get_tmp_path(path): return os.path.join('/tmp', path) def create_tmp_dir(dir_name): create_path(get_tmp_path(dir_name)) def delete_tmp_dir(dir_name): delete_path(get_tmp_path(dir_name)) def copy_to_tmp_dir(path, dir_name): tmp_path = get_tmp_path(dir_name) if os.path.exists(tmp_path): return tmp_path shutil.copytree(path, tmp_path) return tmp_path ## Instruction: Add exception handling for FileExistsError ## Code After: import logging import os import shutil logger = logging.getLogger('polyaxon.libs.paths') def delete_path(path): if not os.path.exists(path): return try: if os.path.isfile(path): os.remove(path) else: shutil.rmtree(path) except OSError: logger.warning('Could not delete path `%s`', path) def create_path(path): try: os.mkdir(path) except FileExistsError: pass except OSError as e: logger.warning('Could not create path `%s`, exception %s', path, e) def get_tmp_path(path): return os.path.join('/tmp', path) def create_tmp_dir(dir_name): create_path(get_tmp_path(dir_name)) def delete_tmp_dir(dir_name): delete_path(get_tmp_path(dir_name)) def copy_to_tmp_dir(path, dir_name): tmp_path = get_tmp_path(dir_name) if os.path.exists(tmp_path): return tmp_path try: shutil.copytree(path, tmp_path) except FileExistsError as e: logger.warning('Path already exists `%s`, exception %s', path, e) return tmp_path
import logging import os import shutil logger = logging.getLogger('polyaxon.libs.paths') def delete_path(path): if not os.path.exists(path): return try: if os.path.isfile(path): os.remove(path) else: shutil.rmtree(path) except OSError: logger.warning('Could not delete path `%s`', path) def create_path(path): try: os.mkdir(path) except FileExistsError: pass except OSError as e: logger.warning('Could not create path `%s`, exception %s', path, e) def get_tmp_path(path): return os.path.join('/tmp', path) def create_tmp_dir(dir_name): create_path(get_tmp_path(dir_name)) def delete_tmp_dir(dir_name): delete_path(get_tmp_path(dir_name)) def copy_to_tmp_dir(path, dir_name): tmp_path = get_tmp_path(dir_name) if os.path.exists(tmp_path): return tmp_path + try: - shutil.copytree(path, tmp_path) + shutil.copytree(path, tmp_path) ? ++++ + except FileExistsError as e: + logger.warning('Path already exists `%s`, exception %s', path, e) return tmp_path
2d9d3e5a0a904a52e8b97bdb64e59f455d15b6e8
migrations/versions/1815829d365_.py
migrations/versions/1815829d365_.py
# revision identifiers, used by Alembic. revision = '1815829d365' down_revision = '3fcddd64a72' from alembic import op import sqlalchemy as sa def upgrade(): ### commands auto generated by Alembic - please adjust! ### # create new unique index to include geo app ref op.execute("DROP INDEX title_abr_idx") op.execute("CREATE UNIQUE INDEX title_abr_geo_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'), (record->'data'->>'geometry_application_reference'))") ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.execute("DROP INDEX title_abr_geo_idx") op.execute("CREATE UNIQUE INDEX title_abr_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'))") ### end Alembic commands ###
# revision identifiers, used by Alembic. revision = '1815829d365' down_revision = '3fcddd64a72' from alembic import op import sqlalchemy as sa def upgrade(): ### commands auto generated by Alembic - please adjust! ### # create new unique index to include geometry_application_ref op.execute("DROP INDEX title_abr_idx") op.execute("CREATE UNIQUE INDEX title_abr_geo_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'), (record->'data'->>'geometry_application_reference'))") ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.execute("DROP INDEX title_abr_geo_idx") op.execute("CREATE UNIQUE INDEX title_abr_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'))") ### end Alembic commands ###
Add geometry_application_reference to new unique index.
Add geometry_application_reference to new unique index.
Python
mit
LandRegistry/system-of-record,LandRegistry/system-of-record
# revision identifiers, used by Alembic. revision = '1815829d365' down_revision = '3fcddd64a72' from alembic import op import sqlalchemy as sa def upgrade(): ### commands auto generated by Alembic - please adjust! ### - # create new unique index to include geo app ref + # create new unique index to include geometry_application_ref op.execute("DROP INDEX title_abr_idx") op.execute("CREATE UNIQUE INDEX title_abr_geo_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'), (record->'data'->>'geometry_application_reference'))") ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.execute("DROP INDEX title_abr_geo_idx") op.execute("CREATE UNIQUE INDEX title_abr_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'))") ### end Alembic commands ###
Add geometry_application_reference to new unique index.
## Code Before: # revision identifiers, used by Alembic. revision = '1815829d365' down_revision = '3fcddd64a72' from alembic import op import sqlalchemy as sa def upgrade(): ### commands auto generated by Alembic - please adjust! ### # create new unique index to include geo app ref op.execute("DROP INDEX title_abr_idx") op.execute("CREATE UNIQUE INDEX title_abr_geo_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'), (record->'data'->>'geometry_application_reference'))") ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.execute("DROP INDEX title_abr_geo_idx") op.execute("CREATE UNIQUE INDEX title_abr_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'))") ### end Alembic commands ### ## Instruction: Add geometry_application_reference to new unique index. ## Code After: # revision identifiers, used by Alembic. revision = '1815829d365' down_revision = '3fcddd64a72' from alembic import op import sqlalchemy as sa def upgrade(): ### commands auto generated by Alembic - please adjust! ### # create new unique index to include geometry_application_ref op.execute("DROP INDEX title_abr_idx") op.execute("CREATE UNIQUE INDEX title_abr_geo_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'), (record->'data'->>'geometry_application_reference'))") ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.execute("DROP INDEX title_abr_geo_idx") op.execute("CREATE UNIQUE INDEX title_abr_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'))") ### end Alembic commands ###
# revision identifiers, used by Alembic. revision = '1815829d365' down_revision = '3fcddd64a72' from alembic import op import sqlalchemy as sa def upgrade(): ### commands auto generated by Alembic - please adjust! ### - # create new unique index to include geo app ref ? ^ ^ + # create new unique index to include geometry_application_ref ? ^^^^^^ ^^^^^^^^^ op.execute("DROP INDEX title_abr_idx") op.execute("CREATE UNIQUE INDEX title_abr_geo_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'), (record->'data'->>'geometry_application_reference'))") ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.execute("DROP INDEX title_abr_geo_idx") op.execute("CREATE UNIQUE INDEX title_abr_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'))") ### end Alembic commands ###
c13208dcc4fe1715db10d86e4dfd584c18f396fa
sympy/calculus/singularities.py
sympy/calculus/singularities.py
from sympy.solvers import solve from sympy.simplify import simplify def singularities(expr, sym): """ Finds singularities for a function. Currently supported functions are: - univariate real rational functions Examples ======== >>> from sympy.calculus.singularities import singularities >>> from sympy import Symbol >>> x = Symbol('x', real=True) >>> singularities(x**2 + x + 1, x) () >>> singularities(1/(x + 1), x) (-1,) References ========== .. [1] http://en.wikipedia.org/wiki/Mathematical_singularity """ if not expr.is_rational_function(sym): raise NotImplementedError("Algorithms finding singularities for" " non rational functions are not yet" " implemented") else: return tuple(sorted(solve(simplify(1/expr), sym)))
from sympy.solvers import solve from sympy.solvers.solveset import solveset from sympy.simplify import simplify def singularities(expr, sym): """ Finds singularities for a function. Currently supported functions are: - univariate real rational functions Examples ======== >>> from sympy.calculus.singularities import singularities >>> from sympy import Symbol >>> x = Symbol('x', real=True) >>> singularities(x**2 + x + 1, x) () >>> singularities(1/(x + 1), x) (-1,) References ========== .. [1] http://en.wikipedia.org/wiki/Mathematical_singularity """ if not expr.is_rational_function(sym): raise NotImplementedError("Algorithms finding singularities for" " non rational functions are not yet" " implemented") else: return tuple(sorted(solveset(simplify(1/expr), sym)))
Replace solve with solveset in sympy.calculus
Replace solve with solveset in sympy.calculus
Python
bsd-3-clause
skidzo/sympy,chaffra/sympy,pandeyadarsh/sympy,VaibhavAgarwalVA/sympy,abhiii5459/sympy,jbbskinny/sympy,aktech/sympy,lindsayad/sympy,kevalds51/sympy,Titan-C/sympy,hargup/sympy,yukoba/sympy,farhaanbukhsh/sympy,moble/sympy,emon10005/sympy,bukzor/sympy,sahmed95/sympy,mafiya69/sympy,kaushik94/sympy,VaibhavAgarwalVA/sympy,jbbskinny/sympy,cswiercz/sympy,moble/sympy,lindsayad/sympy,Titan-C/sympy,bukzor/sympy,jaimahajan1997/sympy,abhiii5459/sympy,sampadsaha5/sympy,saurabhjn76/sympy,yukoba/sympy,aktech/sympy,mcdaniel67/sympy,ChristinaZografou/sympy,wyom/sympy,postvakje/sympy,iamutkarshtiwari/sympy,wyom/sympy,yashsharan/sympy,ga7g08/sympy,debugger22/sympy,ahhda/sympy,grevutiu-gabriel/sympy,oliverlee/sympy,chaffra/sympy,Designist/sympy,yashsharan/sympy,Davidjohnwilson/sympy,drufat/sympy,skidzo/sympy,oliverlee/sympy,jerli/sympy,AkademieOlympia/sympy,debugger22/sympy,kaushik94/sympy,Shaswat27/sympy,VaibhavAgarwalVA/sympy,ChristinaZografou/sympy,iamutkarshtiwari/sympy,wyom/sympy,ahhda/sympy,mcdaniel67/sympy,atreyv/sympy,kaichogami/sympy,Davidjohnwilson/sympy,abhiii5459/sympy,postvakje/sympy,Arafatk/sympy,Titan-C/sympy,pandeyadarsh/sympy,cswiercz/sympy,wanglongqi/sympy,Shaswat27/sympy,drufat/sympy,maniteja123/sympy,rahuldan/sympy,jerli/sympy,Arafatk/sympy,iamutkarshtiwari/sympy,MechCoder/sympy,farhaanbukhsh/sympy,Arafatk/sympy,madan96/sympy,souravsingh/sympy,chaffra/sympy,wanglongqi/sympy,ChristinaZografou/sympy,madan96/sympy,shikil/sympy,Designist/sympy,sahmed95/sympy,ahhda/sympy,jbbskinny/sympy,Curious72/sympy,souravsingh/sympy,postvakje/sympy,lindsayad/sympy,kevalds51/sympy,sampadsaha5/sympy,jaimahajan1997/sympy,rahuldan/sympy,mcdaniel67/sympy,yukoba/sympy,kumarkrishna/sympy,souravsingh/sympy,MechCoder/sympy,ga7g08/sympy,bukzor/sympy,oliverlee/sympy,Curious72/sympy,grevutiu-gabriel/sympy,MechCoder/sympy,sahmed95/sympy,Vishluck/sympy,shikil/sympy,atreyv/sympy,emon10005/sympy,mafiya69/sympy,kaushik94/sympy,pandeyadarsh/sympy,Curious72/sympy,saurabhjn76/sympy,kaichogami/sympy,cswiercz/sympy,aktech/sympy,sampadsaha5/sympy,maniteja123/sympy,atreyv/sympy,ga7g08/sympy,Davidjohnwilson/sympy,shikil/sympy,moble/sympy,AkademieOlympia/sympy,kaichogami/sympy,skidzo/sympy,emon10005/sympy,grevutiu-gabriel/sympy,Vishluck/sympy,hargup/sympy,debugger22/sympy,wanglongqi/sympy,Shaswat27/sympy,Vishluck/sympy,kevalds51/sympy,AkademieOlympia/sympy,farhaanbukhsh/sympy,Designist/sympy,rahuldan/sympy,maniteja123/sympy,kumarkrishna/sympy,jaimahajan1997/sympy,yashsharan/sympy,kumarkrishna/sympy,jerli/sympy,madan96/sympy,mafiya69/sympy,saurabhjn76/sympy,drufat/sympy,hargup/sympy
from sympy.solvers import solve + from sympy.solvers.solveset import solveset from sympy.simplify import simplify def singularities(expr, sym): """ Finds singularities for a function. Currently supported functions are: - univariate real rational functions Examples ======== >>> from sympy.calculus.singularities import singularities >>> from sympy import Symbol >>> x = Symbol('x', real=True) >>> singularities(x**2 + x + 1, x) () >>> singularities(1/(x + 1), x) (-1,) References ========== .. [1] http://en.wikipedia.org/wiki/Mathematical_singularity """ if not expr.is_rational_function(sym): raise NotImplementedError("Algorithms finding singularities for" " non rational functions are not yet" " implemented") else: - return tuple(sorted(solve(simplify(1/expr), sym))) + return tuple(sorted(solveset(simplify(1/expr), sym)))
Replace solve with solveset in sympy.calculus
## Code Before: from sympy.solvers import solve from sympy.simplify import simplify def singularities(expr, sym): """ Finds singularities for a function. Currently supported functions are: - univariate real rational functions Examples ======== >>> from sympy.calculus.singularities import singularities >>> from sympy import Symbol >>> x = Symbol('x', real=True) >>> singularities(x**2 + x + 1, x) () >>> singularities(1/(x + 1), x) (-1,) References ========== .. [1] http://en.wikipedia.org/wiki/Mathematical_singularity """ if not expr.is_rational_function(sym): raise NotImplementedError("Algorithms finding singularities for" " non rational functions are not yet" " implemented") else: return tuple(sorted(solve(simplify(1/expr), sym))) ## Instruction: Replace solve with solveset in sympy.calculus ## Code After: from sympy.solvers import solve from sympy.solvers.solveset import solveset from sympy.simplify import simplify def singularities(expr, sym): """ Finds singularities for a function. Currently supported functions are: - univariate real rational functions Examples ======== >>> from sympy.calculus.singularities import singularities >>> from sympy import Symbol >>> x = Symbol('x', real=True) >>> singularities(x**2 + x + 1, x) () >>> singularities(1/(x + 1), x) (-1,) References ========== .. [1] http://en.wikipedia.org/wiki/Mathematical_singularity """ if not expr.is_rational_function(sym): raise NotImplementedError("Algorithms finding singularities for" " non rational functions are not yet" " implemented") else: return tuple(sorted(solveset(simplify(1/expr), sym)))
from sympy.solvers import solve + from sympy.solvers.solveset import solveset from sympy.simplify import simplify def singularities(expr, sym): """ Finds singularities for a function. Currently supported functions are: - univariate real rational functions Examples ======== >>> from sympy.calculus.singularities import singularities >>> from sympy import Symbol >>> x = Symbol('x', real=True) >>> singularities(x**2 + x + 1, x) () >>> singularities(1/(x + 1), x) (-1,) References ========== .. [1] http://en.wikipedia.org/wiki/Mathematical_singularity """ if not expr.is_rational_function(sym): raise NotImplementedError("Algorithms finding singularities for" " non rational functions are not yet" " implemented") else: - return tuple(sorted(solve(simplify(1/expr), sym))) + return tuple(sorted(solveset(simplify(1/expr), sym))) ? +++
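A note on the API difference this commit leans on (a minimal sketch, assuming a SymPy version that ships `solveset`): `solve` returns a plain list, while `solveset` returns a SymPy set object, but a finite solution set is still iterable, so the `sorted(...)` wrapper keeps working unchanged.

```python
from sympy import Symbol, simplify, solve
from sympy.solvers.solveset import solveset

x = Symbol('x', real=True)
expr = 1 / (x + 1)

# Zeros of 1/expr are the poles (singularities) of expr.
print(solve(simplify(1 / expr), x))             # [-1]  (plain list)
print(sorted(solveset(simplify(1 / expr), x)))  # [-1]  (FiniteSet, still iterable)
```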
1636fe834830ebb6644d17f908f893a3c2a41e33
tests/test_sentences.py
tests/test_sentences.py
import pytest from sdsc import sentencesegmenter @pytest.mark.parametrize("sentence,expected", ( # 1 ("This is a simple ##@command-2## sentence. This one too.", ["This is a simple ##@command-2## sentence", "This one too"]), # 2 ("This is not a test in one go. openSUSE is not written with a capital letter.", ["This is not a test in one go", "openSUSE is not written with a capital letter"]), # 3 ("This is a sentence, e.g. for me.", ["This is a sentence, e.g. for me"]), # 4 ("E. g. this is a sentence.", ["E. g. this is a sentence"]), # 5 ("An above average chance stands e.g. Michael. Marta is also on the list.", ["An above average chance stands e.g. Michael", "Marta is also on the list"]), # Add more entries here: )) def test_sentencesegmenter(sentence, expected): """checks whether sentencesegmenter behaves sane""" sentences = sentencesegmenter(sentence) assert sentences == expected
import pytest from sdsc import sentencesegmenter @pytest.mark.parametrize("sentence,expected", ( # 0 - a single simple sentence ("This is a simple sentence.", ["This is a simple sentence"]), # 1 - two simple sentences ("This is a simple ##@command-2## sentence. This one is too.", ["This is a simple ##@command-2## sentence", "This one is too"]), # 2 - lowercase letter starts second sentence ("This is not a test in one go. openSUSE is not written with a capital letter.", ["This is not a test in one go", "openSUSE is not written with a capital letter"]), # 3 - abbreviation in the middle of the sentence ("This is a sentence, e.g. for me.", ["This is a sentence, e.g. for me"]), # 4 - abbreviation at the start of the sentence ("E. g. this is a sentence.", ["E. g. this is a sentence"]), # 5 - abbreviation in the middle of sentence before a capital letter ("An above average chance stands e.g. Michael. Marta is also on the list.", ["An above average chance stands e.g. Michael", "Marta is also on the list"]), # 6 - sentences with parentheses around them ("(We speak in circles. We dance in code.)", ["We speak in circles", "We dance in code"]), # 6 - sentences with parentheses around them ("We speak in circles. (We dance in code.)", ["We speak in circles", "We dance in code"]), )) def test_sentencesegmenter(sentence, expected): """checks whether sentencesegmenter behaves sanely""" sentences = sentencesegmenter(sentence) assert sentences == expected
Expand the sentence segmentation tests a little
Expand the sentence segmentation tests a little
Python
lgpl-2.1
sknorr/suse-doc-style-checker,sknorr/suse-doc-style-checker,sknorr/suse-doc-style-checker
import pytest from sdsc import sentencesegmenter @pytest.mark.parametrize("sentence,expected", ( - # 1 + # 0 - a single simple sentence + ("This is a simple sentence.", + ["This is a simple sentence"]), + # 1 - two simple sentences - ("This is a simple ##@command-2## sentence. This one too.", + ("This is a simple ##@command-2## sentence. This one is too.", - ["This is a simple ##@command-2## sentence", "This one too"]), + ["This is a simple ##@command-2## sentence", "This one is too"]), - # 2 + # 2 - lowercase letter starts second sentence ("This is not a test in one go. openSUSE is not written with a capital letter.", ["This is not a test in one go", "openSUSE is not written with a capital letter"]), - # 3 + # 3 - abbreviation in the middle of the sentence ("This is a sentence, e.g. for me.", ["This is a sentence, e.g. for me"]), - # 4 + # 4 - abbreviation at the start of the sentence ("E. g. this is a sentence.", ["E. g. this is a sentence"]), - # 5 + # 5 - abbreviation in the middle of sentence before a capital letter ("An above average chance stands e.g. Michael. Marta is also on the list.", ["An above average chance stands e.g. Michael", "Marta is also on the list"]), - # Add more entries here: + # 6 - sentences with parentheses around them + ("(We speak in circles. We dance in code.)", + ["We speak in circles", + "We dance in code"]), + # 6 - sentences with parentheses around them + ("We speak in circles. (We dance in code.)", + ["We speak in circles", + "We dance in code"]), )) def test_sentencesegmenter(sentence, expected): - """checks whether sentencesegmenter behaves sane""" + """checks whether sentencesegmenter behaves sanely""" sentences = sentencesegmenter(sentence) assert sentences == expected
Expand the sentence segmentation tests a little
## Code Before: import pytest from sdsc import sentencesegmenter @pytest.mark.parametrize("sentence,expected", ( # 1 ("This is a simple ##@command-2## sentence. This one too.", ["This is a simple ##@command-2## sentence", "This one too"]), # 2 ("This is not a test in one go. openSUSE is not written with a capital letter.", ["This is not a test in one go", "openSUSE is not written with a capital letter"]), # 3 ("This is a sentence, e.g. for me.", ["This is a sentence, e.g. for me"]), # 4 ("E. g. this is a sentence.", ["E. g. this is a sentence"]), # 5 ("An above average chance stands e.g. Michael. Marta is also on the list.", ["An above average chance stands e.g. Michael", "Marta is also on the list"]), # Add more entries here: )) def test_sentencesegmenter(sentence, expected): """checks whether sentencesegmenter behaves sane""" sentences = sentencesegmenter(sentence) assert sentences == expected ## Instruction: Expand the sentence segmentation tests a little ## Code After: import pytest from sdsc import sentencesegmenter @pytest.mark.parametrize("sentence,expected", ( # 0 - a single simple sentence ("This is a simple sentence.", ["This is a simple sentence"]), # 1 - two simple sentences ("This is a simple ##@command-2## sentence. This one is too.", ["This is a simple ##@command-2## sentence", "This one is too"]), # 2 - lowercase letter starts second sentence ("This is not a test in one go. openSUSE is not written with a capital letter.", ["This is not a test in one go", "openSUSE is not written with a capital letter"]), # 3 - abbreviation in the middle of the sentence ("This is a sentence, e.g. for me.", ["This is a sentence, e.g. for me"]), # 4 - abbreviation at the start of the sentence ("E. g. this is a sentence.", ["E. g. this is a sentence"]), # 5 - abbreviation in the middle of sentence before a capital letter ("An above average chance stands e.g. Michael. Marta is also on the list.", ["An above average chance stands e.g. Michael", "Marta is also on the list"]), # 6 - sentences with parentheses around them ("(We speak in circles. We dance in code.)", ["We speak in circles", "We dance in code"]), # 6 - sentences with parentheses around them ("We speak in circles. (We dance in code.)", ["We speak in circles", "We dance in code"]), )) def test_sentencesegmenter(sentence, expected): """checks whether sentencesegmenter behaves sanely""" sentences = sentencesegmenter(sentence) assert sentences == expected
import pytest from sdsc import sentencesegmenter @pytest.mark.parametrize("sentence,expected", ( - # 1 + # 0 - a single simple sentence + ("This is a simple sentence.", + ["This is a simple sentence"]), + # 1 - two simple sentences - ("This is a simple ##@command-2## sentence. This one too.", + ("This is a simple ##@command-2## sentence. This one is too.", ? +++ - ["This is a simple ##@command-2## sentence", "This one too"]), + ["This is a simple ##@command-2## sentence", "This one is too"]), ? +++ - # 2 + # 2 - lowercase letter starts second sentence ("This is not a test in one go. openSUSE is not written with a capital letter.", ["This is not a test in one go", "openSUSE is not written with a capital letter"]), - # 3 + # 3 - abbreviation in the middle of the sentence ("This is a sentence, e.g. for me.", ["This is a sentence, e.g. for me"]), - # 4 + # 4 - abbreviation at the start of the sentence ("E. g. this is a sentence.", ["E. g. this is a sentence"]), - # 5 + # 5 - abbreviation in the middle of sentence before a capital letter ("An above average chance stands e.g. Michael. Marta is also on the list.", ["An above average chance stands e.g. Michael", "Marta is also on the list"]), - # Add more entries here: + # 6 - sentences with parentheses around them + ("(We speak in circles. We dance in code.)", + ["We speak in circles", + "We dance in code"]), + # 6 - sentences with parentheses around them + ("We speak in circles. (We dance in code.)", + ["We speak in circles", + "We dance in code"]), )) def test_sentencesegmenter(sentence, expected): - """checks whether sentencesegmenter behaves sane""" + """checks whether sentencesegmenter behaves sanely""" ? ++ sentences = sentencesegmenter(sentence) assert sentences == expected
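The sdsc segmenter's implementation is not part of this record; to show why test cases 3-5 exist, here is a deliberately naive splitter (names are illustrative, not from the project) that passes the simple cases but mis-splits abbreviations:

```python
import re

def naive_segmenter(text):
    # Split after sentence-ending punctuation, then drop the trailing
    # punctuation to mirror the expected outputs in the test above.
    parts = re.split(r"(?<=[.!?])\s+", text.strip())
    return [p.rstrip(".!?") for p in parts if p]

print(naive_segmenter("This is a simple sentence. This one is too."))
# ['This is a simple sentence', 'This one is too']

print(naive_segmenter("This is a sentence, e.g. for me."))
# ['This is a sentence, e.g', 'for me']  <- wrong; this is the failure
# mode the abbreviation test cases are written to catch.
```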
ebb5a2f56c691456b5b65b9448d11b113c4efa46
fedmsg/meta/announce.py
fedmsg/meta/announce.py
from fedmsg.meta.base import BaseProcessor class AnnounceProcessor(BaseProcessor): __name__ = "announce" __description__ = "Official Fedora Announcements" __link__ = "http://fedoraproject.org/" __docs__ = "http://fedoraproject.org/" __obj__ = "Announcements" def subtitle(self, msg, **config): return msg['msg']['message'] def link(self, msg, **config): return msg['msg']['link'] def usernames(self, msg, **config): return set([msg['username']])
from fedmsg.meta.base import BaseProcessor class AnnounceProcessor(BaseProcessor): __name__ = "announce" __description__ = "Official Fedora Announcements" __link__ = "http://fedoraproject.org/" __docs__ = "http://fedoraproject.org/" __obj__ = "Announcements" def subtitle(self, msg, **config): return msg['msg']['message'] def link(self, msg, **config): return msg['msg']['link'] def usernames(self, msg, **config): users = set() if 'username' in msg: users.update(set([msg['username']])) return users
Handle the situation where in an old message the 'username' key does not exist
Handle the situation where in an old message the 'username' key does not exist With this commit, processing an old message with fedmsg_meta will not break if that old message does not have the 'username' key.
Python
lgpl-2.1
chaiku/fedmsg,vivekanand1101/fedmsg,vivekanand1101/fedmsg,cicku/fedmsg,mathstuf/fedmsg,mathstuf/fedmsg,maxamillion/fedmsg,mathstuf/fedmsg,chaiku/fedmsg,fedora-infra/fedmsg,fedora-infra/fedmsg,pombredanne/fedmsg,pombredanne/fedmsg,cicku/fedmsg,maxamillion/fedmsg,chaiku/fedmsg,vivekanand1101/fedmsg,pombredanne/fedmsg,cicku/fedmsg,maxamillion/fedmsg,fedora-infra/fedmsg
from fedmsg.meta.base import BaseProcessor class AnnounceProcessor(BaseProcessor): __name__ = "announce" __description__ = "Official Fedora Announcements" __link__ = "http://fedoraproject.org/" __docs__ = "http://fedoraproject.org/" __obj__ = "Announcements" def subtitle(self, msg, **config): return msg['msg']['message'] def link(self, msg, **config): return msg['msg']['link'] def usernames(self, msg, **config): + users = set() + if 'username' in msg: - return set([msg['username']]) + users.update(set([msg['username']])) + return users
Handle the situation where in an old message the 'username' key does not exist
## Code Before: from fedmsg.meta.base import BaseProcessor class AnnounceProcessor(BaseProcessor): __name__ = "announce" __description__ = "Official Fedora Announcements" __link__ = "http://fedoraproject.org/" __docs__ = "http://fedoraproject.org/" __obj__ = "Announcements" def subtitle(self, msg, **config): return msg['msg']['message'] def link(self, msg, **config): return msg['msg']['link'] def usernames(self, msg, **config): return set([msg['username']]) ## Instruction: Handle the situation where in an old message the 'username' key does not exist ## Code After: from fedmsg.meta.base import BaseProcessor class AnnounceProcessor(BaseProcessor): __name__ = "announce" __description__ = "Official Fedora Announcements" __link__ = "http://fedoraproject.org/" __docs__ = "http://fedoraproject.org/" __obj__ = "Announcements" def subtitle(self, msg, **config): return msg['msg']['message'] def link(self, msg, **config): return msg['msg']['link'] def usernames(self, msg, **config): users = set() if 'username' in msg: users.update(set([msg['username']])) return users
from fedmsg.meta.base import BaseProcessor class AnnounceProcessor(BaseProcessor): __name__ = "announce" __description__ = "Official Fedora Announcements" __link__ = "http://fedoraproject.org/" __docs__ = "http://fedoraproject.org/" __obj__ = "Announcements" def subtitle(self, msg, **config): return msg['msg']['message'] def link(self, msg, **config): return msg['msg']['link'] def usernames(self, msg, **config): + users = set() + if 'username' in msg: - return set([msg['username']]) ? ^^^^^ + users.update(set([msg['username']])) ? +++++++ +++++++ ^ + + return users
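The patch guards with an explicit membership test; `dict.get` is an equivalent, slightly terser alternative for tolerating messages that predate the field. A sketch, not the project's code:

```python
def usernames(msg):
    # Older fedmsg messages predate the 'username' field; fall back to
    # an empty set instead of raising KeyError.
    user = msg.get('username')
    return {user} if user is not None else set()

print(usernames({'username': 'alice'}))  # {'alice'}
print(usernames({}))                     # set()
```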
f5234462c3bdacf91aad84df78bf750bf2035493
alfred_db/migrations/versions/4fdf1059c4ba_add_organizations_us.py
alfred_db/migrations/versions/4fdf1059c4ba_add_organizations_us.py
# revision identifiers, used by Alembic. revision = '4fdf1059c4ba' down_revision = '393a48ab5fc7' from alembic import op import sqlalchemy as sa def upgrade(): op.create_table('memberships', sa.Column('id', sa.Integer(), nullable=False), sa.Column('organization_id', sa.Integer(), nullable=True), sa.Column('user_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['organization_id'], ['organizations.id'], ), sa.ForeignKeyConstraint(['user_id'], ['users.id'], ), sa.PrimaryKeyConstraint('id') ) def downgrade(): op.drop_table('memberships')
# revision identifiers, used by Alembic. revision = '4fdf1059c4ba' down_revision = '393a48ab5fc7' from alembic import op import sqlalchemy as sa def upgrade(): op.create_table('memberships', sa.Column('id', sa.Integer(), nullable=False), sa.Column('organization_id', sa.Integer(), nullable=True), sa.Column('user_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint( ['organization_id'], ['organizations.id'], ondelete='CASCADE' ), sa.ForeignKeyConstraint( ['user_id'], ['users.id'], ondelete='CASCADE' ), sa.PrimaryKeyConstraint('id') ) def downgrade(): op.drop_table('memberships')
Fix membership table creation migration
Fix membership table creation migration
Python
isc
alfredhq/alfred-db
# revision identifiers, used by Alembic. revision = '4fdf1059c4ba' down_revision = '393a48ab5fc7' from alembic import op import sqlalchemy as sa def upgrade(): op.create_table('memberships', sa.Column('id', sa.Integer(), nullable=False), sa.Column('organization_id', sa.Integer(), nullable=True), sa.Column('user_id', sa.Integer(), nullable=True), - sa.ForeignKeyConstraint(['organization_id'], ['organizations.id'], ), - sa.ForeignKeyConstraint(['user_id'], ['users.id'], ), + sa.ForeignKeyConstraint( + ['organization_id'], ['organizations.id'], ondelete='CASCADE' + ), + sa.ForeignKeyConstraint( + ['user_id'], ['users.id'], ondelete='CASCADE' + ), sa.PrimaryKeyConstraint('id') ) def downgrade(): op.drop_table('memberships')
Fix membership table creation migration
## Code Before: # revision identifiers, used by Alembic. revision = '4fdf1059c4ba' down_revision = '393a48ab5fc7' from alembic import op import sqlalchemy as sa def upgrade(): op.create_table('memberships', sa.Column('id', sa.Integer(), nullable=False), sa.Column('organization_id', sa.Integer(), nullable=True), sa.Column('user_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['organization_id'], ['organizations.id'], ), sa.ForeignKeyConstraint(['user_id'], ['users.id'], ), sa.PrimaryKeyConstraint('id') ) def downgrade(): op.drop_table('memberships') ## Instruction: Fix membership table creation migration ## Code After: # revision identifiers, used by Alembic. revision = '4fdf1059c4ba' down_revision = '393a48ab5fc7' from alembic import op import sqlalchemy as sa def upgrade(): op.create_table('memberships', sa.Column('id', sa.Integer(), nullable=False), sa.Column('organization_id', sa.Integer(), nullable=True), sa.Column('user_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint( ['organization_id'], ['organizations.id'], ondelete='CASCADE' ), sa.ForeignKeyConstraint( ['user_id'], ['users.id'], ondelete='CASCADE' ), sa.PrimaryKeyConstraint('id') ) def downgrade(): op.drop_table('memberships')
# revision identifiers, used by Alembic. revision = '4fdf1059c4ba' down_revision = '393a48ab5fc7' from alembic import op import sqlalchemy as sa def upgrade(): op.create_table('memberships', sa.Column('id', sa.Integer(), nullable=False), sa.Column('organization_id', sa.Integer(), nullable=True), sa.Column('user_id', sa.Integer(), nullable=True), - sa.ForeignKeyConstraint(['organization_id'], ['organizations.id'], ), - sa.ForeignKeyConstraint(['user_id'], ['users.id'], ), + sa.ForeignKeyConstraint( + ['organization_id'], ['organizations.id'], ondelete='CASCADE' + ), + sa.ForeignKeyConstraint( + ['user_id'], ['users.id'], ondelete='CASCADE' + ), sa.PrimaryKeyConstraint('id') ) def downgrade(): op.drop_table('memberships')
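For context on what `ondelete='CASCADE'` buys here: on backends that enforce foreign-key actions, deleting a user or organization now removes its membership rows at the database level. A standalone SQLAlchemy sketch (table and column names taken from the migration, the rest illustrative) shows the clause appearing in the emitted DDL:

```python
import sqlalchemy as sa
from sqlalchemy.schema import CreateTable

metadata = sa.MetaData()
users = sa.Table('users', metadata,
                 sa.Column('id', sa.Integer, primary_key=True))
memberships = sa.Table(
    'memberships', metadata,
    sa.Column('id', sa.Integer, primary_key=True),
    sa.Column('user_id', sa.Integer,
              sa.ForeignKey('users.id', ondelete='CASCADE')),
)

# The generic DDL now carries the cascade action:
# ... FOREIGN KEY(user_id) REFERENCES users (id) ON DELETE CASCADE
print(CreateTable(memberships))
```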
a9052428e1eee8ec566bd496e1247dae0873d9c9
test/wordfilter_test.py
test/wordfilter_test.py
import nose from lib.wordfilter import Wordfilter # Run with `python -m test.wordfilter_test` class Wordfilter_test: def setup(self): self.wordfilter = Wordfilter() def teardown(self): self.wordfilter = [] def test_loading(self): assert type(self.wordfilter.blacklist) is list def test_badWords(self): assert self.wordfilter.blacklisted('this string contains the word skank') assert self.wordfilter.blacklisted('this string contains the word SkAnK') assert self.wordfilter.blacklisted('this string contains the wordskank') assert self.wordfilter.blacklisted('this string contains the skankword') assert not self.wordfilter.blacklisted('this string is clean!') def test_addWords(self): self.wordfilter.addWords(['clean']) assert self.wordfilter.blacklisted('this string contains the word skank') assert self.wordfilter.blacklisted('this string is clean!') def test_clearList(self): self.wordfilter.clearList(); assert not self.wordfilter.blacklisted('this string contains the word skank') self.wordfilter.addWords(['skank']) assert self.wordfilter.blacklisted('this string contains the word skank') if __name__ == "__main__": nose.main()
import nose from lib.wordfilter import Wordfilter # Run with `python -m test.wordfilter_test` class Wordfilter_test: def setup(self): self.wordfilter = Wordfilter() def teardown(self): self.wordfilter = [] def test_loading(self): assert type(self.wordfilter.blacklist) is list def test_badWords(self): assert self.wordfilter.blacklisted('this string contains the word skank') assert self.wordfilter.blacklisted('this string contains the word SkAnK') assert self.wordfilter.blacklisted('this string contains the wordskank') assert self.wordfilter.blacklisted('this string contains the skankword') assert not self.wordfilter.blacklisted('this string is clean!') def test_addWords(self): self.wordfilter.addWords(['clean']) assert self.wordfilter.blacklisted('this string contains the word skank') assert self.wordfilter.blacklisted('this string is clean!') def test_clearList(self): self.wordfilter.clearList() assert not self.wordfilter.blacklisted('this string contains the word skank') self.wordfilter.addWords(['skank']) assert self.wordfilter.blacklisted('this string contains the word skank') def test_add_multiple_words(self): # Arrange self.wordfilter.clearList() # Act self.wordfilter.addWords(['zebra','elephant']) # Assert assert self.wordfilter.blacklisted('this string has zebra in it') assert self.wordfilter.blacklisted('this string has elephant in it') assert not self.wordfilter.blacklisted('this string has nothing in it') if __name__ == "__main__": nose.main()
Add another test case - add multiple words
Add another test case - add multiple words
Python
mit
dariusk/wordfilter,dariusk/wordfilter,mwatson/wordfilter,hugovk/wordfilter,mwatson/wordfilter,dariusk/wordfilter,hugovk/wordfilter,mwatson/wordfilter,hugovk/wordfilter,dariusk/wordfilter,hugovk/wordfilter,mwatson/wordfilter
import nose from lib.wordfilter import Wordfilter # Run with `python -m test.wordfilter_test` class Wordfilter_test: def setup(self): self.wordfilter = Wordfilter() def teardown(self): self.wordfilter = [] def test_loading(self): assert type(self.wordfilter.blacklist) is list def test_badWords(self): assert self.wordfilter.blacklisted('this string contains the word skank') assert self.wordfilter.blacklisted('this string contains the word SkAnK') assert self.wordfilter.blacklisted('this string contains the wordskank') assert self.wordfilter.blacklisted('this string contains the skankword') assert not self.wordfilter.blacklisted('this string is clean!') def test_addWords(self): self.wordfilter.addWords(['clean']) assert self.wordfilter.blacklisted('this string contains the word skank') assert self.wordfilter.blacklisted('this string is clean!') def test_clearList(self): - self.wordfilter.clearList(); + self.wordfilter.clearList() assert not self.wordfilter.blacklisted('this string contains the word skank') self.wordfilter.addWords(['skank']) assert self.wordfilter.blacklisted('this string contains the word skank') + def test_add_multiple_words(self): + # Arrange + self.wordfilter.clearList() + + # Act + self.wordfilter.addWords(['zebra','elephant']) + + # Assert + assert self.wordfilter.blacklisted('this string has zebra in it') + assert self.wordfilter.blacklisted('this string has elephant in it') + assert not self.wordfilter.blacklisted('this string has nothing in it') + if __name__ == "__main__": nose.main()
Add another test case - add multiple words
## Code Before: import nose from lib.wordfilter import Wordfilter # Run with `python -m test.wordfilter_test` class Wordfilter_test: def setup(self): self.wordfilter = Wordfilter() def teardown(self): self.wordfilter = [] def test_loading(self): assert type(self.wordfilter.blacklist) is list def test_badWords(self): assert self.wordfilter.blacklisted('this string contains the word skank') assert self.wordfilter.blacklisted('this string contains the word SkAnK') assert self.wordfilter.blacklisted('this string contains the wordskank') assert self.wordfilter.blacklisted('this string contains the skankword') assert not self.wordfilter.blacklisted('this string is clean!') def test_addWords(self): self.wordfilter.addWords(['clean']) assert self.wordfilter.blacklisted('this string contains the word skank') assert self.wordfilter.blacklisted('this string is clean!') def test_clearList(self): self.wordfilter.clearList(); assert not self.wordfilter.blacklisted('this string contains the word skank') self.wordfilter.addWords(['skank']) assert self.wordfilter.blacklisted('this string contains the word skank') if __name__ == "__main__": nose.main() ## Instruction: Add another test case - add multiple words ## Code After: import nose from lib.wordfilter import Wordfilter # Run with `python -m test.wordfilter_test` class Wordfilter_test: def setup(self): self.wordfilter = Wordfilter() def teardown(self): self.wordfilter = [] def test_loading(self): assert type(self.wordfilter.blacklist) is list def test_badWords(self): assert self.wordfilter.blacklisted('this string contains the word skank') assert self.wordfilter.blacklisted('this string contains the word SkAnK') assert self.wordfilter.blacklisted('this string contains the wordskank') assert self.wordfilter.blacklisted('this string contains the skankword') assert not self.wordfilter.blacklisted('this string is clean!') def test_addWords(self): self.wordfilter.addWords(['clean']) assert self.wordfilter.blacklisted('this string contains the word skank') assert self.wordfilter.blacklisted('this string is clean!') def test_clearList(self): self.wordfilter.clearList() assert not self.wordfilter.blacklisted('this string contains the word skank') self.wordfilter.addWords(['skank']) assert self.wordfilter.blacklisted('this string contains the word skank') def test_add_multiple_words(self): # Arrange self.wordfilter.clearList() # Act self.wordfilter.addWords(['zebra','elephant']) # Assert assert self.wordfilter.blacklisted('this string has zebra in it') assert self.wordfilter.blacklisted('this string has elephant in it') assert not self.wordfilter.blacklisted('this string has nothing in it') if __name__ == "__main__": nose.main()
import nose from lib.wordfilter import Wordfilter # Run with `python -m test.wordfilter_test` class Wordfilter_test: def setup(self): self.wordfilter = Wordfilter() def teardown(self): self.wordfilter = [] def test_loading(self): assert type(self.wordfilter.blacklist) is list def test_badWords(self): assert self.wordfilter.blacklisted('this string contains the word skank') assert self.wordfilter.blacklisted('this string contains the word SkAnK') assert self.wordfilter.blacklisted('this string contains the wordskank') assert self.wordfilter.blacklisted('this string contains the skankword') assert not self.wordfilter.blacklisted('this string is clean!') def test_addWords(self): self.wordfilter.addWords(['clean']) assert self.wordfilter.blacklisted('this string contains the word skank') assert self.wordfilter.blacklisted('this string is clean!') def test_clearList(self): - self.wordfilter.clearList(); ? - + self.wordfilter.clearList() assert not self.wordfilter.blacklisted('this string contains the word skank') self.wordfilter.addWords(['skank']) assert self.wordfilter.blacklisted('this string contains the word skank') + def test_add_multiple_words(self): + # Arrange + self.wordfilter.clearList() + + # Act + self.wordfilter.addWords(['zebra','elephant']) + + # Assert + assert self.wordfilter.blacklisted('this string has zebra in it') + assert self.wordfilter.blacklisted('this string has elephant in it') + assert not self.wordfilter.blacklisted('this string has nothing in it') + if __name__ == "__main__": nose.main()
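The Wordfilter class itself isn't shown in this record; judging from the assertions (case-insensitive substring matching, a mutable blacklist), a minimal stand-in that would satisfy these tests could look like the following. Purely illustrative:

```python
class NaiveWordfilter:
    def __init__(self, blacklist=None):
        self.blacklist = [w.lower() for w in (blacklist or [])]

    def addWords(self, words):
        self.blacklist.extend(w.lower() for w in words)

    def clearList(self):
        self.blacklist = []

    def blacklisted(self, text):
        # Case-insensitive substring match, so 'wordskank' is caught too.
        lowered = text.lower()
        return any(word in lowered for word in self.blacklist)

f = NaiveWordfilter(['skank'])
assert f.blacklisted('this string contains the word SkAnK')
f.addWords(['zebra', 'elephant'])
assert f.blacklisted('this string has zebra in it')
assert not f.blacklisted('this string has nothing in it')
```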
8ef9618850794dd499617bb28b5044336f155568
python/setup_fsurfer.py
python/setup_fsurfer.py
from distutils.core import setup setup(name='fsurfer-libs', version='PKG_VERSION', description='Python module to help create freesurfer workflows', author='Suchandra Thapa', author_email='[email protected]', url='https://github.com/OSGConnect/freesurfer_workflow', packages=['fsurfer'], data_files=[('/usr/share/fsurfer/scripts', ["bash/autorecon1.sh", "bash/autorecon2.sh", "bash/autorecon2-whole.sh", "bash/autorecon3.sh", "bash/autorecon-all.sh", "bash/freesurfer-process.sh"])], license='Apache 2.0')
from distutils.core import setup setup(name='fsurfer-libs', version='PKG_VERSION', description='Python module to help create freesurfer workflows', author='Suchandra Thapa', author_email='[email protected]', url='https://github.com/OSGConnect/freesurfer_workflow', packages=['fsurfer'], data_files=[('/usr/share/fsurfer/scripts', ["bash/autorecon1.sh", "bash/autorecon2.sh", "bash/autorecon2-whole.sh", "bash/autorecon3.sh", "bash/autorecon1-options.sh", "bash/autorecon2-options.sh", "bash/autorecon3-options.sh", "bash/autorecon-all.sh", "bash/freesurfer-process.sh"])], license='Apache 2.0')
Update setup to include new scripts
Update setup to include new scripts
Python
apache-2.0
OSGConnect/freesurfer_workflow,OSGConnect/freesurfer_workflow
from distutils.core import setup setup(name='fsurfer-libs', version='PKG_VERSION', description='Python module to help create freesurfer workflows', author='Suchandra Thapa', author_email='[email protected]', url='https://github.com/OSGConnect/freesurfer_workflow', packages=['fsurfer'], data_files=[('/usr/share/fsurfer/scripts', ["bash/autorecon1.sh", "bash/autorecon2.sh", "bash/autorecon2-whole.sh", "bash/autorecon3.sh", + "bash/autorecon1-options.sh", + "bash/autorecon2-options.sh", + "bash/autorecon3-options.sh", "bash/autorecon-all.sh", "bash/freesurfer-process.sh"])], license='Apache 2.0')
Update setup to include new scripts
## Code Before: from distutils.core import setup setup(name='fsurfer-libs', version='PKG_VERSION', description='Python module to help create freesurfer workflows', author='Suchandra Thapa', author_email='[email protected]', url='https://github.com/OSGConnect/freesurfer_workflow', packages=['fsurfer'], data_files=[('/usr/share/fsurfer/scripts', ["bash/autorecon1.sh", "bash/autorecon2.sh", "bash/autorecon2-whole.sh", "bash/autorecon3.sh", "bash/autorecon-all.sh", "bash/freesurfer-process.sh"])], license='Apache 2.0') ## Instruction: Update setup to include new scripts ## Code After: from distutils.core import setup setup(name='fsurfer-libs', version='PKG_VERSION', description='Python module to help create freesurfer workflows', author='Suchandra Thapa', author_email='[email protected]', url='https://github.com/OSGConnect/freesurfer_workflow', packages=['fsurfer'], data_files=[('/usr/share/fsurfer/scripts', ["bash/autorecon1.sh", "bash/autorecon2.sh", "bash/autorecon2-whole.sh", "bash/autorecon3.sh", "bash/autorecon1-options.sh", "bash/autorecon2-options.sh", "bash/autorecon3-options.sh", "bash/autorecon-all.sh", "bash/freesurfer-process.sh"])], license='Apache 2.0')
from distutils.core import setup setup(name='fsurfer-libs', version='PKG_VERSION', description='Python module to help create freesurfer workflows', author='Suchandra Thapa', author_email='[email protected]', url='https://github.com/OSGConnect/freesurfer_workflow', packages=['fsurfer'], data_files=[('/usr/share/fsurfer/scripts', ["bash/autorecon1.sh", "bash/autorecon2.sh", "bash/autorecon2-whole.sh", "bash/autorecon3.sh", + "bash/autorecon1-options.sh", + "bash/autorecon2-options.sh", + "bash/autorecon3-options.sh", "bash/autorecon-all.sh", "bash/freesurfer-process.sh"])], license='Apache 2.0')
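Hand-maintained `data_files` lists like this one drift easily, which is exactly what this commit patches up. One hedged alternative is to glob the script directory at build time, assuming every `bash/*.sh` is meant to ship:

```python
import glob
from distutils.core import setup  # setuptools.setup accepts the same args

setup(
    name='fsurfer-libs',
    version='PKG_VERSION',
    packages=['fsurfer'],
    # Picks up new scripts automatically instead of naming each one.
    data_files=[('/usr/share/fsurfer/scripts',
                 sorted(glob.glob('bash/*.sh')))],
)
```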
cc3f28e74145729c8b572fd9d2ed04d8fb297360
Testing/TestDICOMPython.py
Testing/TestDICOMPython.py
import sys import vtk import vtkDICOMPython # put everything into the vtk namespace for a in dir(vtkDICOMPython): if a[0] != '_': setattr(vtk, a, getattr(vtkDICOMPython, a)) m = vtk.vtkDICOMMetaData() m.SetAttributeValue(vtk.vtkDICOMTag(0x0008, 0x0005), 'ISO_IR 100') v = m.GetAttributeValue(vtk.vtkDICOMTag(0x0008, 0x0005)) if v.AsString() != 'ISO_IR 100': sys.exit(1)
import sys import vtk import vtkDICOMPython # put everything into the vtk namespace for a in dir(vtkDICOMPython): if a[0] != '_': setattr(vtk, a, getattr(vtkDICOMPython, a)) m = vtk.vtkDICOMMetaData() if vtk.vtkVersion.GetVTKMajorVersion() < 6: sys.stderr.write("This test requires VTK 6 or higher.\n"); sys.exit(0) m.SetAttributeValue(vtk.vtkDICOMTag(0x0008, 0x0005), 'ISO_IR 100') v = m.GetAttributeValue(vtk.vtkDICOMTag(0x0008, 0x0005)) if v.AsString() != 'ISO_IR 100': sys.exit(1)
Modify python test for VTK 5.
Modify python test for VTK 5.
Python
bsd-3-clause
dgobbi/vtk-dicom,dgobbi/vtk-dicom,hendradarwin/vtk-dicom,dgobbi/vtk-dicom,hendradarwin/vtk-dicom,hendradarwin/vtk-dicom
import sys import vtk import vtkDICOMPython # put everything into the vtk namespace for a in dir(vtkDICOMPython): if a[0] != '_': setattr(vtk, a, getattr(vtkDICOMPython, a)) m = vtk.vtkDICOMMetaData() + + if vtk.vtkVersion.GetVTKMajorVersion() < 6: + sys.stderr.write("This test requires VTK 6 or higher.\n"); + sys.exit(0) + m.SetAttributeValue(vtk.vtkDICOMTag(0x0008, 0x0005), 'ISO_IR 100') v = m.GetAttributeValue(vtk.vtkDICOMTag(0x0008, 0x0005)) if v.AsString() != 'ISO_IR 100': sys.exit(1)
Modify python test for VTK 5.
## Code Before: import sys import vtk import vtkDICOMPython # put everything into the vtk namespace for a in dir(vtkDICOMPython): if a[0] != '_': setattr(vtk, a, getattr(vtkDICOMPython, a)) m = vtk.vtkDICOMMetaData() m.SetAttributeValue(vtk.vtkDICOMTag(0x0008, 0x0005), 'ISO_IR 100') v = m.GetAttributeValue(vtk.vtkDICOMTag(0x0008, 0x0005)) if v.AsString() != 'ISO_IR 100': sys.exit(1) ## Instruction: Modify python test for VTK 5. ## Code After: import sys import vtk import vtkDICOMPython # put everything into the vtk namespace for a in dir(vtkDICOMPython): if a[0] != '_': setattr(vtk, a, getattr(vtkDICOMPython, a)) m = vtk.vtkDICOMMetaData() if vtk.vtkVersion.GetVTKMajorVersion() < 6: sys.stderr.write("This test requires VTK 6 or higher.\n"); sys.exit(0) m.SetAttributeValue(vtk.vtkDICOMTag(0x0008, 0x0005), 'ISO_IR 100') v = m.GetAttributeValue(vtk.vtkDICOMTag(0x0008, 0x0005)) if v.AsString() != 'ISO_IR 100': sys.exit(1)
import sys import vtk import vtkDICOMPython # put everything into the vtk namespace for a in dir(vtkDICOMPython): if a[0] != '_': setattr(vtk, a, getattr(vtkDICOMPython, a)) m = vtk.vtkDICOMMetaData() + + if vtk.vtkVersion.GetVTKMajorVersion() < 6: + sys.stderr.write("This test requires VTK 6 or higher.\n"); + sys.exit(0) + m.SetAttributeValue(vtk.vtkDICOMTag(0x0008, 0x0005), 'ISO_IR 100') v = m.GetAttributeValue(vtk.vtkDICOMTag(0x0008, 0x0005)) if v.AsString() != 'ISO_IR 100': sys.exit(1)
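The gate pattern generalizes: probe the runtime version and exit with status 0 so the harness records a pass/skip instead of a failure on older builds. A reusable helper, assuming the vtk module is importable:

```python
import sys

def require_vtk_major(minimum):
    import vtk
    if vtk.vtkVersion.GetVTKMajorVersion() < minimum:
        # Exit code 0: the test harness treats this as a pass/skip
        # rather than a failure on older VTK builds.
        sys.stderr.write(
            "This test requires VTK %d or higher.\n" % minimum)
        sys.exit(0)

require_vtk_major(6)
# ... the rest of the test runs only on VTK >= 6
```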
1dd681517fd1831f3990caa043ea8220f5d1bb90
app/app.py
app/app.py
import os,time,asyncio,json from datetime import datetime from aiohttp import web import logging;logging.basicConfig(level=logging.INFO) from tools.log import Log from tools.httptools import Middleware,Route from tools.template import Template from models import * from tools.config import Config @Route.get('/') def index(): user=yield from User.findall() print(user) return Template.render('index.html') @Route.get('/user/{id}/comment/{comment}') def user(id,comment): return '<h1>%s,%s</h1>'%(id,comment) @asyncio.coroutine def init(loop): print(Middleware.allmiddlewares()) app=web.Application(loop=loop,middlewares=Middleware.allmiddlewares()) Template(app) Route.register_route(app) pool=yield from create_pool(loop) srv=yield from loop.create_server(app.make_handler(),'127.0.0.1',8000) logging.info('server started at http://127.0.0.1:8000') Log.info("server startd at http://127.0.0.1:8000") return srv if __name__=="__main__": loop=asyncio.get_event_loop() loop.run_until_complete(init(loop)) loop.run_forever()
import os,time,asyncio,json from datetime import datetime from aiohttp import web import logging;logging.basicConfig(level=logging.INFO) from tools.log import Log from tools.httptools import Middleware,Route from tools.template import Template from models import * from tools.config import Config @Route.get('/') def index(): user=yield from User.findall() print(user) return Template('index.html').render() @Route.get('/user/{id}/comment/{comment}') def user(id,comment): return '<h1>%s,%s</h1>'%(id,comment) @asyncio.coroutine def init(loop): print(Middleware.allmiddlewares()) app=web.Application(loop=loop,middlewares=Middleware.allmiddlewares()) Template.init(app) Route.register_route(app) pool=yield from create_pool(loop) srv=yield from loop.create_server(app.make_handler(),'127.0.0.1',8000) logging.info('server started at http://127.0.0.1:8000') Log.info("server startd at http://127.0.0.1:8000") return srv if __name__=="__main__": loop=asyncio.get_event_loop() loop.run_until_complete(init(loop)) loop.run_forever()
Change Template() to Template.init() in init function
Change Template() to Template.init() in init function
Python
mit
free-free/pyblog,free-free/pyblog,free-free/pyblog,free-free/pyblog
import os,time,asyncio,json from datetime import datetime from aiohttp import web import logging;logging.basicConfig(level=logging.INFO) from tools.log import Log from tools.httptools import Middleware,Route from tools.template import Template from models import * from tools.config import Config @Route.get('/') def index(): user=yield from User.findall() print(user) - return Template.render('index.html') + return Template('index.html').render() @Route.get('/user/{id}/comment/{comment}') def user(id,comment): return '<h1>%s,%s</h1>'%(id,comment) @asyncio.coroutine def init(loop): print(Middleware.allmiddlewares()) app=web.Application(loop=loop,middlewares=Middleware.allmiddlewares()) - Template(app) + Template.init(app) Route.register_route(app) pool=yield from create_pool(loop) srv=yield from loop.create_server(app.make_handler(),'127.0.0.1',8000) logging.info('server started at http://127.0.0.1:8000') Log.info("server startd at http://127.0.0.1:8000") return srv if __name__=="__main__": loop=asyncio.get_event_loop() loop.run_until_complete(init(loop)) loop.run_forever()
Change Template() to Template.init() in init function
## Code Before: import os,time,asyncio,json from datetime import datetime from aiohttp import web import logging;logging.basicConfig(level=logging.INFO) from tools.log import Log from tools.httptools import Middleware,Route from tools.template import Template from models import * from tools.config import Config @Route.get('/') def index(): user=yield from User.findall() print(user) return Template.render('index.html') @Route.get('/user/{id}/comment/{comment}') def user(id,comment): return '<h1>%s,%s</h1>'%(id,comment) @asyncio.coroutine def init(loop): print(Middleware.allmiddlewares()) app=web.Application(loop=loop,middlewares=Middleware.allmiddlewares()) Template(app) Route.register_route(app) pool=yield from create_pool(loop) srv=yield from loop.create_server(app.make_handler(),'127.0.0.1',8000) logging.info('server started at http://127.0.0.1:8000') Log.info("server startd at http://127.0.0.1:8000") return srv if __name__=="__main__": loop=asyncio.get_event_loop() loop.run_until_complete(init(loop)) loop.run_forever() ## Instruction: Change Template() to Template.init() in init function ## Code After: import os,time,asyncio,json from datetime import datetime from aiohttp import web import logging;logging.basicConfig(level=logging.INFO) from tools.log import Log from tools.httptools import Middleware,Route from tools.template import Template from models import * from tools.config import Config @Route.get('/') def index(): user=yield from User.findall() print(user) return Template('index.html').render() @Route.get('/user/{id}/comment/{comment}') def user(id,comment): return '<h1>%s,%s</h1>'%(id,comment) @asyncio.coroutine def init(loop): print(Middleware.allmiddlewares()) app=web.Application(loop=loop,middlewares=Middleware.allmiddlewares()) Template.init(app) Route.register_route(app) pool=yield from create_pool(loop) srv=yield from loop.create_server(app.make_handler(),'127.0.0.1',8000) logging.info('server started at http://127.0.0.1:8000') Log.info("server startd at http://127.0.0.1:8000") return srv if __name__=="__main__": loop=asyncio.get_event_loop() loop.run_until_complete(init(loop)) loop.run_forever()
import os,time,asyncio,json from datetime import datetime from aiohttp import web import logging;logging.basicConfig(level=logging.INFO) from tools.log import Log from tools.httptools import Middleware,Route from tools.template import Template from models import * from tools.config import Config @Route.get('/') def index(): user=yield from User.findall() print(user) - return Template.render('index.html') ? ------- + return Template('index.html').render() ? +++++++++ @Route.get('/user/{id}/comment/{comment}') def user(id,comment): return '<h1>%s,%s</h1>'%(id,comment) @asyncio.coroutine def init(loop): print(Middleware.allmiddlewares()) app=web.Application(loop=loop,middlewares=Middleware.allmiddlewares()) - Template(app) + Template.init(app) ? +++++ Route.register_route(app) pool=yield from create_pool(loop) srv=yield from loop.create_server(app.make_handler(),'127.0.0.1',8000) logging.info('server started at http://127.0.0.1:8000') Log.info("server startd at http://127.0.0.1:8000") return srv if __name__=="__main__": loop=asyncio.get_event_loop() loop.run_until_complete(init(loop)) loop.run_forever()
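The commit separates app-level setup (`Template.init(app)`) from per-request rendering (`Template('index.html').render()`). Since tools.template isn't shown, here is a minimal sketch of that shape with a throwaway in-memory "loader"; a real version would bind a template engine to the app instead:

```python
class Template:
    _templates = {}

    @classmethod
    def init(cls, app):
        # One-time setup at server start; a real implementation would
        # point a template environment at the app's template directory.
        cls._templates = {'index.html': '<h1>{title}</h1>'}

    def __init__(self, name):
        self._name = name

    def render(self, **context):
        return Template._templates[self._name].format(**context)

Template.init(app=None)                            # during init(loop)
print(Template('index.html').render(title='hi'))   # per request
```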
d413747e996326e62fdf942426f170f66d5acb7c
osf_tests/test_preprint_summary.py
osf_tests/test_preprint_summary.py
import datetime from osf_tests.factories import PreprintFactory, PreprintProviderFactory from osf.models import PreprintService from nose.tools import * # PEP8 asserts import mock import pytest import pytz import requests from scripts.analytics.preprint_summary import PreprintSummary @pytest.fixture() def preprint_provider(): return PreprintProviderFactory(name='Test 1') @pytest.fixture() def preprint(preprint_provider): return PreprintFactory._build(PreprintService, provider=preprint_provider) pytestmark = pytest.mark.django_db class TestPreprintCount: def test_get_preprint_count(self, preprint_provider, preprint): requests.post = mock.MagicMock() resp = requests.Response() resp._content = '{"hits" : {"total" : 1}}' requests.post.return_value = resp field = PreprintService._meta.get_field('date_created') field.auto_now_add = False # We have to fudge the time because Keen doesn't allow same day queries. date = datetime.datetime.utcnow() - datetime.timedelta(1) preprint.date_created = date - datetime.timedelta(0.1) preprint.save() field.auto_now_add = True results = PreprintSummary().get_events(date.date()) assert_equal(len(results), 1) data = results[0] assert_equal(data['provider']['name'], 'Test 1') assert_equal(data['provider']['total'], 1)
import datetime from osf_tests.factories import PreprintFactory, PreprintProviderFactory from osf.models import PreprintService from nose.tools import * # PEP8 asserts import mock import pytest import pytz import requests from scripts.analytics.preprint_summary import PreprintSummary @pytest.fixture() def preprint_provider(): return PreprintProviderFactory(name='Test 1') @pytest.fixture() def preprint(preprint_provider): return PreprintFactory._build(PreprintService, provider=preprint_provider) pytestmark = pytest.mark.django_db class TestPreprintCount: def test_get_preprint_count(self, preprint): requests.post = mock.MagicMock() resp = requests.Response() resp._content = '{"hits" : {"total" : 1}}' requests.post.return_value = resp field = PreprintService._meta.get_field('date_created') field.auto_now_add = False # We have to fudge the time because Keen doesn't allow same day queries. date = datetime.datetime.utcnow() - datetime.timedelta(days=1, hours=1) preprint.date_created = date - datetime.timedelta(hours=1) preprint.save() field.auto_now_add = True results = PreprintSummary().get_events(date.date()) assert_equal(len(results), 1) data = results[0] assert_equal(data['provider']['name'], 'Test 1') assert_equal(data['provider']['total'], 1)
Make sure test dates are rounded properly by making sure they are over a day in the past.
Make sure test dates are rounded properly by making sure they are over a day in the past.
Python
apache-2.0
binoculars/osf.io,TomBaxter/osf.io,sloria/osf.io,adlius/osf.io,Johnetordoff/osf.io,mattclark/osf.io,adlius/osf.io,sloria/osf.io,HalcyonChimera/osf.io,adlius/osf.io,laurenrevere/osf.io,erinspace/osf.io,sloria/osf.io,binoculars/osf.io,mfraezz/osf.io,TomBaxter/osf.io,icereval/osf.io,Johnetordoff/osf.io,pattisdr/osf.io,crcresearch/osf.io,adlius/osf.io,felliott/osf.io,caseyrollins/osf.io,crcresearch/osf.io,aaxelb/osf.io,HalcyonChimera/osf.io,leb2dg/osf.io,chennan47/osf.io,cslzchen/osf.io,cslzchen/osf.io,Johnetordoff/osf.io,leb2dg/osf.io,erinspace/osf.io,leb2dg/osf.io,brianjgeiger/osf.io,caseyrollins/osf.io,CenterForOpenScience/osf.io,aaxelb/osf.io,cslzchen/osf.io,chennan47/osf.io,CenterForOpenScience/osf.io,HalcyonChimera/osf.io,mattclark/osf.io,felliott/osf.io,felliott/osf.io,laurenrevere/osf.io,mfraezz/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,mfraezz/osf.io,pattisdr/osf.io,pattisdr/osf.io,icereval/osf.io,laurenrevere/osf.io,baylee-d/osf.io,chennan47/osf.io,TomBaxter/osf.io,aaxelb/osf.io,binoculars/osf.io,crcresearch/osf.io,Johnetordoff/osf.io,aaxelb/osf.io,cslzchen/osf.io,felliott/osf.io,mfraezz/osf.io,saradbowman/osf.io,CenterForOpenScience/osf.io,mattclark/osf.io,baylee-d/osf.io,brianjgeiger/osf.io,erinspace/osf.io,brianjgeiger/osf.io,icereval/osf.io,caseyrollins/osf.io,CenterForOpenScience/osf.io,baylee-d/osf.io,leb2dg/osf.io,saradbowman/osf.io
import datetime from osf_tests.factories import PreprintFactory, PreprintProviderFactory from osf.models import PreprintService from nose.tools import * # PEP8 asserts import mock import pytest import pytz import requests from scripts.analytics.preprint_summary import PreprintSummary @pytest.fixture() def preprint_provider(): return PreprintProviderFactory(name='Test 1') @pytest.fixture() def preprint(preprint_provider): return PreprintFactory._build(PreprintService, provider=preprint_provider) pytestmark = pytest.mark.django_db class TestPreprintCount: - def test_get_preprint_count(self, preprint_provider, preprint): + def test_get_preprint_count(self, preprint): requests.post = mock.MagicMock() resp = requests.Response() resp._content = '{"hits" : {"total" : 1}}' requests.post.return_value = resp field = PreprintService._meta.get_field('date_created') field.auto_now_add = False # We have to fudge the time because Keen doesn't allow same day queries. - date = datetime.datetime.utcnow() - datetime.timedelta(1) + date = datetime.datetime.utcnow() - datetime.timedelta(days=1, hours=1) - preprint.date_created = date - datetime.timedelta(0.1) + preprint.date_created = date - datetime.timedelta(hours=1) preprint.save() field.auto_now_add = True results = PreprintSummary().get_events(date.date()) assert_equal(len(results), 1) data = results[0] assert_equal(data['provider']['name'], 'Test 1') assert_equal(data['provider']['total'], 1)
Make sure test dates are rounded properly by making sure they are over a day in the past.
## Code Before: import datetime from osf_tests.factories import PreprintFactory, PreprintProviderFactory from osf.models import PreprintService from nose.tools import * # PEP8 asserts import mock import pytest import pytz import requests from scripts.analytics.preprint_summary import PreprintSummary @pytest.fixture() def preprint_provider(): return PreprintProviderFactory(name='Test 1') @pytest.fixture() def preprint(preprint_provider): return PreprintFactory._build(PreprintService, provider=preprint_provider) pytestmark = pytest.mark.django_db class TestPreprintCount: def test_get_preprint_count(self, preprint_provider, preprint): requests.post = mock.MagicMock() resp = requests.Response() resp._content = '{"hits" : {"total" : 1}}' requests.post.return_value = resp field = PreprintService._meta.get_field('date_created') field.auto_now_add = False # We have to fudge the time because Keen doesn't allow same day queries. date = datetime.datetime.utcnow() - datetime.timedelta(1) preprint.date_created = date - datetime.timedelta(0.1) preprint.save() field.auto_now_add = True results = PreprintSummary().get_events(date.date()) assert_equal(len(results), 1) data = results[0] assert_equal(data['provider']['name'], 'Test 1') assert_equal(data['provider']['total'], 1) ## Instruction: Make sure test dates are rounded properly by making sure they are over a day in the past. ## Code After: import datetime from osf_tests.factories import PreprintFactory, PreprintProviderFactory from osf.models import PreprintService from nose.tools import * # PEP8 asserts import mock import pytest import pytz import requests from scripts.analytics.preprint_summary import PreprintSummary @pytest.fixture() def preprint_provider(): return PreprintProviderFactory(name='Test 1') @pytest.fixture() def preprint(preprint_provider): return PreprintFactory._build(PreprintService, provider=preprint_provider) pytestmark = pytest.mark.django_db class TestPreprintCount: def test_get_preprint_count(self, preprint): requests.post = mock.MagicMock() resp = requests.Response() resp._content = '{"hits" : {"total" : 1}}' requests.post.return_value = resp field = PreprintService._meta.get_field('date_created') field.auto_now_add = False # We have to fudge the time because Keen doesn't allow same day queries. date = datetime.datetime.utcnow() - datetime.timedelta(days=1, hours=1) preprint.date_created = date - datetime.timedelta(hours=1) preprint.save() field.auto_now_add = True results = PreprintSummary().get_events(date.date()) assert_equal(len(results), 1) data = results[0] assert_equal(data['provider']['name'], 'Test 1') assert_equal(data['provider']['total'], 1)
import datetime from osf_tests.factories import PreprintFactory, PreprintProviderFactory from osf.models import PreprintService from nose.tools import * # PEP8 asserts import mock import pytest import pytz import requests from scripts.analytics.preprint_summary import PreprintSummary @pytest.fixture() def preprint_provider(): return PreprintProviderFactory(name='Test 1') @pytest.fixture() def preprint(preprint_provider): return PreprintFactory._build(PreprintService, provider=preprint_provider) pytestmark = pytest.mark.django_db class TestPreprintCount: - def test_get_preprint_count(self, preprint_provider, preprint): ? ------------------- + def test_get_preprint_count(self, preprint): requests.post = mock.MagicMock() resp = requests.Response() resp._content = '{"hits" : {"total" : 1}}' requests.post.return_value = resp field = PreprintService._meta.get_field('date_created') field.auto_now_add = False # We have to fudge the time because Keen doesn't allow same day queries. - date = datetime.datetime.utcnow() - datetime.timedelta(1) + date = datetime.datetime.utcnow() - datetime.timedelta(days=1, hours=1) ? ++++++++++++++ - preprint.date_created = date - datetime.timedelta(0.1) ? ^^ + preprint.date_created = date - datetime.timedelta(hours=1) ? ^^^^^^ preprint.save() field.auto_now_add = True results = PreprintSummary().get_events(date.date()) assert_equal(len(results), 1) data = results[0] assert_equal(data['provider']['name'], 'Test 1') assert_equal(data['provider']['total'], 1)
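Why the arithmetic change matters: Keen queries by calendar day, and the old fudge factor (`timedelta(0.1)`, i.e. 2.4 hours) could push the preprint's timestamp onto the previous day. A quick check with one fixed clock value (the timestamp is illustrative):

```python
import datetime

now = datetime.datetime(2017, 6, 1, 0, 30)            # just past midnight UTC

# Old arithmetic: subtracting 0.1 days (2h24m) can cross midnight.
old_query = now - datetime.timedelta(1)               # 2017-05-31 00:30
old_created = old_query - datetime.timedelta(0.1)     # 2017-05-30 22:06
print(old_query.date() == old_created.date())         # False -> flaky test

# New arithmetic keeps both stamps comfortably over a day in the past
# and, for this clock value, on the same calendar day.
new_query = now - datetime.timedelta(days=1, hours=1)   # 2017-05-30 23:30
new_created = new_query - datetime.timedelta(hours=1)   # 2017-05-30 22:30
print(new_query.date() == new_created.date())           # True
```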
6c578b67753e7a3fd646e5d91259b50c0b39bec6
tests/test_add_target.py
tests/test_add_target.py
import io from vws import VWS class TestSuccess: """ Tests for successfully adding a target. """ def test_add_target( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ No exception is raised when adding one target. """ client.add_target(name='x', width=1, image=high_quality_image) def test_add_two_targets( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ No exception is raised when adding two targets with different names. """ client.add_target(name='x', width=1, image=high_quality_image) client.add_target(name='a', width=1, image=high_quality_image)
import io from mock_vws import MockVWS from vws import VWS class TestSuccess: """ Tests for successfully adding a target. """ def test_add_target( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ No exception is raised when adding one target. """ client.add_target(name='x', width=1, image=high_quality_image) def test_add_two_targets( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ No exception is raised when adding two targets with different names. """ client.add_target(name='x', width=1, image=high_quality_image) client.add_target(name='a', width=1, image=high_quality_image) class TestCustomBaseURL: """ Tests for adding images to databases under custom VWS URLs. """ def test_custom_base_url(self, high_quality_image: io.BytesIO) -> None: """ It is possible to use add a target to a database under a custom VWS URL. """ base_vws_url = 'http://example.com' with MockVWS(base_vws_url=base_vws_url) as mock: client = VWS( server_access_key=mock.server_access_key, server_secret_key=mock.server_secret_key, base_vws_url=base_vws_url, ) client.add_target( name='x', width=1, image=high_quality_image, )
Add test for custom base URL
Add test for custom base URL
Python
mit
adamtheturtle/vws-python,adamtheturtle/vws-python
import io + + from mock_vws import MockVWS from vws import VWS class TestSuccess: """ Tests for successfully adding a target. """ def test_add_target( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ No exception is raised when adding one target. """ client.add_target(name='x', width=1, image=high_quality_image) def test_add_two_targets( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ No exception is raised when adding two targets with different names. """ client.add_target(name='x', width=1, image=high_quality_image) client.add_target(name='a', width=1, image=high_quality_image) + + class TestCustomBaseURL: + """ + Tests for adding images to databases under custom VWS URLs. + """ + + def test_custom_base_url(self, high_quality_image: io.BytesIO) -> None: + """ + It is possible to use add a target to a database under a custom VWS + URL. + """ + base_vws_url = 'http://example.com' + with MockVWS(base_vws_url=base_vws_url) as mock: + client = VWS( + server_access_key=mock.server_access_key, + server_secret_key=mock.server_secret_key, + base_vws_url=base_vws_url, + ) + + client.add_target( + name='x', + width=1, + image=high_quality_image, + ) +
Add test for custom base URL
## Code Before: import io from vws import VWS class TestSuccess: """ Tests for successfully adding a target. """ def test_add_target( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ No exception is raised when adding one target. """ client.add_target(name='x', width=1, image=high_quality_image) def test_add_two_targets( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ No exception is raised when adding two targets with different names. """ client.add_target(name='x', width=1, image=high_quality_image) client.add_target(name='a', width=1, image=high_quality_image) ## Instruction: Add test for custom base URL ## Code After: import io from mock_vws import MockVWS from vws import VWS class TestSuccess: """ Tests for successfully adding a target. """ def test_add_target( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ No exception is raised when adding one target. """ client.add_target(name='x', width=1, image=high_quality_image) def test_add_two_targets( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ No exception is raised when adding two targets with different names. """ client.add_target(name='x', width=1, image=high_quality_image) client.add_target(name='a', width=1, image=high_quality_image) class TestCustomBaseURL: """ Tests for adding images to databases under custom VWS URLs. """ def test_custom_base_url(self, high_quality_image: io.BytesIO) -> None: """ It is possible to use add a target to a database under a custom VWS URL. """ base_vws_url = 'http://example.com' with MockVWS(base_vws_url=base_vws_url) as mock: client = VWS( server_access_key=mock.server_access_key, server_secret_key=mock.server_secret_key, base_vws_url=base_vws_url, ) client.add_target( name='x', width=1, image=high_quality_image, )
import io + + from mock_vws import MockVWS from vws import VWS class TestSuccess: """ Tests for successfully adding a target. """ def test_add_target( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ No exception is raised when adding one target. """ client.add_target(name='x', width=1, image=high_quality_image) def test_add_two_targets( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ No exception is raised when adding two targets with different names. """ client.add_target(name='x', width=1, image=high_quality_image) client.add_target(name='a', width=1, image=high_quality_image) + + + class TestCustomBaseURL: + """ + Tests for adding images to databases under custom VWS URLs. + """ + + def test_custom_base_url(self, high_quality_image: io.BytesIO) -> None: + """ + It is possible to use add a target to a database under a custom VWS + URL. + """ + base_vws_url = 'http://example.com' + with MockVWS(base_vws_url=base_vws_url) as mock: + client = VWS( + server_access_key=mock.server_access_key, + server_secret_key=mock.server_secret_key, + base_vws_url=base_vws_url, + ) + + client.add_target( + name='x', + width=1, + image=high_quality_image, + )
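These tests assume two pytest fixtures, client and high_quality_image, that are defined elsewhere in the suite. A minimal sketch of what such fixtures could look like, reusing only the MockVWS and VWS calls visible in the diff above; the fixture bodies and the image filename are assumptions, not code from the repository:

import io

import pytest
from mock_vws import MockVWS
from vws import VWS


@pytest.fixture
def high_quality_image() -> io.BytesIO:
    # Assumption: any feature-rich photo on disk works; the real suite
    # may construct this image differently.
    with open('high_quality_image.jpg', 'rb') as image_file:
        return io.BytesIO(image_file.read())


@pytest.fixture
def client() -> VWS:
    # Keys come from the mock, exactly as in the custom-base-URL test above.
    # Relying on MockVWS's default URL here is an assumption.
    with MockVWS() as mock:
        yield VWS(
            server_access_key=mock.server_access_key,
            server_secret_key=mock.server_secret_key,
        )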
f6a8e84a2557c5edf29a6f3afa4d1cce1d42d389
tests/basics/try_finally_loops.py
tests/basics/try_finally_loops.py
while True: try: break finally: print('finally 1') for i in [1, 5, 10]: try: continue finally: print('finally 2') for i in range(3): try: continue finally: print('finally 3') # Multi-level for i in range(4): print(i) try: while True: try: try: break finally: print('finally 1') finally: print('finally 2') print('here') finally: print('finnaly 3')
while True: try: break finally: print('finally 1') for i in [1, 5, 10]: try: continue finally: print('finally 2') for i in range(3): try: continue finally: print('finally 3') # Multi-level for i in range(4): print(i) try: while True: try: try: break finally: print('finally 1') finally: print('finally 2') print('here') finally: print('finnaly 3') # break from within try-finally, within for-loop for i in [1]: try: print(i) break finally: print('finally 4')
Add test for break from within try within a for-loop.
tests/basics: Add test for break from within try within a for-loop.
Python
mit
turbinenreiter/micropython,Peetz0r/micropython-esp32,hosaka/micropython,ryannathans/micropython,bvernoux/micropython,tralamazza/micropython,cwyark/micropython,turbinenreiter/micropython,alex-march/micropython,SHA2017-badge/micropython-esp32,dxxb/micropython,swegener/micropython,adafruit/circuitpython,SHA2017-badge/micropython-esp32,alex-robbins/micropython,HenrikSolver/micropython,hosaka/micropython,matthewelse/micropython,jmarcelino/pycom-micropython,swegener/micropython,hosaka/micropython,selste/micropython,matthewelse/micropython,torwag/micropython,deshipu/micropython,dxxb/micropython,toolmacher/micropython,deshipu/micropython,SHA2017-badge/micropython-esp32,selste/micropython,oopy/micropython,pfalcon/micropython,pfalcon/micropython,hiway/micropython,AriZuu/micropython,micropython/micropython-esp32,pramasoul/micropython,oopy/micropython,blazewicz/micropython,pozetroninc/micropython,tuc-osg/micropython,jmarcelino/pycom-micropython,AriZuu/micropython,oopy/micropython,blazewicz/micropython,blazewicz/micropython,tuc-osg/micropython,AriZuu/micropython,jmarcelino/pycom-micropython,trezor/micropython,tobbad/micropython,pozetroninc/micropython,MrSurly/micropython-esp32,Timmenem/micropython,hosaka/micropython,Timmenem/micropython,ryannathans/micropython,torwag/micropython,MrSurly/micropython,trezor/micropython,ryannathans/micropython,lowRISC/micropython,swegener/micropython,hiway/micropython,henriknelson/micropython,HenrikSolver/micropython,bvernoux/micropython,MrSurly/micropython,turbinenreiter/micropython,MrSurly/micropython,PappaPeppar/micropython,TDAbboud/micropython,matthewelse/micropython,trezor/micropython,lowRISC/micropython,matthewelse/micropython,micropython/micropython-esp32,chrisdearman/micropython,alex-robbins/micropython,turbinenreiter/micropython,alex-march/micropython,tuc-osg/micropython,adafruit/circuitpython,TDAbboud/micropython,dxxb/micropython,chrisdearman/micropython,alex-march/micropython,jmarcelino/pycom-micropython,Peetz0r/micropython-esp32,blazewicz/micropython,trezor/micropython,HenrikSolver/micropython,pramasoul/micropython,tobbad/micropython,cwyark/micropython,tralamazza/micropython,pfalcon/micropython,toolmacher/micropython,tobbad/micropython,MrSurly/micropython,pozetroninc/micropython,micropython/micropython-esp32,lowRISC/micropython,bvernoux/micropython,AriZuu/micropython,tobbad/micropython,micropython/micropython-esp32,selste/micropython,adafruit/micropython,Timmenem/micropython,Timmenem/micropython,alex-march/micropython,torwag/micropython,oopy/micropython,mhoffma/micropython,AriZuu/micropython,MrSurly/micropython-esp32,deshipu/micropython,PappaPeppar/micropython
while True: try: break finally: print('finally 1') for i in [1, 5, 10]: try: continue finally: print('finally 2') for i in range(3): try: continue finally: print('finally 3') # Multi-level for i in range(4): print(i) try: while True: try: try: break finally: print('finally 1') finally: print('finally 2') print('here') finally: print('finnaly 3') + # break from within try-finally, within for-loop + for i in [1]: + try: + print(i) + break + finally: + print('finally 4') +
Add test for break from within try within a for-loop.
## Code Before: while True: try: break finally: print('finally 1') for i in [1, 5, 10]: try: continue finally: print('finally 2') for i in range(3): try: continue finally: print('finally 3') # Multi-level for i in range(4): print(i) try: while True: try: try: break finally: print('finally 1') finally: print('finally 2') print('here') finally: print('finnaly 3') ## Instruction: Add test for break from within try within a for-loop. ## Code After: while True: try: break finally: print('finally 1') for i in [1, 5, 10]: try: continue finally: print('finally 2') for i in range(3): try: continue finally: print('finally 3') # Multi-level for i in range(4): print(i) try: while True: try: try: break finally: print('finally 1') finally: print('finally 2') print('here') finally: print('finnaly 3') # break from within try-finally, within for-loop for i in [1]: try: print(i) break finally: print('finally 4')
while True: try: break finally: print('finally 1') for i in [1, 5, 10]: try: continue finally: print('finally 2') for i in range(3): try: continue finally: print('finally 3') # Multi-level for i in range(4): print(i) try: while True: try: try: break finally: print('finally 1') finally: print('finally 2') print('here') finally: print('finnaly 3') + + # break from within try-finally, within for-loop + for i in [1]: + try: + print(i) + break + finally: + print('finally 4')
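The case added by this commit checks that a finally clause still runs when break exits a for-loop body; these tests pass by matching MicroPython's output against CPython's. A commented copy of the new block with its expected prints, inferred from standard Python semantics rather than taken from an expectation file:

for i in [1]:
    try:
        print(i)              # prints: 1
        break                 # unwinds through the finally handler, then leaves the loop
    finally:
        print('finally 4')    # prints: finally 4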
e1b62a5d39fd3a4adb7d783c131fd122ba09c3d5
support/biicode-build.py
support/biicode-build.py
import bootstrap, glob, os, shutil from download import Downloader from subprocess import check_call os_name = os.environ['TRAVIS_OS_NAME'] if os_name == 'linux': # Install newer version of CMake. bootstrap.install_cmake( 'cmake-3.1.1-Linux-i386.tar.gz', check_installed=False, download_dir=None, install_dir='.') with Downloader().download('http://www.biicode.com/downloads/latest/ubuntu64') as f: check_call(['sudo', 'dpkg', '-i', f]) elif os_name == 'osx': with Downloader().download('http://www.biicode.com/downloads/latest/macos') as f: check_call(['sudo', 'installer', '-pkg', f, '-target', '/']) project_dir = 'biicode_project' check_call(['bii', 'init', project_dir]) cppformat_dir = os.path.join(project_dir, 'blocks/vitaut/cppformat') shutil.copytree('.', cppformat_dir, ignore=shutil.ignore_patterns('biicode_project')) for f in glob.glob('support/biicode/*'): shutil.copy(f, cppformat_dir) check_call(['bii', 'cpp:build'], cwd=project_dir)
import bootstrap, glob, os, shutil from download import Downloader from subprocess import check_call os_name = os.environ['TRAVIS_OS_NAME'] if os_name == 'linux': # Install newer version of CMake. bootstrap.install_cmake( 'cmake-3.1.1-Linux-i386.tar.gz', check_installed=False, download_dir=None) with Downloader().download('http://www.biicode.com/downloads/latest/ubuntu64') as f: check_call(['sudo', 'dpkg', '-i', f]) elif os_name == 'osx': with Downloader().download('http://www.biicode.com/downloads/latest/macos') as f: check_call(['sudo', 'installer', '-pkg', f, '-target', '/']) project_dir = 'biicode_project' check_call(['bii', 'init', project_dir]) cppformat_dir = os.path.join(project_dir, 'blocks/vitaut/cppformat') shutil.copytree('.', cppformat_dir, ignore=shutil.ignore_patterns('biicode_project')) for f in glob.glob('support/biicode/*'): shutil.copy(f, cppformat_dir) check_call(['bii', 'cpp:build'], cwd=project_dir)
Install CMake in system dirs
Install CMake in system dirs
Python
bsd-2-clause
blaquee/cppformat,mojoBrendan/fmt,cppformat/cppformat,mojoBrendan/fmt,seungrye/cppformat,lightslife/cppformat,nelson4722/cppformat,alabuzhev/fmt,alabuzhev/fmt,lightslife/cppformat,lightslife/cppformat,cppformat/cppformat,alabuzhev/fmt,cppformat/cppformat,Jopie64/cppformat,blaquee/cppformat,mojoBrendan/fmt,dean0x7d/cppformat,seungrye/cppformat,nelson4722/cppformat,wangshijin/cppformat,dean0x7d/cppformat,nelson4722/cppformat,Jopie64/cppformat,blaquee/cppformat,wangshijin/cppformat,dean0x7d/cppformat,Jopie64/cppformat,seungrye/cppformat,wangshijin/cppformat
import bootstrap, glob, os, shutil from download import Downloader from subprocess import check_call os_name = os.environ['TRAVIS_OS_NAME'] if os_name == 'linux': # Install newer version of CMake. bootstrap.install_cmake( - 'cmake-3.1.1-Linux-i386.tar.gz', check_installed=False, download_dir=None, install_dir='.') + 'cmake-3.1.1-Linux-i386.tar.gz', check_installed=False, download_dir=None) with Downloader().download('http://www.biicode.com/downloads/latest/ubuntu64') as f: check_call(['sudo', 'dpkg', '-i', f]) elif os_name == 'osx': with Downloader().download('http://www.biicode.com/downloads/latest/macos') as f: check_call(['sudo', 'installer', '-pkg', f, '-target', '/']) project_dir = 'biicode_project' check_call(['bii', 'init', project_dir]) cppformat_dir = os.path.join(project_dir, 'blocks/vitaut/cppformat') shutil.copytree('.', cppformat_dir, ignore=shutil.ignore_patterns('biicode_project')) for f in glob.glob('support/biicode/*'): shutil.copy(f, cppformat_dir) check_call(['bii', 'cpp:build'], cwd=project_dir)
Install CMake in system dirs
## Code Before: import bootstrap, glob, os, shutil from download import Downloader from subprocess import check_call os_name = os.environ['TRAVIS_OS_NAME'] if os_name == 'linux': # Install newer version of CMake. bootstrap.install_cmake( 'cmake-3.1.1-Linux-i386.tar.gz', check_installed=False, download_dir=None, install_dir='.') with Downloader().download('http://www.biicode.com/downloads/latest/ubuntu64') as f: check_call(['sudo', 'dpkg', '-i', f]) elif os_name == 'osx': with Downloader().download('http://www.biicode.com/downloads/latest/macos') as f: check_call(['sudo', 'installer', '-pkg', f, '-target', '/']) project_dir = 'biicode_project' check_call(['bii', 'init', project_dir]) cppformat_dir = os.path.join(project_dir, 'blocks/vitaut/cppformat') shutil.copytree('.', cppformat_dir, ignore=shutil.ignore_patterns('biicode_project')) for f in glob.glob('support/biicode/*'): shutil.copy(f, cppformat_dir) check_call(['bii', 'cpp:build'], cwd=project_dir) ## Instruction: Install CMake in system dirs ## Code After: import bootstrap, glob, os, shutil from download import Downloader from subprocess import check_call os_name = os.environ['TRAVIS_OS_NAME'] if os_name == 'linux': # Install newer version of CMake. bootstrap.install_cmake( 'cmake-3.1.1-Linux-i386.tar.gz', check_installed=False, download_dir=None) with Downloader().download('http://www.biicode.com/downloads/latest/ubuntu64') as f: check_call(['sudo', 'dpkg', '-i', f]) elif os_name == 'osx': with Downloader().download('http://www.biicode.com/downloads/latest/macos') as f: check_call(['sudo', 'installer', '-pkg', f, '-target', '/']) project_dir = 'biicode_project' check_call(['bii', 'init', project_dir]) cppformat_dir = os.path.join(project_dir, 'blocks/vitaut/cppformat') shutil.copytree('.', cppformat_dir, ignore=shutil.ignore_patterns('biicode_project')) for f in glob.glob('support/biicode/*'): shutil.copy(f, cppformat_dir) check_call(['bii', 'cpp:build'], cwd=project_dir)
import bootstrap, glob, os, shutil from download import Downloader from subprocess import check_call os_name = os.environ['TRAVIS_OS_NAME'] if os_name == 'linux': # Install newer version of CMake. bootstrap.install_cmake( - 'cmake-3.1.1-Linux-i386.tar.gz', check_installed=False, download_dir=None, install_dir='.') ? ----------------- + 'cmake-3.1.1-Linux-i386.tar.gz', check_installed=False, download_dir=None) with Downloader().download('http://www.biicode.com/downloads/latest/ubuntu64') as f: check_call(['sudo', 'dpkg', '-i', f]) elif os_name == 'osx': with Downloader().download('http://www.biicode.com/downloads/latest/macos') as f: check_call(['sudo', 'installer', '-pkg', f, '-target', '/']) project_dir = 'biicode_project' check_call(['bii', 'init', project_dir]) cppformat_dir = os.path.join(project_dir, 'blocks/vitaut/cppformat') shutil.copytree('.', cppformat_dir, ignore=shutil.ignore_patterns('biicode_project')) for f in glob.glob('support/biicode/*'): shutil.copy(f, cppformat_dir) check_call(['bii', 'cpp:build'], cwd=project_dir)
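Dropping the install_dir='.' argument makes bootstrap.install_cmake fall back to its default location; the default itself is defined in the project's bootstrap module, which is not part of this record. As a rough illustration only, installing a CMake binary tarball "in system dirs" on Linux usually means extracting it under a system prefix instead of the working directory; the paths below are illustrative:

import tarfile

with tarfile.open('cmake-3.1.1-Linux-i386.tar.gz') as archive:
    archive.extractall('/usr/local')  # system-wide; extractall('.') would mirror the old local install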
f350de4b748c8a6e8368a8d4500be92ad14b78c3
pip/vendor/__init__.py
pip/vendor/__init__.py
from __future__ import absolute_import # Monkeypatch pip.vendor.six into just six try: import six except ImportError: import sys from . import six sys.modules["six"] = six
from __future__ import absolute_import # Monkeypatch pip.vendor.six into just six # This is kind of terrible, however it is the least bad of 3 bad options # #1 Ship pip with ``six`` such that it gets installed as a regular module # #2 Modify pip.vendor.html5lib so that instead of ``import six`` it uses # ``from pip.vendor import six``. # #3 This monkeypatch which adds six to the top level modules only when # pip.vendor.* is being used. # # #1 involves pollutiong the globally installed packages and possibly # preventing people from using older or newer versions of the six library # #2 Means we've modified upstream which makes it more dificult to upgrade # in the future and paves the way for us to be in charge of maintaining it. # #3 Allows us to not modify upstream while only pollutiong the global # namespace when ``pip.vendor`` has been imported, which in typical usage # is isolated to command line evocations. try: import six except ImportError: import sys from . import six sys.modules["six"] = six
Document the reasons why we are monkeypatching six into sys.modules
Document the reasons why we are monkeypatching six into sys.modules
Python
mit
minrk/pip,harrisonfeng/pip,tdsmith/pip,xavfernandez/pip,caosmo/pip,zenlambda/pip,haridsv/pip,Carreau/pip,pjdelport/pip,habnabit/pip,alex/pip,davidovich/pip,luzfcb/pip,alex/pip,jamezpolley/pip,pradyunsg/pip,mujiansu/pip,natefoo/pip,sigmavirus24/pip,luzfcb/pip,patricklaw/pip,h4ck3rm1k3/pip,James-Firth/pip,ChristopherHogan/pip,atdaemon/pip,sigmavirus24/pip,James-Firth/pip,zenlambda/pip,qwcode/pip,prasaianooz/pip,RonnyPfannschmidt/pip,nthall/pip,RonnyPfannschmidt/pip,nthall/pip,pypa/pip,ncoghlan/pip,mujiansu/pip,chaoallsome/pip,atdaemon/pip,yati-sagade/pip,squidsoup/pip,caosmo/pip,zorosteven/pip,msabramo/pip,pjdelport/pip,wkeyword/pip,yati-sagade/pip,zorosteven/pip,squidsoup/pip,jasonkying/pip,mattrobenolt/pip,willingc/pip,KarelJakubec/pip,jamezpolley/pip,willingc/pip,ChristopherHogan/pip,ianw/pip,natefoo/pip,pradyunsg/pip,blarghmatey/pip,sbidoul/pip,fiber-space/pip,alex/pip,techtonik/pip,minrk/pip,prasaianooz/pip,davidovich/pip,luzfcb/pip,xavfernandez/pip,zenlambda/pip,pfmoore/pip,rouge8/pip,zvezdan/pip,atdaemon/pip,dstufft/pip,mindw/pip,techtonik/pip,h4ck3rm1k3/pip,natefoo/pip,jamezpolley/pip,dstufft/pip,nthall/pip,benesch/pip,qbdsoft/pip,rouge8/pip,supriyantomaftuh/pip,haridsv/pip,mindw/pip,qbdsoft/pip,erikrose/pip,davidovich/pip,Gabriel439/pip,ChristopherHogan/pip,graingert/pip,harrisonfeng/pip,Ivoz/pip,dstufft/pip,jasonkying/pip,wkeyword/pip,sigmavirus24/pip,cjerdonek/pip,graingert/pip,prasaianooz/pip,yati-sagade/pip,rbtcollins/pip,mujiansu/pip,benesch/pip,rbtcollins/pip,esc/pip,jythontools/pip,chaoallsome/pip,zvezdan/pip,esc/pip,rouge8/pip,tdsmith/pip,jmagnusson/pip,blarghmatey/pip,zvezdan/pip,alquerci/pip,zorosteven/pip,erikrose/pip,KarelJakubec/pip,habnabit/pip,RonnyPfannschmidt/pip,Ivoz/pip,pjdelport/pip,msabramo/pip,ncoghlan/pip,esc/pip,techtonik/pip,jmagnusson/pip,alquerci/pip,supriyantomaftuh/pip,wkeyword/pip,Carreau/pip,haridsv/pip,mindw/pip,xavfernandez/pip,benesch/pip,ianw/pip,blarghmatey/pip,patricklaw/pip,fiber-space/pip,pypa/pip,rbtcollins/pip,qbdsoft/pip,pfmoore/pip,Gabriel439/pip,fiber-space/pip,jythontools/pip,qwcode/pip,jmagnusson/pip,supriyantomaftuh/pip,erikrose/pip,willingc/pip,graingert/pip,squidsoup/pip,habnabit/pip,Gabriel439/pip,KarelJakubec/pip,jasonkying/pip,James-Firth/pip,harrisonfeng/pip,jythontools/pip,ncoghlan/pip,h4ck3rm1k3/pip,cjerdonek/pip,mattrobenolt/pip,chaoallsome/pip,sbidoul/pip,tdsmith/pip,caosmo/pip
from __future__ import absolute_import # Monkeypatch pip.vendor.six into just six + # This is kind of terrible, however it is the least bad of 3 bad options + # #1 Ship pip with ``six`` such that it gets installed as a regular module + # #2 Modify pip.vendor.html5lib so that instead of ``import six`` it uses + # ``from pip.vendor import six``. + # #3 This monkeypatch which adds six to the top level modules only when + # pip.vendor.* is being used. + # + # #1 involves pollutiong the globally installed packages and possibly + # preventing people from using older or newer versions of the six library + # #2 Means we've modified upstream which makes it more dificult to upgrade + # in the future and paves the way for us to be in charge of maintaining it. + # #3 Allows us to not modify upstream while only pollutiong the global + # namespace when ``pip.vendor`` has been imported, which in typical usage + # is isolated to command line evocations. try: import six except ImportError: import sys from . import six sys.modules["six"] = six
Document the reasons why we are monkeypatching six into sys.modules
## Code Before: from __future__ import absolute_import # Monkeypatch pip.vendor.six into just six try: import six except ImportError: import sys from . import six sys.modules["six"] = six ## Instruction: Document the reasons why we are monkeypatching six into sys.modules ## Code After: from __future__ import absolute_import # Monkeypatch pip.vendor.six into just six # This is kind of terrible, however it is the least bad of 3 bad options # #1 Ship pip with ``six`` such that it gets installed as a regular module # #2 Modify pip.vendor.html5lib so that instead of ``import six`` it uses # ``from pip.vendor import six``. # #3 This monkeypatch which adds six to the top level modules only when # pip.vendor.* is being used. # # #1 involves pollutiong the globally installed packages and possibly # preventing people from using older or newer versions of the six library # #2 Means we've modified upstream which makes it more dificult to upgrade # in the future and paves the way for us to be in charge of maintaining it. # #3 Allows us to not modify upstream while only pollutiong the global # namespace when ``pip.vendor`` has been imported, which in typical usage # is isolated to command line evocations. try: import six except ImportError: import sys from . import six sys.modules["six"] = six
from __future__ import absolute_import # Monkeypatch pip.vendor.six into just six + # This is kind of terrible, however it is the least bad of 3 bad options + # #1 Ship pip with ``six`` such that it gets installed as a regular module + # #2 Modify pip.vendor.html5lib so that instead of ``import six`` it uses + # ``from pip.vendor import six``. + # #3 This monkeypatch which adds six to the top level modules only when + # pip.vendor.* is being used. + # + # #1 involves pollutiong the globally installed packages and possibly + # preventing people from using older or newer versions of the six library + # #2 Means we've modified upstream which makes it more dificult to upgrade + # in the future and paves the way for us to be in charge of maintaining it. + # #3 Allows us to not modify upstream while only pollutiong the global + # namespace when ``pip.vendor`` has been imported, which in typical usage + # is isolated to command line evocations. try: import six except ImportError: import sys from . import six sys.modules["six"] = six
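The comment block added here describes option #3, aliasing the vendored module in sys.modules. A self-contained demonstration of why the trick works: Python's import machinery consults sys.modules before searching the filesystem, so an alias placed there satisfies any later import six. The stand-in module below is fabricated for the demo:

import sys
import types

vendored_six = types.ModuleType('six')  # stands in for pip.vendor.six
sys.modules['six'] = vendored_six

import six  # resolved straight from sys.modules; no file search happens
assert six is vendored_six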
e4a799d96ad80a8f7960824e7b9ec1192e81deeb
turbasen/__init__.py
turbasen/__init__.py
from __future__ import absolute_import, division, print_function, unicode_literals # Import the models we want directly available through the root module from .models import \ Omrade, \ Sted # Make configure directly available through the root module from .settings import configure # Make handle_available directly available through the root module from .events import handle_event
from __future__ import absolute_import, division, print_function, unicode_literals # Import the models we want directly available through the root module from .models import \ Gruppe, \ Omrade, \ Sted # Make configure directly available through the root module from .settings import configure # Make handle_available directly available through the root module from .events import handle_event
Add Gruppe to Turbasen import __init__
Add Gruppe to Turbasen import __init__
Python
mit
Turbasen/turbasen.py
from __future__ import absolute_import, division, print_function, unicode_literals # Import the models we want directly available through the root module from .models import \ + Gruppe, \ Omrade, \ Sted # Make configure directly available through the root module from .settings import configure # Make handle_available directly available through the root module from .events import handle_event
Add Gruppe to Turbasen import __init__
## Code Before:
from __future__ import absolute_import, division, print_function, unicode_literals

# Import the models we want directly available through the root module
from .models import \
    Omrade, \
    Sted

# Make configure directly available through the root module
from .settings import configure

# Make handle_available directly available through the root module
from .events import handle_event

## Instruction:
Add Gruppe to Turbasen import __init__

## Code After:
from __future__ import absolute_import, division, print_function, unicode_literals

# Import the models we want directly available through the root module
from .models import \
    Gruppe, \
    Omrade, \
    Sted

# Make configure directly available through the root module
from .settings import configure

# Make handle_available directly available through the root module
from .events import handle_event
from __future__ import absolute_import, division, print_function, unicode_literals # Import the models we want directly available through the root module from .models import \ + Gruppe, \ Omrade, \ Sted # Make configure directly available through the root module from .settings import configure # Make handle_available directly available through the root module from .events import handle_event
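The practical effect of the re-export is that callers can reach Gruppe from the package root. A usage sketch; it assumes the turbasen package is installed, which this record does not bundle:

import turbasen

group = turbasen.Gruppe  # now available, alongside turbasen.Omrade and turbasen.Sted
# Before this commit the only spelling was: from turbasen.models import Gruppe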
f2752572d915563ea5a3361dbb7a3fee08b04660
tests/test_mmstats.py
tests/test_mmstats.py
import mmstats def test_uint(): class MyStats(mmstats.MmStats): apples = mmstats.UIntStat() oranges = mmstats.UIntStat() mmst = MyStats() # Basic format assert mmst.mmap[0] == '\x01' assert mmst.mmap.find('applesL') != -1 assert mmst.mmap.find('orangesL') != -1 # Stat manipulation assert mmst.apples == 0 assert mmst.oranges == 0 mmst.apples = 1 assert mmst.apples == 1 assert mmst.oranges == 0
import mmstats def test_uint(): class MyStats(mmstats.MmStats): zebras = mmstats.UIntStat() apples = mmstats.UIntStat() oranges = mmstats.UIntStat() mmst = MyStats() # Basic format assert mmst.mmap[0] == '\x01' assert mmst.mmap.find('applesL') != -1 assert mmst.mmap.find('orangesL') != -1 assert mmst.mmap.find('zebrasL') != -1 # Stat manipulation assert mmst.apples == 0 assert mmst.oranges == 0 assert mmst.zebras == 0 mmst.apples = 1 assert mmst.apples == 1 assert mmst.oranges == 0 assert mmst.zebras == 0 mmst.zebras = 9001 assert mmst.apples == 1 assert mmst.oranges == 0 assert mmst.zebras == 9001
Make basic test a bit more thorough
Make basic test a bit more thorough
Python
bsd-3-clause
schmichael/mmstats,schmichael/mmstats,schmichael/mmstats,schmichael/mmstats
import mmstats def test_uint(): class MyStats(mmstats.MmStats): + zebras = mmstats.UIntStat() apples = mmstats.UIntStat() oranges = mmstats.UIntStat() mmst = MyStats() # Basic format assert mmst.mmap[0] == '\x01' assert mmst.mmap.find('applesL') != -1 assert mmst.mmap.find('orangesL') != -1 + assert mmst.mmap.find('zebrasL') != -1 # Stat manipulation assert mmst.apples == 0 assert mmst.oranges == 0 + assert mmst.zebras == 0 mmst.apples = 1 assert mmst.apples == 1 assert mmst.oranges == 0 + assert mmst.zebras == 0 + mmst.zebras = 9001 + assert mmst.apples == 1 + assert mmst.oranges == 0 + assert mmst.zebras == 9001 +
Make basic test a bit more thorough
## Code Before: import mmstats def test_uint(): class MyStats(mmstats.MmStats): apples = mmstats.UIntStat() oranges = mmstats.UIntStat() mmst = MyStats() # Basic format assert mmst.mmap[0] == '\x01' assert mmst.mmap.find('applesL') != -1 assert mmst.mmap.find('orangesL') != -1 # Stat manipulation assert mmst.apples == 0 assert mmst.oranges == 0 mmst.apples = 1 assert mmst.apples == 1 assert mmst.oranges == 0 ## Instruction: Make basic test a bit more thorough ## Code After: import mmstats def test_uint(): class MyStats(mmstats.MmStats): zebras = mmstats.UIntStat() apples = mmstats.UIntStat() oranges = mmstats.UIntStat() mmst = MyStats() # Basic format assert mmst.mmap[0] == '\x01' assert mmst.mmap.find('applesL') != -1 assert mmst.mmap.find('orangesL') != -1 assert mmst.mmap.find('zebrasL') != -1 # Stat manipulation assert mmst.apples == 0 assert mmst.oranges == 0 assert mmst.zebras == 0 mmst.apples = 1 assert mmst.apples == 1 assert mmst.oranges == 0 assert mmst.zebras == 0 mmst.zebras = 9001 assert mmst.apples == 1 assert mmst.oranges == 0 assert mmst.zebras == 9001
import mmstats def test_uint(): class MyStats(mmstats.MmStats): + zebras = mmstats.UIntStat() apples = mmstats.UIntStat() oranges = mmstats.UIntStat() mmst = MyStats() # Basic format assert mmst.mmap[0] == '\x01' assert mmst.mmap.find('applesL') != -1 assert mmst.mmap.find('orangesL') != -1 + assert mmst.mmap.find('zebrasL') != -1 # Stat manipulation assert mmst.apples == 0 assert mmst.oranges == 0 + assert mmst.zebras == 0 mmst.apples = 1 assert mmst.apples == 1 assert mmst.oranges == 0 + assert mmst.zebras == 0 + + mmst.zebras = 9001 + assert mmst.apples == 1 + assert mmst.oranges == 0 + assert mmst.zebras == 9001
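The mmap assertions suggest that each stat is labelled in shared memory as the field name followed by a struct type code, 'L' for unsigned long, which is why the test searches for 'zebrasL'. A toy byte-buffer version of the same checks; the exact on-disk layout of mmstats is not reproduced here, and this buffer is a guess for illustration:

import struct

label = b'zebras' + b'L'                        # field name plus struct code
buf = b'\x01' + label + struct.pack('L', 9001)  # version byte, label, value
assert buf[0:1] == b'\x01'
assert buf.find(b'zebrasL') != -1
assert struct.unpack_from('L', buf, 1 + len(label))[0] == 9001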
5d812fbacab2970a1a601f8d801b08305873490d
src/ekklesia_portal/concepts/argument/argument_contracts.py
src/ekklesia_portal/concepts/argument/argument_contracts.py
from colander import Length from deform import Form from deform.widget import TextAreaWidget from ekklesia_portal.helper.contract import Schema, string_property from ekklesia_portal.helper.translation import _ class ArgumentSchema(Schema): title = string_property(title=_('title'), validator=Length(min=5, max=80)) abstract = string_property(title=_('abstract'), validator=Length(min=5, max=140)) details = string_property(title=_('details'), validator=Length(min=10, max=4096), missing='') argument_widgets = { 'abstract': TextAreaWidget(rows=2), 'details': TextAreaWidget(rows=4) } class ArgumentForm(Form): def __init__(self, request, action): super().__init__(ArgumentSchema(), request, action, buttons=("submit", )) self.set_widgets(argument_widgets)
from colander import Length from deform import Form from deform.widget import TextAreaWidget, TextInputWidget from ekklesia_portal.helper.contract import Schema, string_property from ekklesia_portal.helper.translation import _ TITLE_MAXLENGTH = 80 ABSTRACT_MAXLENGTH = 160 class ArgumentSchema(Schema): title = string_property(title=_('title'), validator=Length(min=5, max=TITLE_MAXLENGTH)) abstract = string_property(title=_('abstract'), validator=Length(min=5, max=ABSTRACT_MAXLENGTH)) details = string_property(title=_('details'), validator=Length(min=10, max=4096), missing='') argument_widgets = { 'title': TextInputWidget(attributes={'maxlength': TITLE_MAXLENGTH}), 'abstract': TextAreaWidget(rows=2, attributes={'maxlength': ABSTRACT_MAXLENGTH}), 'details': TextAreaWidget(rows=4) } class ArgumentForm(Form): def __init__(self, request, action): super().__init__(ArgumentSchema(), request, action, buttons=("submit", )) self.set_widgets(argument_widgets)
Add maxlength to argument form title and abstract
Add maxlength to argument form title and abstract
Python
agpl-3.0
dpausp/arguments,dpausp/arguments,dpausp/arguments,dpausp/arguments
from colander import Length from deform import Form - from deform.widget import TextAreaWidget + from deform.widget import TextAreaWidget, TextInputWidget from ekklesia_portal.helper.contract import Schema, string_property from ekklesia_portal.helper.translation import _ + TITLE_MAXLENGTH = 80 + ABSTRACT_MAXLENGTH = 160 + + class ArgumentSchema(Schema): - title = string_property(title=_('title'), validator=Length(min=5, max=80)) + title = string_property(title=_('title'), validator=Length(min=5, max=TITLE_MAXLENGTH)) - abstract = string_property(title=_('abstract'), validator=Length(min=5, max=140)) + abstract = string_property(title=_('abstract'), validator=Length(min=5, max=ABSTRACT_MAXLENGTH)) details = string_property(title=_('details'), validator=Length(min=10, max=4096), missing='') argument_widgets = { - 'abstract': TextAreaWidget(rows=2), + 'title': TextInputWidget(attributes={'maxlength': TITLE_MAXLENGTH}), + 'abstract': TextAreaWidget(rows=2, attributes={'maxlength': ABSTRACT_MAXLENGTH}), 'details': TextAreaWidget(rows=4) } class ArgumentForm(Form): def __init__(self, request, action): super().__init__(ArgumentSchema(), request, action, buttons=("submit", )) self.set_widgets(argument_widgets)
Add maxlength to argument form title and abstract
## Code Before: from colander import Length from deform import Form from deform.widget import TextAreaWidget from ekklesia_portal.helper.contract import Schema, string_property from ekklesia_portal.helper.translation import _ class ArgumentSchema(Schema): title = string_property(title=_('title'), validator=Length(min=5, max=80)) abstract = string_property(title=_('abstract'), validator=Length(min=5, max=140)) details = string_property(title=_('details'), validator=Length(min=10, max=4096), missing='') argument_widgets = { 'abstract': TextAreaWidget(rows=2), 'details': TextAreaWidget(rows=4) } class ArgumentForm(Form): def __init__(self, request, action): super().__init__(ArgumentSchema(), request, action, buttons=("submit", )) self.set_widgets(argument_widgets) ## Instruction: Add maxlength to argument form title and abstract ## Code After: from colander import Length from deform import Form from deform.widget import TextAreaWidget, TextInputWidget from ekklesia_portal.helper.contract import Schema, string_property from ekklesia_portal.helper.translation import _ TITLE_MAXLENGTH = 80 ABSTRACT_MAXLENGTH = 160 class ArgumentSchema(Schema): title = string_property(title=_('title'), validator=Length(min=5, max=TITLE_MAXLENGTH)) abstract = string_property(title=_('abstract'), validator=Length(min=5, max=ABSTRACT_MAXLENGTH)) details = string_property(title=_('details'), validator=Length(min=10, max=4096), missing='') argument_widgets = { 'title': TextInputWidget(attributes={'maxlength': TITLE_MAXLENGTH}), 'abstract': TextAreaWidget(rows=2, attributes={'maxlength': ABSTRACT_MAXLENGTH}), 'details': TextAreaWidget(rows=4) } class ArgumentForm(Form): def __init__(self, request, action): super().__init__(ArgumentSchema(), request, action, buttons=("submit", )) self.set_widgets(argument_widgets)
from colander import Length from deform import Form - from deform.widget import TextAreaWidget + from deform.widget import TextAreaWidget, TextInputWidget ? +++++++++++++++++ from ekklesia_portal.helper.contract import Schema, string_property from ekklesia_portal.helper.translation import _ + TITLE_MAXLENGTH = 80 + ABSTRACT_MAXLENGTH = 160 + + class ArgumentSchema(Schema): - title = string_property(title=_('title'), validator=Length(min=5, max=80)) ? ^^ + title = string_property(title=_('title'), validator=Length(min=5, max=TITLE_MAXLENGTH)) ? ^^^^^^^^^^^^^^^ - abstract = string_property(title=_('abstract'), validator=Length(min=5, max=140)) ? ^^^ + abstract = string_property(title=_('abstract'), validator=Length(min=5, max=ABSTRACT_MAXLENGTH)) ? ^^^^^^^^^^^^^^^^^^ details = string_property(title=_('details'), validator=Length(min=10, max=4096), missing='') argument_widgets = { - 'abstract': TextAreaWidget(rows=2), + 'title': TextInputWidget(attributes={'maxlength': TITLE_MAXLENGTH}), + 'abstract': TextAreaWidget(rows=2, attributes={'maxlength': ABSTRACT_MAXLENGTH}), 'details': TextAreaWidget(rows=4) } class ArgumentForm(Form): def __init__(self, request, action): super().__init__(ArgumentSchema(), request, action, buttons=("submit", )) self.set_widgets(argument_widgets)
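Hoisting the limits into TITLE_MAXLENGTH and ABSTRACT_MAXLENGTH keeps the client-side maxlength attribute and the server-side colander Length validator in agreement. A standalone check of the server-side half; it needs only the colander package, and the bare SchemaNode is a simplified stand-in for the project's string_property helper:

import colander

TITLE_MAXLENGTH = 80
title = colander.SchemaNode(
    colander.String(),
    validator=colander.Length(min=5, max=TITLE_MAXLENGTH),
)

title.deserialize('x' * TITLE_MAXLENGTH)  # accepted
try:
    title.deserialize('x' * (TITLE_MAXLENGTH + 1))
except colander.Invalid as exc:
    print(exc.asdict())  # rejected server-side, independent of the HTML attribute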
87f14e78e649487776585c0bb6f8253a12985ac5
Applications/SegmentVesselsCNNSeeds/SegmentVesselsCNNSeeds.py
Applications/SegmentVesselsCNNSeeds/SegmentVesselsCNNSeeds.py
import errno import os import ctk_cli import keras.models as M from tubetk.vseg.cnn import deploy, utils script_params = utils.script_params def main(args): utils.set_params_path(args.params) if (args.resampled is None) ^ (script_params['RESAMPLE_SPACING'] is None or args.preprocessed is None): raise ValueError("A resampled image should be supplied iff resampling is" " enabled in the parameters file and a preprocessed" " image is given.") if args.preprocessed is None: args.resampled, args.preprocessed = deploy.prep(args.inputImage, args.outputDir) elif args.resampled is None: args.resampled = args.inputImage try: os.mkdir(args.outputDir) except OSError as e: if e.errno != errno.EEXIST: raise model = M.load_model(args.model) prefix = os.path.join(args.outputDir, os.path.splitext(os.path.basename(args.inputImage))[0]) deploy.generate_seed_points(model, args.preprocessed, prefix) deploy.segmentTubes(args.resampled, args.vascularModelFile, prefix, script_params['VESSEL_SEED_PROBABILITY'], script_params['VESSEL_SCALE']) if __name__ == '__main__': main(ctk_cli.CLIArgumentParser().parse_args())
import errno import os import ctk_cli import keras.models as M from tubetk.vseg.cnn import deploy, utils script_params = utils.script_params def main(args): utils.set_params_path(args.params) if (args.resampled is None) ^ (script_params['RESAMPLE_SPACING'] is None or args.preprocessed is None): raise ValueError("A resampled image should be supplied iff resampling is" " enabled in the parameters file and a preprocessed" " image is given.") try: os.mkdir(args.outputDir) except OSError as e: if e.errno != errno.EEXIST: raise if args.preprocessed is None: args.resampled, args.preprocessed = deploy.prep(args.inputImage, args.outputDir) elif args.resampled is None: args.resampled = args.inputImage model = M.load_model(args.model) prefix = os.path.join(args.outputDir, os.path.splitext(os.path.basename(args.inputImage))[0]) deploy.generate_seed_points(model, args.preprocessed, prefix) deploy.segmentTubes(args.resampled, args.vascularModelFile, prefix, script_params['VESSEL_SEED_PROBABILITY'], script_params['VESSEL_SCALE']) if __name__ == '__main__': main(ctk_cli.CLIArgumentParser().parse_args())
Create directory, if necessary, before preprocessing files as well
Create directory, if necessary, before preprocessing files as well
Python
apache-2.0
KitwareMedical/ITKTubeTK,KitwareMedical/TubeTK,thewtex/TubeTK,thewtex/TubeTK,KitwareMedical/TubeTK,thewtex/TubeTK,aylward/ITKTubeTK,aylward/ITKTubeTK,KitwareMedical/TubeTK,KitwareMedical/ITKTubeTK,KitwareMedical/ITKTubeTK,thewtex/TubeTK,KitwareMedical/TubeTK,KitwareMedical/ITKTubeTK,aylward/ITKTubeTK,aylward/ITKTubeTK
import errno import os import ctk_cli import keras.models as M from tubetk.vseg.cnn import deploy, utils script_params = utils.script_params def main(args): utils.set_params_path(args.params) if (args.resampled is None) ^ (script_params['RESAMPLE_SPACING'] is None or args.preprocessed is None): raise ValueError("A resampled image should be supplied iff resampling is" " enabled in the parameters file and a preprocessed" " image is given.") - if args.preprocessed is None: - args.resampled, args.preprocessed = deploy.prep(args.inputImage, args.outputDir) - elif args.resampled is None: - args.resampled = args.inputImage try: os.mkdir(args.outputDir) except OSError as e: if e.errno != errno.EEXIST: raise + if args.preprocessed is None: + args.resampled, args.preprocessed = deploy.prep(args.inputImage, args.outputDir) + elif args.resampled is None: + args.resampled = args.inputImage model = M.load_model(args.model) prefix = os.path.join(args.outputDir, os.path.splitext(os.path.basename(args.inputImage))[0]) deploy.generate_seed_points(model, args.preprocessed, prefix) deploy.segmentTubes(args.resampled, args.vascularModelFile, prefix, script_params['VESSEL_SEED_PROBABILITY'], script_params['VESSEL_SCALE']) if __name__ == '__main__': main(ctk_cli.CLIArgumentParser().parse_args())
Create directory, if necessary, before preprocessing files as well
## Code Before: import errno import os import ctk_cli import keras.models as M from tubetk.vseg.cnn import deploy, utils script_params = utils.script_params def main(args): utils.set_params_path(args.params) if (args.resampled is None) ^ (script_params['RESAMPLE_SPACING'] is None or args.preprocessed is None): raise ValueError("A resampled image should be supplied iff resampling is" " enabled in the parameters file and a preprocessed" " image is given.") if args.preprocessed is None: args.resampled, args.preprocessed = deploy.prep(args.inputImage, args.outputDir) elif args.resampled is None: args.resampled = args.inputImage try: os.mkdir(args.outputDir) except OSError as e: if e.errno != errno.EEXIST: raise model = M.load_model(args.model) prefix = os.path.join(args.outputDir, os.path.splitext(os.path.basename(args.inputImage))[0]) deploy.generate_seed_points(model, args.preprocessed, prefix) deploy.segmentTubes(args.resampled, args.vascularModelFile, prefix, script_params['VESSEL_SEED_PROBABILITY'], script_params['VESSEL_SCALE']) if __name__ == '__main__': main(ctk_cli.CLIArgumentParser().parse_args()) ## Instruction: Create directory, if necessary, before preprocessing files as well ## Code After: import errno import os import ctk_cli import keras.models as M from tubetk.vseg.cnn import deploy, utils script_params = utils.script_params def main(args): utils.set_params_path(args.params) if (args.resampled is None) ^ (script_params['RESAMPLE_SPACING'] is None or args.preprocessed is None): raise ValueError("A resampled image should be supplied iff resampling is" " enabled in the parameters file and a preprocessed" " image is given.") try: os.mkdir(args.outputDir) except OSError as e: if e.errno != errno.EEXIST: raise if args.preprocessed is None: args.resampled, args.preprocessed = deploy.prep(args.inputImage, args.outputDir) elif args.resampled is None: args.resampled = args.inputImage model = M.load_model(args.model) prefix = os.path.join(args.outputDir, os.path.splitext(os.path.basename(args.inputImage))[0]) deploy.generate_seed_points(model, args.preprocessed, prefix) deploy.segmentTubes(args.resampled, args.vascularModelFile, prefix, script_params['VESSEL_SEED_PROBABILITY'], script_params['VESSEL_SCALE']) if __name__ == '__main__': main(ctk_cli.CLIArgumentParser().parse_args())
import errno import os import ctk_cli import keras.models as M from tubetk.vseg.cnn import deploy, utils script_params = utils.script_params def main(args): utils.set_params_path(args.params) if (args.resampled is None) ^ (script_params['RESAMPLE_SPACING'] is None or args.preprocessed is None): raise ValueError("A resampled image should be supplied iff resampling is" " enabled in the parameters file and a preprocessed" " image is given.") - if args.preprocessed is None: - args.resampled, args.preprocessed = deploy.prep(args.inputImage, args.outputDir) - elif args.resampled is None: - args.resampled = args.inputImage try: os.mkdir(args.outputDir) except OSError as e: if e.errno != errno.EEXIST: raise + if args.preprocessed is None: + args.resampled, args.preprocessed = deploy.prep(args.inputImage, args.outputDir) + elif args.resampled is None: + args.resampled = args.inputImage model = M.load_model(args.model) prefix = os.path.join(args.outputDir, os.path.splitext(os.path.basename(args.inputImage))[0]) deploy.generate_seed_points(model, args.preprocessed, prefix) deploy.segmentTubes(args.resampled, args.vascularModelFile, prefix, script_params['VESSEL_SEED_PROBABILITY'], script_params['VESSEL_SCALE']) if __name__ == '__main__': main(ctk_cli.CLIArgumentParser().parse_args())
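The fix moves the EEXIST-tolerant mkdir ahead of deploy.prep so the output directory exists before preprocessing writes into it. As an aside, on Python 3.2 and later the same tolerant-create idiom collapses to a single call; the directory name below is illustrative:

import os

os.makedirs('output_dir', exist_ok=True)  # no error if 'output_dir' already exists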
7fa9fb24262c5ced8d09a2de34fd412cc5aa3758
private/realclearpolitics-scraper/realclearpolitics/spiders/spider.py
private/realclearpolitics-scraper/realclearpolitics/spiders/spider.py
import scrapy from realclearpolitics.items import TableItem class RcpSpider(scrapy.Spider): name = "realclearpoliticsSpider" start_urls = [] def __init__(self, url, state_code): self.url = url self.state_code = state_code def start_requests(self): return [scrapy.FormRequest(self.url, callback=self.parse)] def parse(self, response): table = response.css('.data').pop() legend = table.css('tr')[0] fieldNames = legend.css('th::text').extract() nb_fields = len(fieldNames) items = [] contentLines = table.css('tr')[1::] for line in contentLines: item = TableItem() values = line.css('td::text, td span::text, td a::text').extract() for i in range(nb_fields): item[fieldNames[i]] = values[i] item['locale'] = self.state_code items.append(item) return items
import scrapy from realclearpolitics.items import TableItem class RcpSpider(scrapy.Spider): name = "realclearpoliticsSpider" start_urls = [] columns = ['Poll','Date', 'Sample', 'Spread'] def __init__(self, url, state_code): self.url = url self.state_code = state_code def start_requests(self): return [scrapy.FormRequest(self.url, callback=self.parse)] def parse(self, response): table = response.css('.data').pop() legend = table.css('tr')[0] fieldNames = legend.css('th::text').extract() nb_fields = len(fieldNames) items = [] contentLines = table.css('tr')[1::] for line in contentLines: item = TableItem() item['field'] = {} values = line.css('td::text, td span::text, td a::text').extract() for i in range(nb_fields): if fieldNames[i] in RcpSpider.columns: item[fieldNames[i]] = values[i] else: item['field'][fieldNames[i]] = values[i] item['locale'] = self.state_code items.append(item) return items
Put candidate score in field object
Put candidate score in field object
Python
mit
dpxxdp/berniemetrics,Rumel/berniemetrics,dpxxdp/berniemetrics,fpagnoux/berniemetrics,fpagnoux/berniemetrics,dpxxdp/berniemetrics,fpagnoux/berniemetrics,Rumel/berniemetrics,dpxxdp/berniemetrics,fpagnoux/berniemetrics,Rumel/berniemetrics,Rumel/berniemetrics
import scrapy from realclearpolitics.items import TableItem class RcpSpider(scrapy.Spider): name = "realclearpoliticsSpider" start_urls = [] + columns = ['Poll','Date', 'Sample', 'Spread'] def __init__(self, url, state_code): self.url = url self.state_code = state_code def start_requests(self): return [scrapy.FormRequest(self.url, callback=self.parse)] def parse(self, response): table = response.css('.data').pop() legend = table.css('tr')[0] fieldNames = legend.css('th::text').extract() nb_fields = len(fieldNames) items = [] contentLines = table.css('tr')[1::] for line in contentLines: item = TableItem() + item['field'] = {} values = line.css('td::text, td span::text, td a::text').extract() for i in range(nb_fields): + if fieldNames[i] in RcpSpider.columns: - item[fieldNames[i]] = values[i] + item[fieldNames[i]] = values[i] + else: + item['field'][fieldNames[i]] = values[i] + item['locale'] = self.state_code items.append(item) return items
Put candidate score in field object
## Code Before: import scrapy from realclearpolitics.items import TableItem class RcpSpider(scrapy.Spider): name = "realclearpoliticsSpider" start_urls = [] def __init__(self, url, state_code): self.url = url self.state_code = state_code def start_requests(self): return [scrapy.FormRequest(self.url, callback=self.parse)] def parse(self, response): table = response.css('.data').pop() legend = table.css('tr')[0] fieldNames = legend.css('th::text').extract() nb_fields = len(fieldNames) items = [] contentLines = table.css('tr')[1::] for line in contentLines: item = TableItem() values = line.css('td::text, td span::text, td a::text').extract() for i in range(nb_fields): item[fieldNames[i]] = values[i] item['locale'] = self.state_code items.append(item) return items ## Instruction: Put candidate score in field object ## Code After: import scrapy from realclearpolitics.items import TableItem class RcpSpider(scrapy.Spider): name = "realclearpoliticsSpider" start_urls = [] columns = ['Poll','Date', 'Sample', 'Spread'] def __init__(self, url, state_code): self.url = url self.state_code = state_code def start_requests(self): return [scrapy.FormRequest(self.url, callback=self.parse)] def parse(self, response): table = response.css('.data').pop() legend = table.css('tr')[0] fieldNames = legend.css('th::text').extract() nb_fields = len(fieldNames) items = [] contentLines = table.css('tr')[1::] for line in contentLines: item = TableItem() item['field'] = {} values = line.css('td::text, td span::text, td a::text').extract() for i in range(nb_fields): if fieldNames[i] in RcpSpider.columns: item[fieldNames[i]] = values[i] else: item['field'][fieldNames[i]] = values[i] item['locale'] = self.state_code items.append(item) return items
import scrapy from realclearpolitics.items import TableItem class RcpSpider(scrapy.Spider): name = "realclearpoliticsSpider" start_urls = [] + columns = ['Poll','Date', 'Sample', 'Spread'] def __init__(self, url, state_code): self.url = url self.state_code = state_code def start_requests(self): return [scrapy.FormRequest(self.url, callback=self.parse)] def parse(self, response): table = response.css('.data').pop() legend = table.css('tr')[0] fieldNames = legend.css('th::text').extract() nb_fields = len(fieldNames) items = [] contentLines = table.css('tr')[1::] for line in contentLines: item = TableItem() + item['field'] = {} values = line.css('td::text, td span::text, td a::text').extract() for i in range(nb_fields): + if fieldNames[i] in RcpSpider.columns: - item[fieldNames[i]] = values[i] + item[fieldNames[i]] = values[i] ? ++++ + else: + item['field'][fieldNames[i]] = values[i] + item['locale'] = self.state_code items.append(item) return items
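The new columns whitelist keeps the four fixed poll columns as top-level item keys and routes everything else, typically one column per candidate, into the nested item['field'] dict. A plain-Python sketch of the resulting shape; the candidate names and values are invented for illustration:

known_columns = ['Poll', 'Date', 'Sample', 'Spread']
field_names = ['Poll', 'Date', 'Sample', 'Candidate A', 'Candidate B', 'Spread']
values = ['Example Poll', '2/1 - 2/7', '500 LV', '48', '44', 'A +4']

item = {'field': {}}
for name, value in zip(field_names, values):
    if name in known_columns:
        item[name] = value
    else:
        item['field'][name] = value  # per-candidate scores land here

print(item['field'])  # {'Candidate A': '48', 'Candidate B': '44'}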
1a1f0a9bca7458153ef84316fd84dfbe56be08ef
dolo/config.py
dolo/config.py
save_plots = False try: import dolo.misc.printing as printing from numpy import ndarray from dolo.symbolic.model import Model from dolo.numeric.decision_rules import DynareDecisionRule ip = get_ipython() # there could be some kind of autodecovery there ip.display_formatter.formatters['text/html'].for_type( ndarray, printing.print_array ) ip.display_formatter.formatters['text/html'].for_type( Model, printing.print_model ) ip.display_formatter.formatters['text/html'].for_type( DynareDecisionRule, printing.print_dynare_decision_rule ) from IPython.core.display import display except: print("failing back on pretty_print") from pprint import pprint def display(txt): pprint(txt)
save_plots = False try: import dolo.misc.printing as printing from numpy import ndarray from dolo.symbolic.model import Model from dolo.numeric.decision_rules import DynareDecisionRule ip = get_ipython() # there could be some kind of autodecovery there ip.display_formatter.formatters['text/html'].for_type( ndarray, printing.print_array ) ip.display_formatter.formatters['text/html'].for_type( Model, printing.print_model ) ip.display_formatter.formatters['text/html'].for_type( DynareDecisionRule, printing.print_dynare_decision_rule ) from IPython.core.display import display except: import re import sys if not re.search('dolo-recs',sys.argv[0]): print("failing back on pretty_print") from pprint import pprint def display(txt): pprint(txt)
Remove print("failing back on pretty_print") when using dolo-recs
Remove print("failing back on pretty_print") when using dolo-recs
Python
bsd-2-clause
EconForge/dolo
save_plots = False try: import dolo.misc.printing as printing from numpy import ndarray from dolo.symbolic.model import Model from dolo.numeric.decision_rules import DynareDecisionRule ip = get_ipython() # there could be some kind of autodecovery there ip.display_formatter.formatters['text/html'].for_type( ndarray, printing.print_array ) ip.display_formatter.formatters['text/html'].for_type( Model, printing.print_model ) ip.display_formatter.formatters['text/html'].for_type( DynareDecisionRule, printing.print_dynare_decision_rule ) from IPython.core.display import display except: - + import re + import sys + if not re.search('dolo-recs',sys.argv[0]): - print("failing back on pretty_print") + print("failing back on pretty_print") from pprint import pprint def display(txt): pprint(txt)
Remove print("failing back on pretty_print") when using dolo-recs
## Code Before: save_plots = False try: import dolo.misc.printing as printing from numpy import ndarray from dolo.symbolic.model import Model from dolo.numeric.decision_rules import DynareDecisionRule ip = get_ipython() # there could be some kind of autodecovery there ip.display_formatter.formatters['text/html'].for_type( ndarray, printing.print_array ) ip.display_formatter.formatters['text/html'].for_type( Model, printing.print_model ) ip.display_formatter.formatters['text/html'].for_type( DynareDecisionRule, printing.print_dynare_decision_rule ) from IPython.core.display import display except: print("failing back on pretty_print") from pprint import pprint def display(txt): pprint(txt) ## Instruction: Remove print("failing back on pretty_print") when using dolo-recs ## Code After: save_plots = False try: import dolo.misc.printing as printing from numpy import ndarray from dolo.symbolic.model import Model from dolo.numeric.decision_rules import DynareDecisionRule ip = get_ipython() # there could be some kind of autodecovery there ip.display_formatter.formatters['text/html'].for_type( ndarray, printing.print_array ) ip.display_formatter.formatters['text/html'].for_type( Model, printing.print_model ) ip.display_formatter.formatters['text/html'].for_type( DynareDecisionRule, printing.print_dynare_decision_rule ) from IPython.core.display import display except: import re import sys if not re.search('dolo-recs',sys.argv[0]): print("failing back on pretty_print") from pprint import pprint def display(txt): pprint(txt)
save_plots = False try: import dolo.misc.printing as printing from numpy import ndarray from dolo.symbolic.model import Model from dolo.numeric.decision_rules import DynareDecisionRule ip = get_ipython() # there could be some kind of autodecovery there ip.display_formatter.formatters['text/html'].for_type( ndarray, printing.print_array ) ip.display_formatter.formatters['text/html'].for_type( Model, printing.print_model ) ip.display_formatter.formatters['text/html'].for_type( DynareDecisionRule, printing.print_dynare_decision_rule ) from IPython.core.display import display except: - + import re + import sys + if not re.search('dolo-recs',sys.argv[0]): - print("failing back on pretty_print") + print("failing back on pretty_print") ? ++++ from pprint import pprint def display(txt): pprint(txt)
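The guard added here uses re.search as a plain substring test on the program name. A standalone equivalent of the new logic; since 'dolo-recs' contains no regex metacharacters, the in operator would behave identically, and the message text is kept verbatim from the source, including its "failing back" wording:

import re
import sys

if not re.search('dolo-recs', sys.argv[0]):
    print("failing back on pretty_print")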
2f5f2112c4c6b97b76fd268d1e79537dac696f0e
src/nyc_trees/apps/home/training/decorators.py
src/nyc_trees/apps/home/training/decorators.py
from __future__ import print_function from __future__ import unicode_literals from __future__ import division from functools import wraps from django.db import transaction from django.http import Http404 from django.contrib.flatpages.views import flatpage def render_flatpage(url): def fn(request, *args, **kwargs): return flatpage(request, url) return fn def mark_user(attr): def outer_decorator(view_fn): @wraps(view_fn) @transaction.atomic def wrapper(request, *args, **kwargs): user = request.user if user.is_authenticated() and not getattr(user, attr): setattr(user, attr, True) user.save() ctx = view_fn(request, *args, **kwargs) return ctx return wrapper return outer_decorator def require_visitability(step): def outer_decorator(view_fn): @wraps(view_fn) @transaction.atomic def wrapper(request, *args, **kwargs): if not step.is_visitable(request.user): raise Http404() else: return view_fn(request, *args, **kwargs) return wrapper return outer_decorator
from __future__ import print_function from __future__ import unicode_literals from __future__ import division from functools import wraps from django.db import transaction from django.http import Http404 from django.contrib.flatpages.views import flatpage def render_flatpage(url): def fn(request, *args, **kwargs): return flatpage(request, url) return fn def mark_user(attr, extra_block=None): def outer_decorator(view_fn): @wraps(view_fn) @transaction.atomic def wrapper(request, *args, **kwargs): user = request.user if user.is_authenticated() and not getattr(user, attr): setattr(user, attr, True) user.save() should_trigger_extra_block = True else: should_trigger_extra_block = False ctx = view_fn(request, *args, **kwargs) # in case the extra block does things that don't # work well with database transactions (like email) # postpone it to the end of the transaction block # to avoid cases in which an email is sent but the # transaction is rolled back due to a later exception if extra_block and should_trigger_extra_block: extra_block(user) return ctx return wrapper return outer_decorator def require_visitability(step): def outer_decorator(view_fn): @wraps(view_fn) @transaction.atomic def wrapper(request, *args, **kwargs): if not step.is_visitable(request.user): raise Http404() else: return view_fn(request, *args, **kwargs) return wrapper return outer_decorator
Add support for arbitrary user actions on marking
Add support for arbitrary user actions on marking
Python
agpl-3.0
azavea/nyc-trees,maurizi/nyc-trees,azavea/nyc-trees,azavea/nyc-trees,maurizi/nyc-trees,kdeloach/nyc-trees,kdeloach/nyc-trees,maurizi/nyc-trees,azavea/nyc-trees,maurizi/nyc-trees,kdeloach/nyc-trees,kdeloach/nyc-trees,kdeloach/nyc-trees,azavea/nyc-trees
from __future__ import print_function from __future__ import unicode_literals from __future__ import division from functools import wraps from django.db import transaction from django.http import Http404 from django.contrib.flatpages.views import flatpage def render_flatpage(url): def fn(request, *args, **kwargs): return flatpage(request, url) return fn - def mark_user(attr): + def mark_user(attr, extra_block=None): def outer_decorator(view_fn): @wraps(view_fn) @transaction.atomic def wrapper(request, *args, **kwargs): user = request.user if user.is_authenticated() and not getattr(user, attr): setattr(user, attr, True) user.save() + should_trigger_extra_block = True + else: + should_trigger_extra_block = False ctx = view_fn(request, *args, **kwargs) + + # in case the extra block does things that don't + # work well with database transactions (like email) + # postpone it to the end of the transaction block + # to avoid cases in which an email is sent but the + # transaction is rolled back due to a later exception + if extra_block and should_trigger_extra_block: + extra_block(user) + return ctx return wrapper return outer_decorator def require_visitability(step): def outer_decorator(view_fn): @wraps(view_fn) @transaction.atomic def wrapper(request, *args, **kwargs): if not step.is_visitable(request.user): raise Http404() else: return view_fn(request, *args, **kwargs) return wrapper return outer_decorator
Add support for arbitrary user actions on marking
## Code Before: from __future__ import print_function from __future__ import unicode_literals from __future__ import division from functools import wraps from django.db import transaction from django.http import Http404 from django.contrib.flatpages.views import flatpage def render_flatpage(url): def fn(request, *args, **kwargs): return flatpage(request, url) return fn def mark_user(attr): def outer_decorator(view_fn): @wraps(view_fn) @transaction.atomic def wrapper(request, *args, **kwargs): user = request.user if user.is_authenticated() and not getattr(user, attr): setattr(user, attr, True) user.save() ctx = view_fn(request, *args, **kwargs) return ctx return wrapper return outer_decorator def require_visitability(step): def outer_decorator(view_fn): @wraps(view_fn) @transaction.atomic def wrapper(request, *args, **kwargs): if not step.is_visitable(request.user): raise Http404() else: return view_fn(request, *args, **kwargs) return wrapper return outer_decorator ## Instruction: Add support for arbitrary user actions on marking ## Code After: from __future__ import print_function from __future__ import unicode_literals from __future__ import division from functools import wraps from django.db import transaction from django.http import Http404 from django.contrib.flatpages.views import flatpage def render_flatpage(url): def fn(request, *args, **kwargs): return flatpage(request, url) return fn def mark_user(attr, extra_block=None): def outer_decorator(view_fn): @wraps(view_fn) @transaction.atomic def wrapper(request, *args, **kwargs): user = request.user if user.is_authenticated() and not getattr(user, attr): setattr(user, attr, True) user.save() should_trigger_extra_block = True else: should_trigger_extra_block = False ctx = view_fn(request, *args, **kwargs) # in case the extra block does things that don't # work well with database transactions (like email) # postpone it to the end of the transaction block # to avoid cases in which an email is sent but the # transaction is rolled back due to a later exception if extra_block and should_trigger_extra_block: extra_block(user) return ctx return wrapper return outer_decorator def require_visitability(step): def outer_decorator(view_fn): @wraps(view_fn) @transaction.atomic def wrapper(request, *args, **kwargs): if not step.is_visitable(request.user): raise Http404() else: return view_fn(request, *args, **kwargs) return wrapper return outer_decorator
from __future__ import print_function from __future__ import unicode_literals from __future__ import division from functools import wraps from django.db import transaction from django.http import Http404 from django.contrib.flatpages.views import flatpage def render_flatpage(url): def fn(request, *args, **kwargs): return flatpage(request, url) return fn - def mark_user(attr): + def mark_user(attr, extra_block=None): def outer_decorator(view_fn): @wraps(view_fn) @transaction.atomic def wrapper(request, *args, **kwargs): user = request.user if user.is_authenticated() and not getattr(user, attr): setattr(user, attr, True) user.save() + should_trigger_extra_block = True + else: + should_trigger_extra_block = False ctx = view_fn(request, *args, **kwargs) + + # in case the extra block does things that don't + # work well with database transactions (like email) + # postpone it to the end of the transaction block + # to avoid cases in which an email is sent but the + # transaction is rolled back due to a later exception + if extra_block and should_trigger_extra_block: + extra_block(user) + return ctx return wrapper return outer_decorator def require_visitability(step): def outer_decorator(view_fn): @wraps(view_fn) @transaction.atomic def wrapper(request, *args, **kwargs): if not step.is_visitable(request.user): raise Http404() else: return view_fn(request, *args, **kwargs) return wrapper return outer_decorator
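A framework-free sketch of the pattern the record above adds: mark a flag once, run the view, and fire the side effect only afterwards, mirroring the record's stated reason for keeping email-like actions out of the middle of a transaction. Django's request and transaction machinery is stripped out, and all names here are hypothetical.

from functools import wraps

def mark_user(attr, extra_block=None):
    def outer(view_fn):
        @wraps(view_fn)
        def wrapper(user, *args, **kwargs):
            first_time = not getattr(user, attr, False)
            if first_time:
                setattr(user, attr, True)
            result = view_fn(user, *args, **kwargs)
            # The side effect runs last, after the main work has succeeded.
            if extra_block and first_time:
                extra_block(user)
            return result
        return wrapper
    return outer

class User:
    pass

@mark_user('seen_training', extra_block=lambda u: print('notify once'))
def training_view(user):
    return 'ok'

u = User()
training_view(u)   # prints 'notify once'
training_view(u)   # silent: the flag is already set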
d06adea5117eb3ebfddd8592889346089c7391f7
dictlearn/wordnik_api_demo.py
dictlearn/wordnik_api_demo.py
from wordnik import swagger, WordApi, AccountApi client = swagger.ApiClient( 'dd3d32ae6b4709e1150040139c308fb77446e0a8ecc93db31', 'https://api.wordnik.com/v4') word_api = WordApi.WordApi(client) words = ['paint', 'mimic', 'mimics', 'francie', 'frolic', 'funhouse'] for word in words: print('=== {} ==='.format(word)) defs = word_api.getDefinitions(word) if not defs: print("no definitions") continue for def_ in defs: fmt_str = "{} --- {}" print(fmt_str.format(def_.sourceDictionary, def_.text.encode('utf-8'))) account_api = AccountApi.AccountApi(client) for i in range(5): print("Attempt {}".format(i)) status = account_api.getApiTokenStatus() print("Remaining_calls: {}".format(status.remainingCalls))
import nltk from wordnik import swagger, WordApi, AccountApi client = swagger.ApiClient( 'dd3d32ae6b4709e1150040139c308fb77446e0a8ecc93db31', 'https://api.wordnik.com/v4') word_api = WordApi.WordApi(client) toktok = nltk.ToktokTokenizer() words = ['paint', 'mimic', 'mimics', 'francie', 'frolic', 'funhouse'] for word in words: print('=== {} ==='.format(word)) defs = word_api.getDefinitions(word) if not defs: print("no definitions") continue for def_ in defs: fmt_str = "{} --- {}" tokenized_def = toktok.tokenize(def_.text.lower()) tokenized_def = [s.encode('utf-8') for s in tokenized_def] print(fmt_str.format(def_.sourceDictionary, tokenized_def)) account_api = AccountApi.AccountApi(client) for i in range(5): print("Attempt {}".format(i)) status = account_api.getApiTokenStatus() print("Remaining_calls: {}".format(status.remainingCalls))
Add tokenization to the WordNik demo
Add tokenization to the WordNik demo
Python
mit
tombosc/dict_based_learning,tombosc/dict_based_learning
+ import nltk from wordnik import swagger, WordApi, AccountApi client = swagger.ApiClient( 'dd3d32ae6b4709e1150040139c308fb77446e0a8ecc93db31', 'https://api.wordnik.com/v4') word_api = WordApi.WordApi(client) + toktok = nltk.ToktokTokenizer() words = ['paint', 'mimic', 'mimics', 'francie', 'frolic', 'funhouse'] for word in words: print('=== {} ==='.format(word)) defs = word_api.getDefinitions(word) if not defs: print("no definitions") continue for def_ in defs: fmt_str = "{} --- {}" + tokenized_def = toktok.tokenize(def_.text.lower()) + tokenized_def = [s.encode('utf-8') for s in tokenized_def] - print(fmt_str.format(def_.sourceDictionary, def_.text.encode('utf-8'))) + print(fmt_str.format(def_.sourceDictionary, + tokenized_def)) account_api = AccountApi.AccountApi(client) for i in range(5): print("Attempt {}".format(i)) status = account_api.getApiTokenStatus() print("Remaining_calls: {}".format(status.remainingCalls))
Add tokenization to the WordNik demo
## Code Before: from wordnik import swagger, WordApi, AccountApi client = swagger.ApiClient( 'dd3d32ae6b4709e1150040139c308fb77446e0a8ecc93db31', 'https://api.wordnik.com/v4') word_api = WordApi.WordApi(client) words = ['paint', 'mimic', 'mimics', 'francie', 'frolic', 'funhouse'] for word in words: print('=== {} ==='.format(word)) defs = word_api.getDefinitions(word) if not defs: print("no definitions") continue for def_ in defs: fmt_str = "{} --- {}" print(fmt_str.format(def_.sourceDictionary, def_.text.encode('utf-8'))) account_api = AccountApi.AccountApi(client) for i in range(5): print("Attempt {}".format(i)) status = account_api.getApiTokenStatus() print("Remaining_calls: {}".format(status.remainingCalls)) ## Instruction: Add tokenization to the WordNik demo ## Code After: import nltk from wordnik import swagger, WordApi, AccountApi client = swagger.ApiClient( 'dd3d32ae6b4709e1150040139c308fb77446e0a8ecc93db31', 'https://api.wordnik.com/v4') word_api = WordApi.WordApi(client) toktok = nltk.ToktokTokenizer() words = ['paint', 'mimic', 'mimics', 'francie', 'frolic', 'funhouse'] for word in words: print('=== {} ==='.format(word)) defs = word_api.getDefinitions(word) if not defs: print("no definitions") continue for def_ in defs: fmt_str = "{} --- {}" tokenized_def = toktok.tokenize(def_.text.lower()) tokenized_def = [s.encode('utf-8') for s in tokenized_def] print(fmt_str.format(def_.sourceDictionary, tokenized_def)) account_api = AccountApi.AccountApi(client) for i in range(5): print("Attempt {}".format(i)) status = account_api.getApiTokenStatus() print("Remaining_calls: {}".format(status.remainingCalls))
+ import nltk from wordnik import swagger, WordApi, AccountApi client = swagger.ApiClient( 'dd3d32ae6b4709e1150040139c308fb77446e0a8ecc93db31', 'https://api.wordnik.com/v4') word_api = WordApi.WordApi(client) + toktok = nltk.ToktokTokenizer() words = ['paint', 'mimic', 'mimics', 'francie', 'frolic', 'funhouse'] for word in words: print('=== {} ==='.format(word)) defs = word_api.getDefinitions(word) if not defs: print("no definitions") continue for def_ in defs: fmt_str = "{} --- {}" + tokenized_def = toktok.tokenize(def_.text.lower()) + tokenized_def = [s.encode('utf-8') for s in tokenized_def] - print(fmt_str.format(def_.sourceDictionary, def_.text.encode('utf-8'))) ? ---------------------------- + print(fmt_str.format(def_.sourceDictionary, + tokenized_def)) account_api = AccountApi.AccountApi(client) for i in range(5): print("Attempt {}".format(i)) status = account_api.getApiTokenStatus() print("Remaining_calls: {}".format(status.remainingCalls))
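A tiny usage sketch of the tokenization step the record above adds. It assumes only that nltk is installed; the sample definition is made up.

import nltk

toktok = nltk.ToktokTokenizer()
definition = "A colored substance spread over a surface."
print(toktok.tokenize(definition.lower()))
# roughly: ['a', 'colored', 'substance', 'spread', 'over', 'a', 'surface', '.']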
71cffcb8a8ec7e36dc389a5aa6dc2cc9769a9e97
distutils/tests/test_ccompiler.py
distutils/tests/test_ccompiler.py
import os import sys import platform import textwrap import sysconfig import pytest from distutils import ccompiler def _make_strs(paths): """ Convert paths to strings for legacy compatibility. """ if sys.version_info > (3, 8) and platform.system() != "Windows": return paths return list(map(os.fspath, paths)) @pytest.fixture def c_file(tmp_path): c_file = tmp_path / 'foo.c' gen_headers = ('Python.h',) is_windows = platform.system() == "Windows" plat_headers = ('windows.h',) * is_windows all_headers = gen_headers + plat_headers headers = '\n'.join(f'#include <{header}>\n' for header in all_headers) payload = ( textwrap.dedent( """ #headers void PyInit_foo(void) {} """ ) .lstrip() .replace('#headers', headers) ) c_file.write_text(payload) return c_file def test_set_include_dirs(c_file): """ Extensions should build even if set_include_dirs is invoked. In particular, compiler-specific paths should not be overridden. """ compiler = ccompiler.new_compiler() python = sysconfig.get_paths()['include'] compiler.set_include_dirs([python]) compiler.compile(_make_strs([c_file]))
import os import sys import platform import textwrap import sysconfig import pytest from distutils import ccompiler def _make_strs(paths): """ Convert paths to strings for legacy compatibility. """ if sys.version_info > (3, 8) and platform.system() != "Windows": return paths return list(map(os.fspath, paths)) @pytest.fixture def c_file(tmp_path): c_file = tmp_path / 'foo.c' gen_headers = ('Python.h',) is_windows = platform.system() == "Windows" plat_headers = ('windows.h',) * is_windows all_headers = gen_headers + plat_headers headers = '\n'.join(f'#include <{header}>\n' for header in all_headers) payload = ( textwrap.dedent( """ #headers void PyInit_foo(void) {} """ ) .lstrip() .replace('#headers', headers) ) c_file.write_text(payload) return c_file def test_set_include_dirs(c_file): """ Extensions should build even if set_include_dirs is invoked. In particular, compiler-specific paths should not be overridden. """ compiler = ccompiler.new_compiler() python = sysconfig.get_paths()['include'] compiler.set_include_dirs([python]) compiler.compile(_make_strs([c_file])) # do it again, setting include dirs after any initialization compiler.set_include_dirs([python]) compiler.compile(_make_strs([c_file]))
Extend the test to compile a second time after setting include dirs again.
Extend the test to compile a second time after setting include dirs again.
Python
mit
pypa/setuptools,pypa/setuptools,pypa/setuptools
import os import sys import platform import textwrap import sysconfig import pytest from distutils import ccompiler def _make_strs(paths): """ Convert paths to strings for legacy compatibility. """ if sys.version_info > (3, 8) and platform.system() != "Windows": return paths return list(map(os.fspath, paths)) @pytest.fixture def c_file(tmp_path): c_file = tmp_path / 'foo.c' gen_headers = ('Python.h',) is_windows = platform.system() == "Windows" plat_headers = ('windows.h',) * is_windows all_headers = gen_headers + plat_headers headers = '\n'.join(f'#include <{header}>\n' for header in all_headers) payload = ( textwrap.dedent( """ #headers void PyInit_foo(void) {} """ ) .lstrip() .replace('#headers', headers) ) c_file.write_text(payload) return c_file def test_set_include_dirs(c_file): """ Extensions should build even if set_include_dirs is invoked. In particular, compiler-specific paths should not be overridden. """ compiler = ccompiler.new_compiler() python = sysconfig.get_paths()['include'] compiler.set_include_dirs([python]) compiler.compile(_make_strs([c_file])) + # do it again, setting include dirs after any initialization + compiler.set_include_dirs([python]) + compiler.compile(_make_strs([c_file])) +
Extend the test to compile a second time after setting include dirs again.
## Code Before: import os import sys import platform import textwrap import sysconfig import pytest from distutils import ccompiler def _make_strs(paths): """ Convert paths to strings for legacy compatibility. """ if sys.version_info > (3, 8) and platform.system() != "Windows": return paths return list(map(os.fspath, paths)) @pytest.fixture def c_file(tmp_path): c_file = tmp_path / 'foo.c' gen_headers = ('Python.h',) is_windows = platform.system() == "Windows" plat_headers = ('windows.h',) * is_windows all_headers = gen_headers + plat_headers headers = '\n'.join(f'#include <{header}>\n' for header in all_headers) payload = ( textwrap.dedent( """ #headers void PyInit_foo(void) {} """ ) .lstrip() .replace('#headers', headers) ) c_file.write_text(payload) return c_file def test_set_include_dirs(c_file): """ Extensions should build even if set_include_dirs is invoked. In particular, compiler-specific paths should not be overridden. """ compiler = ccompiler.new_compiler() python = sysconfig.get_paths()['include'] compiler.set_include_dirs([python]) compiler.compile(_make_strs([c_file])) ## Instruction: Extend the test to compile a second time after setting include dirs again. ## Code After: import os import sys import platform import textwrap import sysconfig import pytest from distutils import ccompiler def _make_strs(paths): """ Convert paths to strings for legacy compatibility. """ if sys.version_info > (3, 8) and platform.system() != "Windows": return paths return list(map(os.fspath, paths)) @pytest.fixture def c_file(tmp_path): c_file = tmp_path / 'foo.c' gen_headers = ('Python.h',) is_windows = platform.system() == "Windows" plat_headers = ('windows.h',) * is_windows all_headers = gen_headers + plat_headers headers = '\n'.join(f'#include <{header}>\n' for header in all_headers) payload = ( textwrap.dedent( """ #headers void PyInit_foo(void) {} """ ) .lstrip() .replace('#headers', headers) ) c_file.write_text(payload) return c_file def test_set_include_dirs(c_file): """ Extensions should build even if set_include_dirs is invoked. In particular, compiler-specific paths should not be overridden. """ compiler = ccompiler.new_compiler() python = sysconfig.get_paths()['include'] compiler.set_include_dirs([python]) compiler.compile(_make_strs([c_file])) # do it again, setting include dirs after any initialization compiler.set_include_dirs([python]) compiler.compile(_make_strs([c_file]))
import os import sys import platform import textwrap import sysconfig import pytest from distutils import ccompiler def _make_strs(paths): """ Convert paths to strings for legacy compatibility. """ if sys.version_info > (3, 8) and platform.system() != "Windows": return paths return list(map(os.fspath, paths)) @pytest.fixture def c_file(tmp_path): c_file = tmp_path / 'foo.c' gen_headers = ('Python.h',) is_windows = platform.system() == "Windows" plat_headers = ('windows.h',) * is_windows all_headers = gen_headers + plat_headers headers = '\n'.join(f'#include <{header}>\n' for header in all_headers) payload = ( textwrap.dedent( """ #headers void PyInit_foo(void) {} """ ) .lstrip() .replace('#headers', headers) ) c_file.write_text(payload) return c_file def test_set_include_dirs(c_file): """ Extensions should build even if set_include_dirs is invoked. In particular, compiler-specific paths should not be overridden. """ compiler = ccompiler.new_compiler() python = sysconfig.get_paths()['include'] compiler.set_include_dirs([python]) compiler.compile(_make_strs([c_file])) + + # do it again, setting include dirs after any initialization + compiler.set_include_dirs([python]) + compiler.compile(_make_strs([c_file]))
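For context on the record above: the include directory handed to set_include_dirs comes straight from sysconfig, so the check below assumes nothing beyond a standard CPython installation.

import sysconfig

# The directory containing Python.h, which the generated foo.c includes.
print(sysconfig.get_paths()['include'])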
5ea25bc6c72e5c934e56a90c44f8019ad176bb27
comet/utility/test/test_spawn.py
comet/utility/test/test_spawn.py
import sys from twisted.trial import unittest from twisted.python import failure from ..spawn import SpawnCommand class DummyEvent(object): text = "" class SpawnCommandProtocolTestCase(unittest.TestCase): def test_good_process(self): spawn = SpawnCommand(sys.executable) d = spawn(DummyEvent()) d.addCallback(self.assertEqual, True) return d def test_bad_process(self): spawn = SpawnCommand("/not/a/real/executable") d = spawn(DummyEvent()) d.addErrback(self.assertIsInstance, failure.Failure) return d
import sys from twisted.trial import unittest from twisted.python import failure from twisted.python import util from ..spawn import SpawnCommand class DummyEvent(object): def __init__(self, text=None): self.text = text class SpawnCommandProtocolTestCase(unittest.TestCase): def test_good_process(self): spawn = SpawnCommand(sys.executable) d = spawn(DummyEvent()) d.addCallback(self.assertEqual, True) return d def test_bad_process(self): spawn = SpawnCommand("/not/a/real/executable") d = spawn(DummyEvent()) d.addErrback(self.assertIsInstance, failure.Failure) return d def test_write_data(self): TEXT = "Test spawn process" def read_data(result): f = open("spawnfile.txt") try: self.assertEqual(f.read(), TEXT) finally: f.close() spawn = SpawnCommand(util.sibpath(__file__, "test_spawn.sh")) d = spawn(DummyEvent(TEXT)) d.addCallback(read_data) return d
Test that spawned process actually writes data
Test that spawned process actually writes data
Python
bsd-2-clause
jdswinbank/Comet,jdswinbank/Comet
import sys from twisted.trial import unittest from twisted.python import failure + from twisted.python import util from ..spawn import SpawnCommand class DummyEvent(object): - text = "" + def __init__(self, text=None): + self.text = text class SpawnCommandProtocolTestCase(unittest.TestCase): def test_good_process(self): spawn = SpawnCommand(sys.executable) d = spawn(DummyEvent()) d.addCallback(self.assertEqual, True) return d def test_bad_process(self): spawn = SpawnCommand("/not/a/real/executable") d = spawn(DummyEvent()) d.addErrback(self.assertIsInstance, failure.Failure) return d + def test_write_data(self): + TEXT = "Test spawn process" + def read_data(result): + f = open("spawnfile.txt") + try: + self.assertEqual(f.read(), TEXT) + finally: + f.close() + spawn = SpawnCommand(util.sibpath(__file__, "test_spawn.sh")) + d = spawn(DummyEvent(TEXT)) + d.addCallback(read_data) + return d +
Test that spawned process actually writes data
## Code Before: import sys from twisted.trial import unittest from twisted.python import failure from ..spawn import SpawnCommand class DummyEvent(object): text = "" class SpawnCommandProtocolTestCase(unittest.TestCase): def test_good_process(self): spawn = SpawnCommand(sys.executable) d = spawn(DummyEvent()) d.addCallback(self.assertEqual, True) return d def test_bad_process(self): spawn = SpawnCommand("/not/a/real/executable") d = spawn(DummyEvent()) d.addErrback(self.assertIsInstance, failure.Failure) return d ## Instruction: Test that spawned process actually writes data ## Code After: import sys from twisted.trial import unittest from twisted.python import failure from twisted.python import util from ..spawn import SpawnCommand class DummyEvent(object): def __init__(self, text=None): self.text = text class SpawnCommandProtocolTestCase(unittest.TestCase): def test_good_process(self): spawn = SpawnCommand(sys.executable) d = spawn(DummyEvent()) d.addCallback(self.assertEqual, True) return d def test_bad_process(self): spawn = SpawnCommand("/not/a/real/executable") d = spawn(DummyEvent()) d.addErrback(self.assertIsInstance, failure.Failure) return d def test_write_data(self): TEXT = "Test spawn process" def read_data(result): f = open("spawnfile.txt") try: self.assertEqual(f.read(), TEXT) finally: f.close() spawn = SpawnCommand(util.sibpath(__file__, "test_spawn.sh")) d = spawn(DummyEvent(TEXT)) d.addCallback(read_data) return d
import sys from twisted.trial import unittest from twisted.python import failure + from twisted.python import util from ..spawn import SpawnCommand class DummyEvent(object): - text = "" + def __init__(self, text=None): + self.text = text class SpawnCommandProtocolTestCase(unittest.TestCase): def test_good_process(self): spawn = SpawnCommand(sys.executable) d = spawn(DummyEvent()) d.addCallback(self.assertEqual, True) return d def test_bad_process(self): spawn = SpawnCommand("/not/a/real/executable") d = spawn(DummyEvent()) d.addErrback(self.assertIsInstance, failure.Failure) return d + + def test_write_data(self): + TEXT = "Test spawn process" + def read_data(result): + f = open("spawnfile.txt") + try: + self.assertEqual(f.read(), TEXT) + finally: + f.close() + spawn = SpawnCommand(util.sibpath(__file__, "test_spawn.sh")) + d = spawn(DummyEvent(TEXT)) + d.addCallback(read_data) + return d
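twisted.python.util.sibpath, which the record's new test uses to locate test_spawn.sh, resolves a name relative to the directory of a given file. A stdlib-only equivalent, shown as a sketch:

import os

def sibpath(path, sibling):
    # Join `sibling` onto the directory that contains `path`.
    return os.path.join(os.path.dirname(os.path.abspath(path)), sibling)

print(sibpath(__file__, 'test_spawn.sh'))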
49c73b00b5528706fbb340e53b37e59c8303d70d
oneflow/settings/snippets/common_production.py
oneflow/settings/snippets/common_production.py
ALLOWED_HOSTS += [ '1flow.io', 'app.1flow.io', 'api.1flow.io', ]
MANAGERS += (('Matthieu Chaignot', '[email protected]'), ) ALLOWED_HOSTS += [ '1flow.io', 'app.1flow.io', 'api.1flow.io', ]
Add Matthieu to MANAGERS, for him to receive the warn-closed-feed mail.
Add Matthieu to MANAGERS, for him to receive the warn-closed-feed mail.
Python
agpl-3.0
1flow/1flow,1flow/1flow,1flow/1flow,WillianPaiva/1flow,1flow/1flow,WillianPaiva/1flow,WillianPaiva/1flow,1flow/1flow,WillianPaiva/1flow,WillianPaiva/1flow
+ + MANAGERS += (('Matthieu Chaignot', '[email protected]'), ) ALLOWED_HOSTS += [ '1flow.io', 'app.1flow.io', 'api.1flow.io', ]
Add Matthieu to MANAGERS, for him to receive the warn-closed-feed mail.
## Code Before: ALLOWED_HOSTS += [ '1flow.io', 'app.1flow.io', 'api.1flow.io', ] ## Instruction: Add Matthieu to MANAGERS, for him to receive the warn-closed-feed mail. ## Code After: MANAGERS += (('Matthieu Chaignot', '[email protected]'), ) ALLOWED_HOSTS += [ '1flow.io', 'app.1flow.io', 'api.1flow.io', ]
+ + MANAGERS += (('Matthieu Chaignot', '[email protected]'), ) ALLOWED_HOSTS += [ '1flow.io', 'app.1flow.io', 'api.1flow.io', ]
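Django background for the record above: MANAGERS is a tuple of (full name, email) pairs, and django.core.mail.mail_managers() addresses everyone in it, which is how the warn-closed-feed mail mentioned in the message reaches the new entry. A minimal tuple-handling sketch, with no Django project assumed:

MANAGERS = ()
MANAGERS += (('Matthieu Chaignot', '[email protected]'), )
for name, email in MANAGERS:
    print('%s <%s>' % (name, email))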
eda0dc8bdc89e815ff21be91ade9d53f0c13721a
mockito/tests/numpy_test.py
mockito/tests/numpy_test.py
import mockito from mockito import when, patch import numpy as np from . import module def xcompare(a, b): if isinstance(a, mockito.matchers.Matcher): return a.matches(b) return np.array_equal(a, b) class TestEnsureNumpyWorks: def testEnsureNumpyArrayAllowedWhenStubbing(self): array = np.array([1, 2, 3]) with patch(mockito.invocation.MatchingInvocation.compare, xcompare): when(module).one_arg(array).thenReturn('yep') assert module.one_arg(array) == 'yep' def testEnsureNumpyArrayAllowedWhenCalling(self): array = np.array([1, 2, 3]) when(module).one_arg(Ellipsis).thenReturn('yep') assert module.one_arg(array) == 'yep'
import mockito from mockito import when, patch import pytest import numpy as np from . import module pytestmark = pytest.mark.usefixtures("unstub") def xcompare(a, b): if isinstance(a, mockito.matchers.Matcher): return a.matches(b) return np.array_equal(a, b) class TestEnsureNumpyWorks: def testEnsureNumpyArrayAllowedWhenStubbing(self): array = np.array([1, 2, 3]) when(module).one_arg(array).thenReturn('yep') with patch(mockito.invocation.MatchingInvocation.compare, xcompare): assert module.one_arg(array) == 'yep' def testEnsureNumpyArrayAllowedWhenCalling(self): array = np.array([1, 2, 3]) when(module).one_arg(Ellipsis).thenReturn('yep') assert module.one_arg(array) == 'yep'
Make numpy test clearer and ensure unstub
Make numpy test clearer and ensure unstub
Python
mit
kaste/mockito-python
import mockito from mockito import when, patch + import pytest import numpy as np from . import module + + + pytestmark = pytest.mark.usefixtures("unstub") def xcompare(a, b): if isinstance(a, mockito.matchers.Matcher): return a.matches(b) return np.array_equal(a, b) class TestEnsureNumpyWorks: def testEnsureNumpyArrayAllowedWhenStubbing(self): array = np.array([1, 2, 3]) + when(module).one_arg(array).thenReturn('yep') + with patch(mockito.invocation.MatchingInvocation.compare, xcompare): - when(module).one_arg(array).thenReturn('yep') assert module.one_arg(array) == 'yep' def testEnsureNumpyArrayAllowedWhenCalling(self): array = np.array([1, 2, 3]) when(module).one_arg(Ellipsis).thenReturn('yep') assert module.one_arg(array) == 'yep'
Make numpy test clearer and ensure unstub
## Code Before: import mockito from mockito import when, patch import numpy as np from . import module def xcompare(a, b): if isinstance(a, mockito.matchers.Matcher): return a.matches(b) return np.array_equal(a, b) class TestEnsureNumpyWorks: def testEnsureNumpyArrayAllowedWhenStubbing(self): array = np.array([1, 2, 3]) with patch(mockito.invocation.MatchingInvocation.compare, xcompare): when(module).one_arg(array).thenReturn('yep') assert module.one_arg(array) == 'yep' def testEnsureNumpyArrayAllowedWhenCalling(self): array = np.array([1, 2, 3]) when(module).one_arg(Ellipsis).thenReturn('yep') assert module.one_arg(array) == 'yep' ## Instruction: Make numpy test clearer and ensure unstub ## Code After: import mockito from mockito import when, patch import pytest import numpy as np from . import module pytestmark = pytest.mark.usefixtures("unstub") def xcompare(a, b): if isinstance(a, mockito.matchers.Matcher): return a.matches(b) return np.array_equal(a, b) class TestEnsureNumpyWorks: def testEnsureNumpyArrayAllowedWhenStubbing(self): array = np.array([1, 2, 3]) when(module).one_arg(array).thenReturn('yep') with patch(mockito.invocation.MatchingInvocation.compare, xcompare): assert module.one_arg(array) == 'yep' def testEnsureNumpyArrayAllowedWhenCalling(self): array = np.array([1, 2, 3]) when(module).one_arg(Ellipsis).thenReturn('yep') assert module.one_arg(array) == 'yep'
import mockito from mockito import when, patch + import pytest import numpy as np from . import module + + + pytestmark = pytest.mark.usefixtures("unstub") def xcompare(a, b): if isinstance(a, mockito.matchers.Matcher): return a.matches(b) return np.array_equal(a, b) class TestEnsureNumpyWorks: def testEnsureNumpyArrayAllowedWhenStubbing(self): array = np.array([1, 2, 3]) + when(module).one_arg(array).thenReturn('yep') + with patch(mockito.invocation.MatchingInvocation.compare, xcompare): - when(module).one_arg(array).thenReturn('yep') assert module.one_arg(array) == 'yep' def testEnsureNumpyArrayAllowedWhenCalling(self): array = np.array([1, 2, 3]) when(module).one_arg(Ellipsis).thenReturn('yep') assert module.one_arg(array) == 'yep'
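The record above moves unstubbing into a module-wide pytest mark. A generic illustration of that idiom: pytestmark applies the named fixture to every test collected from the module. The fixture name here is made up; mockito ships its own 'unstub' fixture.

import pytest

pytestmark = pytest.mark.usefixtures('reset_state')

@pytest.fixture
def reset_state():
    yield
    # Teardown placed here runs after each test in this module.

def test_something():
    assert True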
8a71fe98d50f7603742c60273502fb840e967c97
scalpel/event.py
scalpel/event.py
from weakref import WeakValueDictionary class Signal(object): def __init__(self): self.__slots = WeakValueDictionary() def __call__(self, *args, **kargs): for key in self.__slots: func, _ = key func(self.__slots[key], *args, **kargs) def connect(self, slot): key = (slot.im_func, id(slot.im_self)) self.__slots[key] = slot.im_self def disconnect(self, slot): key = (slot.im_func, id(slot.im_self)) if key in self.__slots: self.__slots.pop(key) def clear(self): self.__slots.clear()
from weakref import WeakValueDictionary import inspect class Signal(object): def __init__(self): self.__slots = WeakValueDictionary() # For keeping references to _FuncHost objects. self.__funchosts = {} def __call__(self, *args, **kargs): for key in self.__slots: func, _ = key func(self.__slots[key], *args, **kargs) def connect(self, slot): if inspect.ismethod(slot): key = (slot.im_func, id(slot.im_self)) self.__slots[key] = slot.im_self else: host = _FuncHost(slot) self.connect(host.meth) # We stick a copy in here just to keep the instance alive. self.__funchosts[slot] = host def disconnect(self, slot): if inspect.ismethod(slot): key = (slot.im_func, id(slot.im_self)) if key in self.__slots: self.__slots.pop(key) else: if slot in self.__funchosts: self.disconnect(self.__funchosts[slot].meth) self.__funchosts.pop(slot) def clear(self): self.__slots.clear() class _FuncHost(object): """Turn a function into a method.""" def __init__(self, func): self.func = func def meth(self, *args, **kwargs): self.func(*args, **kwargs) if __name__ == '__main__': a = 0 def test_func(): def foo(): global a a = a + 1 s = Signal() s() s.connect(foo) s() s.disconnect(foo) s() assert a == 1 test_func()
Add support for connecting functions to Signal objects.
Add support for connecting functions to Signal objects. Less elegant than the original recipe, but more functional.
Python
bsd-3-clause
stackp/Gum,stackp/Gum,stackp/Gum
from weakref import WeakValueDictionary + import inspect class Signal(object): + def __init__(self): self.__slots = WeakValueDictionary() + + # For keeping references to _FuncHost objects. + self.__funchosts = {} def __call__(self, *args, **kargs): for key in self.__slots: func, _ = key func(self.__slots[key], *args, **kargs) def connect(self, slot): + if inspect.ismethod(slot): - key = (slot.im_func, id(slot.im_self)) + key = (slot.im_func, id(slot.im_self)) - self.__slots[key] = slot.im_self + self.__slots[key] = slot.im_self + else: + host = _FuncHost(slot) + self.connect(host.meth) + # We stick a copy in here just to keep the instance alive. + self.__funchosts[slot] = host def disconnect(self, slot): + if inspect.ismethod(slot): - key = (slot.im_func, id(slot.im_self)) + key = (slot.im_func, id(slot.im_self)) - if key in self.__slots: + if key in self.__slots: - self.__slots.pop(key) + self.__slots.pop(key) + else: + if slot in self.__funchosts: + self.disconnect(self.__funchosts[slot].meth) + self.__funchosts.pop(slot) def clear(self): self.__slots.clear() + + + class _FuncHost(object): + """Turn a function into a method.""" + def __init__(self, func): + self.func = func + + def meth(self, *args, **kwargs): + self.func(*args, **kwargs) + + + if __name__ == '__main__': + + a = 0 + def test_func(): + def foo(): + global a + a = a + 1 + s = Signal() + s() + s.connect(foo) + s() + s.disconnect(foo) + s() + assert a == 1 + + test_func()
Add support for connecting functions to Signal objects.
## Code Before: from weakref import WeakValueDictionary class Signal(object): def __init__(self): self.__slots = WeakValueDictionary() def __call__(self, *args, **kargs): for key in self.__slots: func, _ = key func(self.__slots[key], *args, **kargs) def connect(self, slot): key = (slot.im_func, id(slot.im_self)) self.__slots[key] = slot.im_self def disconnect(self, slot): key = (slot.im_func, id(slot.im_self)) if key in self.__slots: self.__slots.pop(key) def clear(self): self.__slots.clear() ## Instruction: Add support for connecting functions to Signal objects. ## Code After: from weakref import WeakValueDictionary import inspect class Signal(object): def __init__(self): self.__slots = WeakValueDictionary() # For keeping references to _FuncHost objects. self.__funchosts = {} def __call__(self, *args, **kargs): for key in self.__slots: func, _ = key func(self.__slots[key], *args, **kargs) def connect(self, slot): if inspect.ismethod(slot): key = (slot.im_func, id(slot.im_self)) self.__slots[key] = slot.im_self else: host = _FuncHost(slot) self.connect(host.meth) # We stick a copy in here just to keep the instance alive. self.__funchosts[slot] = host def disconnect(self, slot): if inspect.ismethod(slot): key = (slot.im_func, id(slot.im_self)) if key in self.__slots: self.__slots.pop(key) else: if slot in self.__funchosts: self.disconnect(self.__funchosts[slot].meth) self.__funchosts.pop(slot) def clear(self): self.__slots.clear() class _FuncHost(object): """Turn a function into a method.""" def __init__(self, func): self.func = func def meth(self, *args, **kwargs): self.func(*args, **kwargs) if __name__ == '__main__': a = 0 def test_func(): def foo(): global a a = a + 1 s = Signal() s() s.connect(foo) s() s.disconnect(foo) s() assert a == 1 test_func()
from weakref import WeakValueDictionary + import inspect class Signal(object): + def __init__(self): self.__slots = WeakValueDictionary() + + # For keeping references to _FuncHost objects. + self.__funchosts = {} def __call__(self, *args, **kargs): for key in self.__slots: func, _ = key func(self.__slots[key], *args, **kargs) def connect(self, slot): + if inspect.ismethod(slot): - key = (slot.im_func, id(slot.im_self)) + key = (slot.im_func, id(slot.im_self)) ? ++++ - self.__slots[key] = slot.im_self + self.__slots[key] = slot.im_self ? ++++ + else: + host = _FuncHost(slot) + self.connect(host.meth) + # We stick a copy in here just to keep the instance alive. + self.__funchosts[slot] = host def disconnect(self, slot): + if inspect.ismethod(slot): - key = (slot.im_func, id(slot.im_self)) + key = (slot.im_func, id(slot.im_self)) ? ++++ - if key in self.__slots: + if key in self.__slots: ? ++++ - self.__slots.pop(key) + self.__slots.pop(key) ? ++++ + else: + if slot in self.__funchosts: + self.disconnect(self.__funchosts[slot].meth) + self.__funchosts.pop(slot) def clear(self): self.__slots.clear() + + + class _FuncHost(object): + """Turn a function into a method.""" + def __init__(self, func): + self.func = func + + def meth(self, *args, **kwargs): + self.func(*args, **kwargs) + + + if __name__ == '__main__': + + a = 0 + def test_func(): + def foo(): + global a + a = a + 1 + s = Signal() + s() + s.connect(foo) + s() + s.disconnect(foo) + s() + assert a == 1 + + test_func()
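Why the Signal in the record above keeps its slots in a WeakValueDictionary: entries disappear as soon as the receiving instance is garbage-collected, so dead observers never keep the signal alive. A small CPython demonstration (Box is just a stand-in receiver):

from weakref import WeakValueDictionary

class Box(object):
    pass

registry = WeakValueDictionary()
b = Box()
registry['slot'] = b
print(len(registry))   # 1 -- the receiver is alive
del b
print(len(registry))   # 0 -- the entry vanished with the object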
8b3d73ce9bbdcf39e7babd5637fcff9d1ad1dbf9
smartcard/Synchronization.py
smartcard/Synchronization.py
from threading import RLock def synchronized(method): def f(*args): self = args[0] self.mutex.acquire() # print method.__name__, 'acquired' try: return apply(method, args) finally: self.mutex.release() # print method.__name__, 'released' return f def synchronize(klass, names=None): """Synchronize methods in the given class. Only synchronize the methods whose names are given, or all methods if names=None.""" if type(names) == type(''): names = names.split() for (name, val) in klass.__dict__.items(): if callable(val) and name != '__init__' and \ (names == None or name in names): # print "synchronizing", name klass.__dict__[name] = synchronized(val) class Synchronization: # You can create your own self.mutex, or inherit from this class: def __init__(self): self.mutex = RLock()
from threading import RLock def synchronized(method): def f(*args): self = args[0] self.mutex.acquire() # print method.__name__, 'acquired' try: return apply(method, args) finally: self.mutex.release() # print method.__name__, 'released' return f def synchronize(klass, names=None): """Synchronize methods in the given class. Only synchronize the methods whose names are given, or all methods if names=None.""" if type(names) == type(''): names = names.split() for (name, val) in klass.__dict__.items(): if callable(val) and name != '__init__' and \ (names == None or name in names): # print "synchronizing", name setattr(klass, name, synchronized(val)) class Synchronization(object): # You can create your own self.mutex, or inherit from this class: def __init__(self): self.mutex = RLock()
Use setattr() instead of a direct access to __dict__
Use setattr() instead of a direct access to __dict__ Closes Feature Request 3110077 "new style classes" https://sourceforge.net/tracker/?func=detail&aid=3110077&group_id=196342&atid=957075
Python
lgpl-2.1
moreati/pyscard,moreati/pyscard,LudovicRousseau/pyscard,moreati/pyscard,LudovicRousseau/pyscard,LudovicRousseau/pyscard
from threading import RLock def synchronized(method): def f(*args): self = args[0] self.mutex.acquire() # print method.__name__, 'acquired' try: return apply(method, args) finally: self.mutex.release() # print method.__name__, 'released' return f def synchronize(klass, names=None): """Synchronize methods in the given class. Only synchronize the methods whose names are given, or all methods if names=None.""" if type(names) == type(''): names = names.split() for (name, val) in klass.__dict__.items(): if callable(val) and name != '__init__' and \ (names == None or name in names): # print "synchronizing", name - klass.__dict__[name] = synchronized(val) + setattr(klass, name, synchronized(val)) - class Synchronization: + class Synchronization(object): # You can create your own self.mutex, or inherit from this class: def __init__(self): self.mutex = RLock()
Use setattr() instead of a direct access to __dict__
## Code Before: from threading import RLock def synchronized(method): def f(*args): self = args[0] self.mutex.acquire() # print method.__name__, 'acquired' try: return apply(method, args) finally: self.mutex.release() # print method.__name__, 'released' return f def synchronize(klass, names=None): """Synchronize methods in the given class. Only synchronize the methods whose names are given, or all methods if names=None.""" if type(names) == type(''): names = names.split() for (name, val) in klass.__dict__.items(): if callable(val) and name != '__init__' and \ (names == None or name in names): # print "synchronizing", name klass.__dict__[name] = synchronized(val) class Synchronization: # You can create your own self.mutex, or inherit from this class: def __init__(self): self.mutex = RLock() ## Instruction: Use setattr() instead of a direct access to __dict__ ## Code After: from threading import RLock def synchronized(method): def f(*args): self = args[0] self.mutex.acquire() # print method.__name__, 'acquired' try: return apply(method, args) finally: self.mutex.release() # print method.__name__, 'released' return f def synchronize(klass, names=None): """Synchronize methods in the given class. Only synchronize the methods whose names are given, or all methods if names=None.""" if type(names) == type(''): names = names.split() for (name, val) in klass.__dict__.items(): if callable(val) and name != '__init__' and \ (names == None or name in names): # print "synchronizing", name setattr(klass, name, synchronized(val)) class Synchronization(object): # You can create your own self.mutex, or inherit from this class: def __init__(self): self.mutex = RLock()
from threading import RLock def synchronized(method): def f(*args): self = args[0] self.mutex.acquire() # print method.__name__, 'acquired' try: return apply(method, args) finally: self.mutex.release() # print method.__name__, 'released' return f def synchronize(klass, names=None): """Synchronize methods in the given class. Only synchronize the methods whose names are given, or all methods if names=None.""" if type(names) == type(''): names = names.split() for (name, val) in klass.__dict__.items(): if callable(val) and name != '__init__' and \ (names == None or name in names): # print "synchronizing", name - klass.__dict__[name] = synchronized(val) ? ^^^^^^^^^^ ^^^ + setattr(klass, name, synchronized(val)) ? ++++++++ ^^ ^ + - class Synchronization: + class Synchronization(object): ? ++++++++ # You can create your own self.mutex, or inherit from this class: def __init__(self): self.mutex = RLock()
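The record's switch to setattr() matters because a new-style class exposes __dict__ as a read-only mappingproxy, so direct item assignment fails. A small demonstration on Python 3:

class C(object):
    def f(self):
        return 'orig'

try:
    C.__dict__['f'] = lambda self: 'patched'
except TypeError as exc:
    print('direct __dict__ write fails:', exc)

setattr(C, 'f', lambda self: 'patched')   # the supported route
print(C().f())   # -> patched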
b2f1f97000c8d3479e1df6778f0cc85ec0680571
garden-watering01/mybuddy.py
garden-watering01/mybuddy.py
import machine def setntptime(maxretries=10): # ntptime is a helper module which gets packaged into the firmware # Check https://raw.githubusercontent.com/micropython/micropython/master/esp8266/scripts/ntptime.py import ntptime for i in range (maxretries): try: ntptime.settime() break except: if i+1 == maxretries: raise def deepsleep(sleeptime=15*60*1000): # configure RTC.ALARM0 to be able to wake the device rtc = machine.RTC() rtc.irq(trigger=rtc.ALARM0, wake=machine.DEEPSLEEP) # set RTC.ALARM0 to fire after some time. Time is given in milliseconds here rtc.alarm(rtc.ALARM0, sleeptime) #Make sure you have GPIO16 connected RST to wake from deepSleep. # put the device to sleep print ("Going into Sleep now") machine.deepsleep()
import machine def have_internet(): import urequests try: resp = urequests.request("HEAD", "http://jsonip.com/") return True except: return False def setntptime(maxretries=10): # ntptime is a helper module which gets packaged into the firmware # Check https://raw.githubusercontent.com/micropython/micropython/master/esp8266/scripts/ntptime.py import ntptime for i in range (maxretries): try: ntptime.settime() break except: if i+1 == maxretries: raise def deepsleep(sleeptime=15*60*1000): # configure RTC.ALARM0 to be able to wake the device rtc = machine.RTC() rtc.irq(trigger=rtc.ALARM0, wake=machine.DEEPSLEEP) # set RTC.ALARM0 to fire after some time. Time is given in milliseconds here rtc.alarm(rtc.ALARM0, sleeptime) #Make sure you have GPIO16 connected RST to wake from deepSleep. # put the device to sleep print ("Going into Sleep now") machine.deepsleep()
Add a function to check status of internet connectivity
Add a function to check status of internet connectivity
Python
mit
fuzzyhandle/esp8266hangout,fuzzyhandle/esp8266hangout,fuzzyhandle/esp8266hangout
import machine + + def have_internet(): + import urequests + try: + resp = urequests.request("HEAD", "http://jsonip.com/") + return True + except: + return False def setntptime(maxretries=10): # ntptime is a helper module which gets packaged into the firmware # Check https://raw.githubusercontent.com/micropython/micropython/master/esp8266/scripts/ntptime.py import ntptime for i in range (maxretries): try: ntptime.settime() break except: if i+1 == maxretries: raise def deepsleep(sleeptime=15*60*1000): # configure RTC.ALARM0 to be able to wake the device rtc = machine.RTC() rtc.irq(trigger=rtc.ALARM0, wake=machine.DEEPSLEEP) # set RTC.ALARM0 to fire after some time. Time is given in milliseconds here rtc.alarm(rtc.ALARM0, sleeptime) #Make sure you have GPIO16 connected RST to wake from deepSleep. # put the device to sleep print ("Going into Sleep now") machine.deepsleep()
Add a function to check status of internet connectivity
## Code Before: import machine def setntptime(maxretries=10): # ntptime is a helper module which gets packaged into the firmware # Check https://raw.githubusercontent.com/micropython/micropython/master/esp8266/scripts/ntptime.py import ntptime for i in range (maxretries): try: ntptime.settime() break except: if i+1 == maxretries: raise def deepsleep(sleeptime=15*60*1000): # configure RTC.ALARM0 to be able to wake the device rtc = machine.RTC() rtc.irq(trigger=rtc.ALARM0, wake=machine.DEEPSLEEP) # set RTC.ALARM0 to fire after some time. Time is given in milliseconds here rtc.alarm(rtc.ALARM0, sleeptime) #Make sure you have GPIO16 connected RST to wake from deepSleep. # put the device to sleep print ("Going into Sleep now") machine.deepsleep() ## Instruction: Add a function to check status of internet connectivity ## Code After: import machine def have_internet(): import urequests try: resp = urequests.request("HEAD", "http://jsonip.com/") return True except: return False def setntptime(maxretries=10): # ntptime is a helper module which gets packaged into the firmware # Check https://raw.githubusercontent.com/micropython/micropython/master/esp8266/scripts/ntptime.py import ntptime for i in range (maxretries): try: ntptime.settime() break except: if i+1 == maxretries: raise def deepsleep(sleeptime=15*60*1000): # configure RTC.ALARM0 to be able to wake the device rtc = machine.RTC() rtc.irq(trigger=rtc.ALARM0, wake=machine.DEEPSLEEP) # set RTC.ALARM0 to fire after some time. Time is given in milliseconds here rtc.alarm(rtc.ALARM0, sleeptime) #Make sure you have GPIO16 connected RST to wake from deepSleep. # put the device to sleep print ("Going into Sleep now") machine.deepsleep()
import machine + + def have_internet(): + import urequests + try: + resp = urequests.request("HEAD", "http://jsonip.com/") + return True + except: + return False def setntptime(maxretries=10): # ntptime is a helper module which gets packaged into the firmware # Check https://raw.githubusercontent.com/micropython/micropython/master/esp8266/scripts/ntptime.py import ntptime for i in range (maxretries): try: ntptime.settime() break except: if i+1 == maxretries: raise def deepsleep(sleeptime=15*60*1000): # configure RTC.ALARM0 to be able to wake the device rtc = machine.RTC() rtc.irq(trigger=rtc.ALARM0, wake=machine.DEEPSLEEP) # set RTC.ALARM0 to fire after some time. Time is given in milliseconds here rtc.alarm(rtc.ALARM0, sleeptime) #Make sure you have GPIO16 connected RST to wake from deepSleep. # put the device to sleep print ("Going into Sleep now") machine.deepsleep()
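The record's have_internet() uses urequests, the MicroPython take on requests. Below is a CPython equivalent of the same HEAD-probe idea using only the standard library; the probe URL comes from the record, and the timeout is an added assumption.

import urllib.request

def have_internet(url='http://jsonip.com/', timeout=5):
    try:
        req = urllib.request.Request(url, method='HEAD')
        urllib.request.urlopen(req, timeout=timeout)
        return True
    except OSError:
        return False

print(have_internet())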
7e2b60a7f7b32c235f931f9e7263ccefc84c79e2
gittip/orm/__init__.py
gittip/orm/__init__.py
from __future__ import unicode_literals import os import pdb from sqlalchemy import create_engine, MetaData from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker, scoped_session class Model(object): def __repr__(self): cols = self.__mapper__.c.keys() class_name = self.__class__.__name__ items = ', '.join(['%s=%s' % (col, repr(getattr(self, col))) for col in cols]) return '%s(%s)' % (class_name, items) def attrs_dict(self): keys = self.__mapper__.c.keys() attrs = {} for key in keys: attrs[key] = getattr(self, key) return attrs class SQLAlchemy(object): def __init__(self): self.session = self.create_session() self.Model = self.make_declarative_base() @property def engine(self): dburl = os.environ['DATABASE_URL'] return create_engine(dburl) def create_session(self): session = scoped_session(sessionmaker()) session.configure(bind=self.engine) return session def make_declarative_base(self): base = declarative_base(cls=Model) base.query = self.session.query_property() return base db = SQLAlchemy() all = [db] def rollback(*_): db.session.rollback()
from __future__ import unicode_literals import os import pdb from sqlalchemy import create_engine, MetaData from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker, scoped_session class Model(object): def __repr__(self): cols = self.__mapper__.c.keys() class_name = self.__class__.__name__ items = ', '.join(['%s=%s' % (col, repr(getattr(self, col))) for col in cols]) return '%s(%s)' % (class_name, items) def attrs_dict(self): keys = self.__mapper__.c.keys() attrs = {} for key in keys: attrs[key] = getattr(self, key) return attrs class SQLAlchemy(object): def __init__(self): self.session = self.create_session() self.Model = self.make_declarative_base() @property def engine(self): dburl = os.environ['DATABASE_URL'] return create_engine(dburl) def create_session(self): session = scoped_session(sessionmaker()) session.configure(bind=self.engine) return session def make_declarative_base(self): base = declarative_base(cls=Model) base.query = self.session.query_property() return base def drop_all(self): self.Model.metadata.drop_all(bind=self.engine) def create_all(self): self.Model.metadata.create_all(bind=self.engine) db = SQLAlchemy() all = [db] def rollback(*_): db.session.rollback()
Add convenience methods for creating/deleting all tables, for bootstrapping/testing use
Add convenience methods for creating/deleting all tables, for bootstrapping/testing use Signed-off-by: Joonas Bergius <[email protected]>
Python
mit
studio666/gratipay.com,gratipay/gratipay.com,eXcomm/gratipay.com,MikeFair/www.gittip.com,MikeFair/www.gittip.com,studio666/gratipay.com,mccolgst/www.gittip.com,bountysource/www.gittip.com,eXcomm/gratipay.com,gratipay/gratipay.com,mccolgst/www.gittip.com,gratipay/gratipay.com,bountysource/www.gittip.com,MikeFair/www.gittip.com,mccolgst/www.gittip.com,eXcomm/gratipay.com,gratipay/gratipay.com,studio666/gratipay.com,mccolgst/www.gittip.com,bountysource/www.gittip.com,eXcomm/gratipay.com,studio666/gratipay.com,bountysource/www.gittip.com
from __future__ import unicode_literals import os import pdb from sqlalchemy import create_engine, MetaData from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker, scoped_session class Model(object): def __repr__(self): cols = self.__mapper__.c.keys() class_name = self.__class__.__name__ items = ', '.join(['%s=%s' % (col, repr(getattr(self, col))) for col in cols]) return '%s(%s)' % (class_name, items) def attrs_dict(self): keys = self.__mapper__.c.keys() attrs = {} for key in keys: attrs[key] = getattr(self, key) return attrs class SQLAlchemy(object): def __init__(self): self.session = self.create_session() self.Model = self.make_declarative_base() @property def engine(self): dburl = os.environ['DATABASE_URL'] return create_engine(dburl) def create_session(self): session = scoped_session(sessionmaker()) session.configure(bind=self.engine) return session def make_declarative_base(self): base = declarative_base(cls=Model) base.query = self.session.query_property() return base + def drop_all(self): + self.Model.metadata.drop_all(bind=self.engine) + + def create_all(self): + self.Model.metadata.create_all(bind=self.engine) + db = SQLAlchemy() all = [db] def rollback(*_): db.session.rollback()
Add convenience methods for creating/deleting all tables, for bootstrapping/testing use
## Code Before: from __future__ import unicode_literals import os import pdb from sqlalchemy import create_engine, MetaData from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker, scoped_session class Model(object): def __repr__(self): cols = self.__mapper__.c.keys() class_name = self.__class__.__name__ items = ', '.join(['%s=%s' % (col, repr(getattr(self, col))) for col in cols]) return '%s(%s)' % (class_name, items) def attrs_dict(self): keys = self.__mapper__.c.keys() attrs = {} for key in keys: attrs[key] = getattr(self, key) return attrs class SQLAlchemy(object): def __init__(self): self.session = self.create_session() self.Model = self.make_declarative_base() @property def engine(self): dburl = os.environ['DATABASE_URL'] return create_engine(dburl) def create_session(self): session = scoped_session(sessionmaker()) session.configure(bind=self.engine) return session def make_declarative_base(self): base = declarative_base(cls=Model) base.query = self.session.query_property() return base db = SQLAlchemy() all = [db] def rollback(*_): db.session.rollback() ## Instruction: Add convenience methods for creating/deleting all tables, for bootstrapping/testing use ## Code After: from __future__ import unicode_literals import os import pdb from sqlalchemy import create_engine, MetaData from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker, scoped_session class Model(object): def __repr__(self): cols = self.__mapper__.c.keys() class_name = self.__class__.__name__ items = ', '.join(['%s=%s' % (col, repr(getattr(self, col))) for col in cols]) return '%s(%s)' % (class_name, items) def attrs_dict(self): keys = self.__mapper__.c.keys() attrs = {} for key in keys: attrs[key] = getattr(self, key) return attrs class SQLAlchemy(object): def __init__(self): self.session = self.create_session() self.Model = self.make_declarative_base() @property def engine(self): dburl = os.environ['DATABASE_URL'] return create_engine(dburl) def create_session(self): session = scoped_session(sessionmaker()) session.configure(bind=self.engine) return session def make_declarative_base(self): base = declarative_base(cls=Model) base.query = self.session.query_property() return base def drop_all(self): self.Model.metadata.drop_all(bind=self.engine) def create_all(self): self.Model.metadata.create_all(bind=self.engine) db = SQLAlchemy() all = [db] def rollback(*_): db.session.rollback()
from __future__ import unicode_literals import os import pdb from sqlalchemy import create_engine, MetaData from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker, scoped_session class Model(object): def __repr__(self): cols = self.__mapper__.c.keys() class_name = self.__class__.__name__ items = ', '.join(['%s=%s' % (col, repr(getattr(self, col))) for col in cols]) return '%s(%s)' % (class_name, items) def attrs_dict(self): keys = self.__mapper__.c.keys() attrs = {} for key in keys: attrs[key] = getattr(self, key) return attrs class SQLAlchemy(object): def __init__(self): self.session = self.create_session() self.Model = self.make_declarative_base() @property def engine(self): dburl = os.environ['DATABASE_URL'] return create_engine(dburl) def create_session(self): session = scoped_session(sessionmaker()) session.configure(bind=self.engine) return session def make_declarative_base(self): base = declarative_base(cls=Model) base.query = self.session.query_property() return base + def drop_all(self): + self.Model.metadata.drop_all(bind=self.engine) + + def create_all(self): + self.Model.metadata.create_all(bind=self.engine) + db = SQLAlchemy() all = [db] def rollback(*_): db.session.rollback()
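A standalone sketch of what the record's new convenience methods delegate to, run against an in-memory SQLite engine instead of DATABASE_URL. The model and table names are made up, and the imports follow the same pre-2.0 SQLAlchemy style the record uses.

from sqlalchemy import Column, Integer, create_engine
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class Thing(Base):
    __tablename__ = 'things'
    id = Column(Integer, primary_key=True)

engine = create_engine('sqlite://')
Base.metadata.create_all(bind=engine)   # what db.create_all() wraps
Base.metadata.drop_all(bind=engine)     # what db.drop_all() wraps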
d191a947e34e4d6eee1965f4896a44efc8c7ae91
feedback/views.py
feedback/views.py
from django.http import HttpResponseRedirect from django.shortcuts import render_to_response from django.template import RequestContext from feedback.forms import FeedbackForm def leave_feedback(request): form = FeedbackForm(request.POST or None) if form.is_valid(): feedback = form.save(commit=False) feedback.user = request.user feedback.save() request.user.message_set.create(message="Your feedback has been saved successfully.") return HttpResponseRedirect(request.POST.get('next', request.META.get('HTTP_REFERER', '/'))) return render_to_response('feedback/feedback_form.html', {'form': form}, context_instance=RequestContext(request))
from django.http import HttpResponseRedirect from django.shortcuts import render_to_response from django.template import RequestContext from feedback.forms import FeedbackForm def leave_feedback(request, template_name='feedback/feedback_form.html'): form = FeedbackForm(request.POST or None) if form.is_valid(): feedback = form.save(commit=False) feedback.user = request.user feedback.save() request.user.message_set.create(message="Your feedback has been saved successfully.") return HttpResponseRedirect(request.POST.get('next', request.META.get('HTTP_REFERER', '/'))) return render_to_response(template_name, {'form': form}, context_instance=RequestContext(request))
Allow passing of template_name to view
Allow passing of template_name to view
Python
bsd-3-clause
girasquid/django-feedback
from django.http import HttpResponseRedirect from django.shortcuts import render_to_response from django.template import RequestContext from feedback.forms import FeedbackForm - def leave_feedback(request): + def leave_feedback(request, template_name='feedback/feedback_form.html'): form = FeedbackForm(request.POST or None) if form.is_valid(): feedback = form.save(commit=False) feedback.user = request.user feedback.save() request.user.message_set.create(message="Your feedback has been saved successfully.") return HttpResponseRedirect(request.POST.get('next', request.META.get('HTTP_REFERER', '/'))) - return render_to_response('feedback/feedback_form.html', {'form': form}, context_instance=RequestContext(request)) + return render_to_response(template_name, {'form': form}, context_instance=RequestContext(request)) + +
Allow passing of template_name to view
## Code Before: from django.http import HttpResponseRedirect from django.shortcuts import render_to_response from django.template import RequestContext from feedback.forms import FeedbackForm def leave_feedback(request): form = FeedbackForm(request.POST or None) if form.is_valid(): feedback = form.save(commit=False) feedback.user = request.user feedback.save() request.user.message_set.create(message="Your feedback has been saved successfully.") return HttpResponseRedirect(request.POST.get('next', request.META.get('HTTP_REFERER', '/'))) return render_to_response('feedback/feedback_form.html', {'form': form}, context_instance=RequestContext(request)) ## Instruction: Allow passing of template_name to view ## Code After: from django.http import HttpResponseRedirect from django.shortcuts import render_to_response from django.template import RequestContext from feedback.forms import FeedbackForm def leave_feedback(request, template_name='feedback/feedback_form.html'): form = FeedbackForm(request.POST or None) if form.is_valid(): feedback = form.save(commit=False) feedback.user = request.user feedback.save() request.user.message_set.create(message="Your feedback has been saved successfully.") return HttpResponseRedirect(request.POST.get('next', request.META.get('HTTP_REFERER', '/'))) return render_to_response(template_name, {'form': form}, context_instance=RequestContext(request))
from django.http import HttpResponseRedirect from django.shortcuts import render_to_response from django.template import RequestContext from feedback.forms import FeedbackForm - def leave_feedback(request): + def leave_feedback(request, template_name='feedback/feedback_form.html'): form = FeedbackForm(request.POST or None) if form.is_valid(): feedback = form.save(commit=False) feedback.user = request.user feedback.save() request.user.message_set.create(message="Your feedback has been saved successfully.") return HttpResponseRedirect(request.POST.get('next', request.META.get('HTTP_REFERER', '/'))) - return render_to_response('feedback/feedback_form.html', {'form': form}, context_instance=RequestContext(request)) ? ^^ ^^ ^^^^ ----------------- + return render_to_response(template_name, {'form': form}, context_instance=RequestContext(request)) ? ^ +++++ ^^ ^ +
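A sketch of how the new template_name argument might be supplied per URL; the pre-Django-1.4 patterns() style matches the era of render_to_response and message_set used above, and the URL patterns and template path are invented for illustration:

from django.conf.urls.defaults import patterns, url

urlpatterns = patterns('',
    # uses the default feedback/feedback_form.html
    url(r'^feedback/$', 'feedback.views.leave_feedback'),
    # same view rendered with a site-specific template
    url(r'^support/feedback/$', 'feedback.views.leave_feedback',
        {'template_name': 'support/feedback_form.html'}),
)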
a43d461bf2d5c40b8d828873f9fa0b5e2048a0df
SnsManager/google/GoogleBase.py
SnsManager/google/GoogleBase.py
import httplib2 from apiclient.discovery import build from oauth2client.client import AccessTokenCredentials, AccessTokenCredentialsError from SnsManager.SnsBase import SnsBase from SnsManager import ErrorCode class GoogleBase(SnsBase): def __init__(self, *args, **kwargs): super(self.__class__, self).__init__(*args, **kwargs) self.myId = self.getMyId() self._userAgent = 'Waveface AOStream/1.0' def getMyId(self): try: http = httplib2.Http() credentials = AccessTokenCredentials(self._accessToken, self._userAgent) http = credentials.authorize(http) userInfo = build('oauth2', 'v2', http=http).userinfo().get().execute() self.myId = userInfo['email'] except: return None return self.myId def isTokenValid(self): try: http = httplib2.Http() credentials = AccessTokenCredentials(self._accessToken, self._userAgent) http = credentials.authorize(http) userInfo = build('oauth2', 'v2', http=http).userinfo().get().execute() except AccessTokenCredentialsError as e: return ErrorCode.E_INVALID_TOKEN except: self._logger.exception('GoogleBase::isTokenValid() exception') return ErrorCode.E_FAILED else: return ErrorCode.S_OK
import httplib2 from apiclient.discovery import build from oauth2client.client import AccessTokenCredentials, AccessTokenCredentialsError from SnsManager.SnsBase import SnsBase from SnsManager import ErrorCode class GoogleBase(SnsBase): def __init__(self, *args, **kwargs): super(GoogleBase, self).__init__(*args, **kwargs) self.myId = self.getMyId() self._userAgent = 'Waveface AOStream/1.0' self._http = httplib2.Http() credentials = AccessTokenCredentials(self._accessToken, self._userAgent) self._http = credentials.authorize(self._http) def getMyId(self): try: userInfo = build('oauth2', 'v2', http=self._http).userinfo().get().execute() self.myId = userInfo['email'] except: return None return self.myId def isTokenValid(self): try: userInfo = build('oauth2', 'v2', http=self._http).userinfo().get().execute() except AccessTokenCredentialsError as e: return ErrorCode.E_INVALID_TOKEN except: self._logger.exception('GoogleBase::isTokenValid() exception') return ErrorCode.E_FAILED else: return ErrorCode.S_OK
Move http object as based object
Move http object as based object
Python
bsd-3-clause
waveface/SnsManager
import httplib2 from apiclient.discovery import build from oauth2client.client import AccessTokenCredentials, AccessTokenCredentialsError from SnsManager.SnsBase import SnsBase from SnsManager import ErrorCode class GoogleBase(SnsBase): def __init__(self, *args, **kwargs): - super(self.__class__, self).__init__(*args, **kwargs) + super(GoogleBase, self).__init__(*args, **kwargs) self.myId = self.getMyId() self._userAgent = 'Waveface AOStream/1.0' + self._http = httplib2.Http() + credentials = AccessTokenCredentials(self._accessToken, self._userAgent) + self._http = credentials.authorize(self._http) + def getMyId(self): try: - http = httplib2.Http() - credentials = AccessTokenCredentials(self._accessToken, self._userAgent) - http = credentials.authorize(http) - userInfo = build('oauth2', 'v2', http=http).userinfo().get().execute() + userInfo = build('oauth2', 'v2', http=self._http).userinfo().get().execute() self.myId = userInfo['email'] except: return None return self.myId def isTokenValid(self): try: - http = httplib2.Http() - credentials = AccessTokenCredentials(self._accessToken, self._userAgent) - http = credentials.authorize(http) - userInfo = build('oauth2', 'v2', http=http).userinfo().get().execute() + userInfo = build('oauth2', 'v2', http=self._http).userinfo().get().execute() except AccessTokenCredentialsError as e: return ErrorCode.E_INVALID_TOKEN except: self._logger.exception('GoogleBase::isTokenValid() exception') return ErrorCode.E_FAILED else: return ErrorCode.S_OK
Move http object as based object
## Code Before: import httplib2 from apiclient.discovery import build from oauth2client.client import AccessTokenCredentials, AccessTokenCredentialsError from SnsManager.SnsBase import SnsBase from SnsManager import ErrorCode class GoogleBase(SnsBase): def __init__(self, *args, **kwargs): super(self.__class__, self).__init__(*args, **kwargs) self.myId = self.getMyId() self._userAgent = 'Waveface AOStream/1.0' def getMyId(self): try: http = httplib2.Http() credentials = AccessTokenCredentials(self._accessToken, self._userAgent) http = credentials.authorize(http) userInfo = build('oauth2', 'v2', http=http).userinfo().get().execute() self.myId = userInfo['email'] except: return None return self.myId def isTokenValid(self): try: http = httplib2.Http() credentials = AccessTokenCredentials(self._accessToken, self._userAgent) http = credentials.authorize(http) userInfo = build('oauth2', 'v2', http=http).userinfo().get().execute() except AccessTokenCredentialsError as e: return ErrorCode.E_INVALID_TOKEN except: self._logger.exception('GoogleBase::isTokenValid() exception') return ErrorCode.E_FAILED else: return ErrorCode.S_OK ## Instruction: Move http object as based object ## Code After: import httplib2 from apiclient.discovery import build from oauth2client.client import AccessTokenCredentials, AccessTokenCredentialsError from SnsManager.SnsBase import SnsBase from SnsManager import ErrorCode class GoogleBase(SnsBase): def __init__(self, *args, **kwargs): super(GoogleBase, self).__init__(*args, **kwargs) self.myId = self.getMyId() self._userAgent = 'Waveface AOStream/1.0' self._http = httplib2.Http() credentials = AccessTokenCredentials(self._accessToken, self._userAgent) self._http = credentials.authorize(self._http) def getMyId(self): try: userInfo = build('oauth2', 'v2', http=self._http).userinfo().get().execute() self.myId = userInfo['email'] except: return None return self.myId def isTokenValid(self): try: userInfo = build('oauth2', 'v2', http=self._http).userinfo().get().execute() except AccessTokenCredentialsError as e: return ErrorCode.E_INVALID_TOKEN except: self._logger.exception('GoogleBase::isTokenValid() exception') return ErrorCode.E_FAILED else: return ErrorCode.S_OK
import httplib2 from apiclient.discovery import build from oauth2client.client import AccessTokenCredentials, AccessTokenCredentialsError from SnsManager.SnsBase import SnsBase from SnsManager import ErrorCode class GoogleBase(SnsBase): def __init__(self, *args, **kwargs): - super(self.__class__, self).__init__(*args, **kwargs) ? ------------ + super(GoogleBase, self).__init__(*args, **kwargs) ? ++++++++ self.myId = self.getMyId() self._userAgent = 'Waveface AOStream/1.0' + self._http = httplib2.Http() + credentials = AccessTokenCredentials(self._accessToken, self._userAgent) + self._http = credentials.authorize(self._http) + def getMyId(self): try: - http = httplib2.Http() - credentials = AccessTokenCredentials(self._accessToken, self._userAgent) - http = credentials.authorize(http) - userInfo = build('oauth2', 'v2', http=http).userinfo().get().execute() + userInfo = build('oauth2', 'v2', http=self._http).userinfo().get().execute() ? ++++++ self.myId = userInfo['email'] except: return None return self.myId def isTokenValid(self): try: - http = httplib2.Http() - credentials = AccessTokenCredentials(self._accessToken, self._userAgent) - http = credentials.authorize(http) - userInfo = build('oauth2', 'v2', http=http).userinfo().get().execute() + userInfo = build('oauth2', 'v2', http=self._http).userinfo().get().execute() ? ++++++ except AccessTokenCredentialsError as e: return ErrorCode.E_INVALID_TOKEN except: self._logger.exception('GoogleBase::isTokenValid() exception') return ErrorCode.E_FAILED else: return ErrorCode.S_OK
b5fc673d44624dfddfbdd98c9806b7e7e2f67331
simplekv/memory/memcachestore.py
simplekv/memory/memcachestore.py
try: from cStringIO import StringIO except ImportError: from StringIO import StringIO from .. import KeyValueStore class MemcacheStore(KeyValueStore): def __contains__(self, key): try: return key in self.mc except TypeError: raise IOError('memcache implementation does not support '\ '__contains__') def __init__(self, mc): self.mc = mc def _delete(self, key): self.mc.delete(key) def _get(self, key): rv = self.mc.get(key) if None == rv: raise KeyError(key) return rv def _get_file(self, key, file): file.write(self._get(key)) def _open(self, key): return StringIO(self._get(key)) def _put(self, key, data): self.mc.set(key, data) return key def _put_file(self, key, file): self.mc.set(key, file.read()) return key def keys(self): raise IOError('Memcache does not support listing keys.') def iter_keys(self): raise IOError('Memcache does not support key iteration.')
try: from cStringIO import StringIO except ImportError: from StringIO import StringIO from .. import KeyValueStore class MemcacheStore(KeyValueStore): def __contains__(self, key): try: return key in self.mc except TypeError: raise IOError('memcache implementation does not support '\ '__contains__') def __init__(self, mc): self.mc = mc def _delete(self, key): if not self.mc.delete(key): raise IOError('Error deleting key') def _get(self, key): rv = self.mc.get(key) if None == rv: raise KeyError(key) return rv def _get_file(self, key, file): file.write(self._get(key)) def _open(self, key): return StringIO(self._get(key)) def _put(self, key, data): if not self.mc.set(key, data): if len(data) >= 1024 * 1023: raise IOError('Failed to store data, probably too large. '\ 'memcached limit is 1M') raise IOError('Failed to store data') return key def _put_file(self, key, file): return self._put(key, file.read()) def keys(self): raise IOError('Memcache does not support listing keys.') def iter_keys(self): raise IOError('Memcache does not support key iteration.')
Check if putting/getting was actually successful.
Check if putting/getting was actually successful.
Python
mit
fmarczin/simplekv,fmarczin/simplekv,karteek/simplekv,mbr/simplekv,karteek/simplekv,mbr/simplekv
try: from cStringIO import StringIO except ImportError: from StringIO import StringIO from .. import KeyValueStore class MemcacheStore(KeyValueStore): def __contains__(self, key): try: return key in self.mc except TypeError: raise IOError('memcache implementation does not support '\ '__contains__') def __init__(self, mc): self.mc = mc def _delete(self, key): - self.mc.delete(key) + if not self.mc.delete(key): + raise IOError('Error deleting key') def _get(self, key): rv = self.mc.get(key) if None == rv: raise KeyError(key) return rv def _get_file(self, key, file): file.write(self._get(key)) def _open(self, key): return StringIO(self._get(key)) def _put(self, key, data): - self.mc.set(key, data) + if not self.mc.set(key, data): + if len(data) >= 1024 * 1023: + raise IOError('Failed to store data, probably too large. '\ + 'memcached limit is 1M') + raise IOError('Failed to store data') return key def _put_file(self, key, file): - self.mc.set(key, file.read()) + return self._put(key, file.read()) - return key def keys(self): raise IOError('Memcache does not support listing keys.') def iter_keys(self): raise IOError('Memcache does not support key iteration.')
Check if putting/getting was actually successful.
## Code Before: try: from cStringIO import StringIO except ImportError: from StringIO import StringIO from .. import KeyValueStore class MemcacheStore(KeyValueStore): def __contains__(self, key): try: return key in self.mc except TypeError: raise IOError('memcache implementation does not support '\ '__contains__') def __init__(self, mc): self.mc = mc def _delete(self, key): self.mc.delete(key) def _get(self, key): rv = self.mc.get(key) if None == rv: raise KeyError(key) return rv def _get_file(self, key, file): file.write(self._get(key)) def _open(self, key): return StringIO(self._get(key)) def _put(self, key, data): self.mc.set(key, data) return key def _put_file(self, key, file): self.mc.set(key, file.read()) return key def keys(self): raise IOError('Memcache does not support listing keys.') def iter_keys(self): raise IOError('Memcache does not support key iteration.') ## Instruction: Check if putting/getting was actually successful. ## Code After: try: from cStringIO import StringIO except ImportError: from StringIO import StringIO from .. import KeyValueStore class MemcacheStore(KeyValueStore): def __contains__(self, key): try: return key in self.mc except TypeError: raise IOError('memcache implementation does not support '\ '__contains__') def __init__(self, mc): self.mc = mc def _delete(self, key): if not self.mc.delete(key): raise IOError('Error deleting key') def _get(self, key): rv = self.mc.get(key) if None == rv: raise KeyError(key) return rv def _get_file(self, key, file): file.write(self._get(key)) def _open(self, key): return StringIO(self._get(key)) def _put(self, key, data): if not self.mc.set(key, data): if len(data) >= 1024 * 1023: raise IOError('Failed to store data, probably too large. '\ 'memcached limit is 1M') raise IOError('Failed to store data') return key def _put_file(self, key, file): return self._put(key, file.read()) def keys(self): raise IOError('Memcache does not support listing keys.') def iter_keys(self): raise IOError('Memcache does not support key iteration.')
try: from cStringIO import StringIO except ImportError: from StringIO import StringIO from .. import KeyValueStore class MemcacheStore(KeyValueStore): def __contains__(self, key): try: return key in self.mc except TypeError: raise IOError('memcache implementation does not support '\ '__contains__') def __init__(self, mc): self.mc = mc def _delete(self, key): - self.mc.delete(key) + if not self.mc.delete(key): ? +++++++ + + raise IOError('Error deleting key') def _get(self, key): rv = self.mc.get(key) if None == rv: raise KeyError(key) return rv def _get_file(self, key, file): file.write(self._get(key)) def _open(self, key): return StringIO(self._get(key)) def _put(self, key, data): - self.mc.set(key, data) + if not self.mc.set(key, data): ? +++++++ + + if len(data) >= 1024 * 1023: + raise IOError('Failed to store data, probably too large. '\ + 'memcached limit is 1M') + raise IOError('Failed to store data') return key def _put_file(self, key, file): - self.mc.set(key, file.read()) ? ^^^^^ + return self._put(key, file.read()) ? +++++++ ^^^ - return key def keys(self): raise IOError('Memcache does not support listing keys.') def iter_keys(self): raise IOError('Memcache does not support key iteration.')
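The size hint added to _put() exists because memcached rejects items at its default 1 MB value limit, and python-memcached signals that by returning a falsy status from set(). A hedged sketch of calling code that observes the new IOError; the client address is a placeholder:

import memcache  # python-memcached, or any client exposing get/set/delete

from simplekv.memory.memcachestore import MemcacheStore

store = MemcacheStore(memcache.Client(['127.0.0.1:11211']))
try:
    store.put('big', 'x' * (1024 * 1024))  # over the default memcached item size
except IOError as exc:
    print('store refused the value: %s' % exc)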
022f2cc6d067769a6c8e56601c0238aac69ec9ab
jfr_playoff/settings.py
jfr_playoff/settings.py
import glob, json, os, readline, sys def complete_filename(text, state): return (glob.glob(text+'*')+[None])[state] class PlayoffSettings: def __init__(self): self.interactive = False self.settings_file = None if len(sys.argv) > 1: self.settings_file = sys.argv[1] else: self.interactive = True def load(self): if self.settings_file is None: readline.set_completer_delims(' \t\n;') readline.parse_and_bind("tab: complete") readline.set_completer(complete_filename) self.settings_file = raw_input('JSON settings file: ') self.settings = json.load(open(self.settings_file)) def has_section(self, key): self.load() return key in self.settings def get(self, *keys): self.load() section = self.settings for key in keys: section = section[key] return section
import glob, json, os, readline, sys def complete_filename(text, state): return (glob.glob(text+'*')+[None])[state] class PlayoffSettings: def __init__(self): self.settings = None self.interactive = False self.settings_file = None if len(sys.argv) > 1: self.settings_file = sys.argv[1] else: self.interactive = True def load(self): if self.settings_file is None: readline.set_completer_delims(' \t\n;') readline.parse_and_bind("tab: complete") readline.set_completer(complete_filename) self.settings_file = raw_input('JSON settings file: ') if self.settings is None: self.settings = json.load(open(self.settings_file)) def has_section(self, key): self.load() return key in self.settings def get(self, *keys): self.load() section = self.settings for key in keys: section = section[key] return section
Load config file only once
Load config file only once
Python
bsd-2-clause
emkael/jfrteamy-playoff,emkael/jfrteamy-playoff
import glob, json, os, readline, sys def complete_filename(text, state): return (glob.glob(text+'*')+[None])[state] class PlayoffSettings: def __init__(self): + self.settings = None self.interactive = False self.settings_file = None if len(sys.argv) > 1: self.settings_file = sys.argv[1] else: self.interactive = True def load(self): if self.settings_file is None: readline.set_completer_delims(' \t\n;') readline.parse_and_bind("tab: complete") readline.set_completer(complete_filename) self.settings_file = raw_input('JSON settings file: ') + if self.settings is None: - self.settings = json.load(open(self.settings_file)) + self.settings = json.load(open(self.settings_file)) def has_section(self, key): self.load() return key in self.settings def get(self, *keys): self.load() section = self.settings for key in keys: section = section[key] return section
Load config file only once
## Code Before: import glob, json, os, readline, sys def complete_filename(text, state): return (glob.glob(text+'*')+[None])[state] class PlayoffSettings: def __init__(self): self.interactive = False self.settings_file = None if len(sys.argv) > 1: self.settings_file = sys.argv[1] else: self.interactive = True def load(self): if self.settings_file is None: readline.set_completer_delims(' \t\n;') readline.parse_and_bind("tab: complete") readline.set_completer(complete_filename) self.settings_file = raw_input('JSON settings file: ') self.settings = json.load(open(self.settings_file)) def has_section(self, key): self.load() return key in self.settings def get(self, *keys): self.load() section = self.settings for key in keys: section = section[key] return section ## Instruction: Load config file only once ## Code After: import glob, json, os, readline, sys def complete_filename(text, state): return (glob.glob(text+'*')+[None])[state] class PlayoffSettings: def __init__(self): self.settings = None self.interactive = False self.settings_file = None if len(sys.argv) > 1: self.settings_file = sys.argv[1] else: self.interactive = True def load(self): if self.settings_file is None: readline.set_completer_delims(' \t\n;') readline.parse_and_bind("tab: complete") readline.set_completer(complete_filename) self.settings_file = raw_input('JSON settings file: ') if self.settings is None: self.settings = json.load(open(self.settings_file)) def has_section(self, key): self.load() return key in self.settings def get(self, *keys): self.load() section = self.settings for key in keys: section = section[key] return section
import glob, json, os, readline, sys def complete_filename(text, state): return (glob.glob(text+'*')+[None])[state] class PlayoffSettings: def __init__(self): + self.settings = None self.interactive = False self.settings_file = None if len(sys.argv) > 1: self.settings_file = sys.argv[1] else: self.interactive = True def load(self): if self.settings_file is None: readline.set_completer_delims(' \t\n;') readline.parse_and_bind("tab: complete") readline.set_completer(complete_filename) self.settings_file = raw_input('JSON settings file: ') + if self.settings is None: - self.settings = json.load(open(self.settings_file)) + self.settings = json.load(open(self.settings_file)) ? ++++ def has_section(self, key): self.load() return key in self.settings def get(self, *keys): self.load() section = self.settings for key in keys: section = section[key] return section
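The `if self.settings is None` guard is plain memoization: the file is read on the first call and the cached dict is reused afterwards, so has_section() and get() can both call load() freely. The same pattern in isolation, with invented names:

import json

class LazyConfig(object):
    def __init__(self, path):
        self._path = path
        self._data = None

    def get(self, key):
        if self._data is None:              # only the first call pays the I/O cost
            with open(self._path) as config:
                self._data = json.load(config)
        return self._data[key]              # later calls reuse the cached dict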
ec37dae820e49d816014c62f00711eaaeaf64597
transaction_hooks/test/settings_pg.py
transaction_hooks/test/settings_pg.py
import os from .settings import * # noqa DATABASES = { 'default': { 'ENGINE': 'transaction_hooks.backends.postgresql_psycopg2', 'NAME': 'dtc', }, } if 'DTC_PG_USERNAME' in os.environ: DATABASES['default'].update( { 'USER': os.environ['DTC_PG_USERNAME'], 'PASSWORD': '', 'HOST': 'localhost', } )
import os try: from psycopg2cffi import compat compat.register() except ImportError: pass from .settings import * # noqa DATABASES = { 'default': { 'ENGINE': 'transaction_hooks.backends.postgresql_psycopg2', 'NAME': 'dtc', }, } if 'DTC_PG_USERNAME' in os.environ: DATABASES['default'].update( { 'USER': os.environ['DTC_PG_USERNAME'], 'PASSWORD': '', 'HOST': 'localhost', } )
Enable postgresql CFFI compatibility if available.
Enable postgresql CFFI compatibility if available.
Python
bsd-3-clause
carljm/django-transaction-hooks
import os + + try: + from psycopg2cffi import compat + compat.register() + except ImportError: + pass from .settings import * # noqa DATABASES = { 'default': { 'ENGINE': 'transaction_hooks.backends.postgresql_psycopg2', 'NAME': 'dtc', }, } if 'DTC_PG_USERNAME' in os.environ: DATABASES['default'].update( { 'USER': os.environ['DTC_PG_USERNAME'], 'PASSWORD': '', 'HOST': 'localhost', } )
Enable postgresql CFFI compatibility if available.
## Code Before:
import os

from .settings import * # noqa


DATABASES = {
    'default': {
        'ENGINE': 'transaction_hooks.backends.postgresql_psycopg2',
        'NAME': 'dtc',
    },
}

if 'DTC_PG_USERNAME' in os.environ:
    DATABASES['default'].update(
        {
            'USER': os.environ['DTC_PG_USERNAME'],
            'PASSWORD': '',
            'HOST': 'localhost',
        }
    )

## Instruction:
Enable postgresql CFFI compatibility if available.
## Code After:
import os

try:
    from psycopg2cffi import compat
    compat.register()
except ImportError:
    pass

from .settings import * # noqa


DATABASES = {
    'default': {
        'ENGINE': 'transaction_hooks.backends.postgresql_psycopg2',
        'NAME': 'dtc',
    },
}

if 'DTC_PG_USERNAME' in os.environ:
    DATABASES['default'].update(
        {
            'USER': os.environ['DTC_PG_USERNAME'],
            'PASSWORD': '',
            'HOST': 'localhost',
        }
    )
import os + + try: + from psycopg2cffi import compat + compat.register() + except ImportError: + pass from .settings import * # noqa DATABASES = { 'default': { 'ENGINE': 'transaction_hooks.backends.postgresql_psycopg2', 'NAME': 'dtc', }, } if 'DTC_PG_USERNAME' in os.environ: DATABASES['default'].update( { 'USER': os.environ['DTC_PG_USERNAME'], 'PASSWORD': '', 'HOST': 'localhost', } )
15be3bd492a0808713c6ae6981ecb99acacd5297
allauth/socialaccount/providers/trello/provider.py
allauth/socialaccount/providers/trello/provider.py
from allauth.socialaccount.providers.base import ProviderAccount from allauth.socialaccount.providers.oauth.provider import OAuthProvider class TrelloAccount(ProviderAccount): def get_profile_url(self): return None def get_avatar_url(self): return None class TrelloProvider(OAuthProvider): id = 'trello' name = 'Trello' account_class = TrelloAccount def get_default_scope(self): return ['read'] def extract_uid(self, data): return data['id'] def get_auth_params(self, request, action): data = super(TrelloProvider, self).get_auth_params(request, action) app = self.get_app(request) data['type'] = 'web_server' data['name'] = app.name # define here for how long it will be, this can be configured on the # social app data['expiration'] = 'never' return data provider_classes = [TrelloProvider]
from allauth.socialaccount.providers.base import ProviderAccount from allauth.socialaccount.providers.oauth.provider import OAuthProvider class TrelloAccount(ProviderAccount): def get_profile_url(self): return None def get_avatar_url(self): return None class TrelloProvider(OAuthProvider): id = 'trello' name = 'Trello' account_class = TrelloAccount def get_default_scope(self): return ['read'] def extract_uid(self, data): return data['id'] def get_auth_params(self, request, action): data = super(TrelloProvider, self).get_auth_params(request, action) app = self.get_app(request) data['type'] = 'web_server' data['name'] = app.name data['scope'] = self.get_scope(request) # define here for how long it will be, this can be configured on the # social app data['expiration'] = 'never' return data provider_classes = [TrelloProvider]
Use 'scope' in TrelloProvider auth params. Allows overriding from django settings.
feat(TrelloProvider): Use 'scope' in TrelloProvider auth params. Allows overriding from django settings.
Python
mit
AltSchool/django-allauth,AltSchool/django-allauth,AltSchool/django-allauth
from allauth.socialaccount.providers.base import ProviderAccount from allauth.socialaccount.providers.oauth.provider import OAuthProvider class TrelloAccount(ProviderAccount): def get_profile_url(self): return None def get_avatar_url(self): return None class TrelloProvider(OAuthProvider): id = 'trello' name = 'Trello' account_class = TrelloAccount def get_default_scope(self): return ['read'] def extract_uid(self, data): return data['id'] def get_auth_params(self, request, action): data = super(TrelloProvider, self).get_auth_params(request, action) app = self.get_app(request) data['type'] = 'web_server' data['name'] = app.name + data['scope'] = self.get_scope(request) # define here for how long it will be, this can be configured on the # social app data['expiration'] = 'never' return data provider_classes = [TrelloProvider]
Use 'scope' in TrelloProvider auth params. Allows overriding from django settings.
## Code Before: from allauth.socialaccount.providers.base import ProviderAccount from allauth.socialaccount.providers.oauth.provider import OAuthProvider class TrelloAccount(ProviderAccount): def get_profile_url(self): return None def get_avatar_url(self): return None class TrelloProvider(OAuthProvider): id = 'trello' name = 'Trello' account_class = TrelloAccount def get_default_scope(self): return ['read'] def extract_uid(self, data): return data['id'] def get_auth_params(self, request, action): data = super(TrelloProvider, self).get_auth_params(request, action) app = self.get_app(request) data['type'] = 'web_server' data['name'] = app.name # define here for how long it will be, this can be configured on the # social app data['expiration'] = 'never' return data provider_classes = [TrelloProvider] ## Instruction: Use 'scope' in TrelloProvider auth params. Allows overriding from django settings. ## Code After: from allauth.socialaccount.providers.base import ProviderAccount from allauth.socialaccount.providers.oauth.provider import OAuthProvider class TrelloAccount(ProviderAccount): def get_profile_url(self): return None def get_avatar_url(self): return None class TrelloProvider(OAuthProvider): id = 'trello' name = 'Trello' account_class = TrelloAccount def get_default_scope(self): return ['read'] def extract_uid(self, data): return data['id'] def get_auth_params(self, request, action): data = super(TrelloProvider, self).get_auth_params(request, action) app = self.get_app(request) data['type'] = 'web_server' data['name'] = app.name data['scope'] = self.get_scope(request) # define here for how long it will be, this can be configured on the # social app data['expiration'] = 'never' return data provider_classes = [TrelloProvider]
from allauth.socialaccount.providers.base import ProviderAccount from allauth.socialaccount.providers.oauth.provider import OAuthProvider class TrelloAccount(ProviderAccount): def get_profile_url(self): return None def get_avatar_url(self): return None class TrelloProvider(OAuthProvider): id = 'trello' name = 'Trello' account_class = TrelloAccount def get_default_scope(self): return ['read'] def extract_uid(self, data): return data['id'] def get_auth_params(self, request, action): data = super(TrelloProvider, self).get_auth_params(request, action) app = self.get_app(request) data['type'] = 'web_server' data['name'] = app.name + data['scope'] = self.get_scope(request) # define here for how long it will be, this can be configured on the # social app data['expiration'] = 'never' return data provider_classes = [TrelloProvider]
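get_scope() in django-allauth consults the per-provider SCOPE entry in SOCIALACCOUNT_PROVIDERS, so with the change above a project can widen the Trello scope from settings instead of being pinned to the provider default of ['read']; the scope values below are an example:

# settings.py
SOCIALACCOUNT_PROVIDERS = {
    'trello': {
        # forwarded into the authorize URL by get_auth_params()
        'SCOPE': ['read', 'write', 'account'],
    },
}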
a9c6e045631103fe8508fd1b60d6076c05092fe1
tests/examples/customnode/nodes.py
tests/examples/customnode/nodes.py
from viewflow.activation import AbstractGateActivation, Activation from viewflow.flow import base from viewflow.token import Token class DynamicSplitActivation(AbstractGateActivation): def calculate_next(self): self._split_count = self.flow_task._task_count_callback(self.process) @Activation.status.super() def activate_next(self): if self._split_count: token_source = Token.split_token_source(self.task.token, self.task.pk) for _ in range(self._split_count): self.flow_task._next.activate(prev_activation=self, token=next(token_source)) class DynamicSplit(base.NextNodeMixin, base.DetailsViewMixin, base.Gateway): """ Activates several outgoing task instances depends on callback value Example:: spit_on_decision = flow.DynamicSplit(lambda p: 4) \\ .Next(this.make_decision) make_decision = flow.View(MyView) \\ .Next(this.join_on_decision) join_on_decision = flow.Join() \\ .Next(this.end) """ task_type = 'SPLIT' activation_cls = DynamicSplitActivation def __init__(self, callback): super(DynamicSplit, self).__init__() self._task_count_callback = callback
from viewflow.activation import AbstractGateActivation from viewflow.flow import base from viewflow.token import Token class DynamicSplitActivation(AbstractGateActivation): def calculate_next(self): self._split_count = self.flow_task._task_count_callback(self.process) def activate_next(self): if self._split_count: token_source = Token.split_token_source(self.task.token, self.task.pk) for _ in range(self._split_count): self.flow_task._next.activate(prev_activation=self, token=next(token_source)) class DynamicSplit(base.NextNodeMixin, base.UndoViewMixin, base.CancelViewMixin, base.PerformViewMixin, base.DetailsViewMixin, base.Gateway): """ Activates several outgoing task instances depends on callback value Example:: spit_on_decision = flow.DynamicSplit(lambda p: 4) \\ .Next(this.make_decision) make_decision = flow.View(MyView) \\ .Next(this.join_on_decision) join_on_decision = flow.Join() \\ .Next(this.end) """ task_type = 'SPLIT' activation_cls = DynamicSplitActivation def __init__(self, callback): super(DynamicSplit, self).__init__() self._task_count_callback = callback
Add undo to custom node sample
Add undo to custom node sample
Python
agpl-3.0
ribeiro-ucl/viewflow,codingjoe/viewflow,pombredanne/viewflow,pombredanne/viewflow,codingjoe/viewflow,codingjoe/viewflow,viewflow/viewflow,viewflow/viewflow,ribeiro-ucl/viewflow,viewflow/viewflow,ribeiro-ucl/viewflow
- from viewflow.activation import AbstractGateActivation, Activation + from viewflow.activation import AbstractGateActivation from viewflow.flow import base from viewflow.token import Token class DynamicSplitActivation(AbstractGateActivation): def calculate_next(self): self._split_count = self.flow_task._task_count_callback(self.process) - @Activation.status.super() def activate_next(self): if self._split_count: token_source = Token.split_token_source(self.task.token, self.task.pk) for _ in range(self._split_count): self.flow_task._next.activate(prev_activation=self, token=next(token_source)) - class DynamicSplit(base.NextNodeMixin, base.DetailsViewMixin, base.Gateway): + class DynamicSplit(base.NextNodeMixin, + base.UndoViewMixin, + base.CancelViewMixin, + base.PerformViewMixin, + base.DetailsViewMixin, + base.Gateway): """ Activates several outgoing task instances depends on callback value Example:: spit_on_decision = flow.DynamicSplit(lambda p: 4) \\ .Next(this.make_decision) make_decision = flow.View(MyView) \\ .Next(this.join_on_decision) join_on_decision = flow.Join() \\ .Next(this.end) """ task_type = 'SPLIT' activation_cls = DynamicSplitActivation def __init__(self, callback): super(DynamicSplit, self).__init__() self._task_count_callback = callback
Add undo to custom node sample
## Code Before: from viewflow.activation import AbstractGateActivation, Activation from viewflow.flow import base from viewflow.token import Token class DynamicSplitActivation(AbstractGateActivation): def calculate_next(self): self._split_count = self.flow_task._task_count_callback(self.process) @Activation.status.super() def activate_next(self): if self._split_count: token_source = Token.split_token_source(self.task.token, self.task.pk) for _ in range(self._split_count): self.flow_task._next.activate(prev_activation=self, token=next(token_source)) class DynamicSplit(base.NextNodeMixin, base.DetailsViewMixin, base.Gateway): """ Activates several outgoing task instances depends on callback value Example:: spit_on_decision = flow.DynamicSplit(lambda p: 4) \\ .Next(this.make_decision) make_decision = flow.View(MyView) \\ .Next(this.join_on_decision) join_on_decision = flow.Join() \\ .Next(this.end) """ task_type = 'SPLIT' activation_cls = DynamicSplitActivation def __init__(self, callback): super(DynamicSplit, self).__init__() self._task_count_callback = callback ## Instruction: Add undo to custom node sample ## Code After: from viewflow.activation import AbstractGateActivation from viewflow.flow import base from viewflow.token import Token class DynamicSplitActivation(AbstractGateActivation): def calculate_next(self): self._split_count = self.flow_task._task_count_callback(self.process) def activate_next(self): if self._split_count: token_source = Token.split_token_source(self.task.token, self.task.pk) for _ in range(self._split_count): self.flow_task._next.activate(prev_activation=self, token=next(token_source)) class DynamicSplit(base.NextNodeMixin, base.UndoViewMixin, base.CancelViewMixin, base.PerformViewMixin, base.DetailsViewMixin, base.Gateway): """ Activates several outgoing task instances depends on callback value Example:: spit_on_decision = flow.DynamicSplit(lambda p: 4) \\ .Next(this.make_decision) make_decision = flow.View(MyView) \\ .Next(this.join_on_decision) join_on_decision = flow.Join() \\ .Next(this.end) """ task_type = 'SPLIT' activation_cls = DynamicSplitActivation def __init__(self, callback): super(DynamicSplit, self).__init__() self._task_count_callback = callback
- from viewflow.activation import AbstractGateActivation, Activation ? ------------ + from viewflow.activation import AbstractGateActivation from viewflow.flow import base from viewflow.token import Token class DynamicSplitActivation(AbstractGateActivation): def calculate_next(self): self._split_count = self.flow_task._task_count_callback(self.process) - @Activation.status.super() def activate_next(self): if self._split_count: token_source = Token.split_token_source(self.task.token, self.task.pk) for _ in range(self._split_count): self.flow_task._next.activate(prev_activation=self, token=next(token_source)) - class DynamicSplit(base.NextNodeMixin, base.DetailsViewMixin, base.Gateway): + class DynamicSplit(base.NextNodeMixin, + base.UndoViewMixin, + base.CancelViewMixin, + base.PerformViewMixin, + base.DetailsViewMixin, + base.Gateway): """ Activates several outgoing task instances depends on callback value Example:: spit_on_decision = flow.DynamicSplit(lambda p: 4) \\ .Next(this.make_decision) make_decision = flow.View(MyView) \\ .Next(this.join_on_decision) join_on_decision = flow.Join() \\ .Next(this.end) """ task_type = 'SPLIT' activation_cls = DynamicSplitActivation def __init__(self, callback): super(DynamicSplit, self).__init__() self._task_count_callback = callback
76b5d00a4f936c38036270ef37465fd2621db71c
TelegramLogHandler/handler.py
TelegramLogHandler/handler.py
import logging class TelegramHandler(logging.Handler): """ A handler class which sends a Telegram message for each logging event. """ def __init__(self, token, ids): """ Initialize the handler. Initialize the instance with the bot's token and a list of chat_id(s) of the conversations that should be notified by the handler. """ logging.Handler.__init__(self) self.token = token self.ids = ids def emit(self, record): """ Emit a record. Format the record and send it to the specified chats. """ try: import requests url = 'https://api.telegram.org/bot{}/sendMessage'.format(self.token) for chat_id in self.ids: payload = { 'chat_id':chat_id, 'text': self.format(record) } requests.post(url, data=payload) except: self.handleError(record)
import logging class TelegramHandler(logging.Handler): """ A handler class which sends a Telegram message for each logging event. """ def __init__(self, token, ids): """ Initialize the handler. Initialize the instance with the bot's token and a list of chat_id(s) of the conversations that should be notified by the handler. """ logging.Handler.__init__(self) self.token = token self.ids = ids def emit(self, record): """ Emit a record. Format the record and send it to the specified chats. """ try: import requests requests_handler = logging.getLogger("requests") url = 'https://api.telegram.org/bot{}/sendMessage'.format(self.token) requests_handler.propagate = False for chat_id in self.ids: payload = { 'chat_id':chat_id, 'text': self.format(record) } requests.post(url, data=payload) requests_handler.propagate = True except: self.handleError(record)
Fix infinite loop caused by requests library's logging
Fix infinite loop caused by requests library's logging
Python
mit
simonacca/TelegramLogHandler
import logging class TelegramHandler(logging.Handler): """ A handler class which sends a Telegram message for each logging event. """ def __init__(self, token, ids): """ Initialize the handler. Initialize the instance with the bot's token and a list of chat_id(s) of the conversations that should be notified by the handler. """ logging.Handler.__init__(self) self.token = token self.ids = ids def emit(self, record): """ Emit a record. Format the record and send it to the specified chats. """ try: import requests + requests_handler = logging.getLogger("requests") + url = 'https://api.telegram.org/bot{}/sendMessage'.format(self.token) + + requests_handler.propagate = False for chat_id in self.ids: payload = { 'chat_id':chat_id, 'text': self.format(record) } + requests.post(url, data=payload) + requests_handler.propagate = True except: self.handleError(record)
Fix infinite loop caused by requests library's logging
## Code Before: import logging class TelegramHandler(logging.Handler): """ A handler class which sends a Telegram message for each logging event. """ def __init__(self, token, ids): """ Initialize the handler. Initialize the instance with the bot's token and a list of chat_id(s) of the conversations that should be notified by the handler. """ logging.Handler.__init__(self) self.token = token self.ids = ids def emit(self, record): """ Emit a record. Format the record and send it to the specified chats. """ try: import requests url = 'https://api.telegram.org/bot{}/sendMessage'.format(self.token) for chat_id in self.ids: payload = { 'chat_id':chat_id, 'text': self.format(record) } requests.post(url, data=payload) except: self.handleError(record) ## Instruction: Fix infinite loop caused by requests library's logging ## Code After: import logging class TelegramHandler(logging.Handler): """ A handler class which sends a Telegram message for each logging event. """ def __init__(self, token, ids): """ Initialize the handler. Initialize the instance with the bot's token and a list of chat_id(s) of the conversations that should be notified by the handler. """ logging.Handler.__init__(self) self.token = token self.ids = ids def emit(self, record): """ Emit a record. Format the record and send it to the specified chats. """ try: import requests requests_handler = logging.getLogger("requests") url = 'https://api.telegram.org/bot{}/sendMessage'.format(self.token) requests_handler.propagate = False for chat_id in self.ids: payload = { 'chat_id':chat_id, 'text': self.format(record) } requests.post(url, data=payload) requests_handler.propagate = True except: self.handleError(record)
import logging class TelegramHandler(logging.Handler): """ A handler class which sends a Telegram message for each logging event. """ def __init__(self, token, ids): """ Initialize the handler. Initialize the instance with the bot's token and a list of chat_id(s) of the conversations that should be notified by the handler. """ logging.Handler.__init__(self) self.token = token self.ids = ids def emit(self, record): """ Emit a record. Format the record and send it to the specified chats. """ try: import requests + requests_handler = logging.getLogger("requests") + url = 'https://api.telegram.org/bot{}/sendMessage'.format(self.token) + + requests_handler.propagate = False for chat_id in self.ids: payload = { 'chat_id':chat_id, 'text': self.format(record) } + requests.post(url, data=payload) + requests_handler.propagate = True except: self.handleError(record)
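The propagate toggling works because requests logs its HTTP activity through the stdlib logging module: if those records reach a logger wearing this handler, emit() calls requests.post(), which logs, which re-enters emit(), and so on. Muting the 'requests' logger for the duration of the call breaks the cycle. A sketch of attaching the handler; the token and chat ids are placeholders:

import logging

from TelegramLogHandler.handler import TelegramHandler

logger = logging.getLogger('myapp')
logger.setLevel(logging.ERROR)
logger.addHandler(TelegramHandler(token='123456:ABC-placeholder',
                                  ids=[11111111, 22222222]))

logger.error('disk almost full on db-1')  # sends one Telegram message per chat id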
a8cb15b1983c48547edfeb53bfb63245f7e7c892
dbaas_zabbix/__init__.py
dbaas_zabbix/__init__.py
import logging import sys from dbaas_zabbix.dbaas_api import DatabaseAsAServiceApi from dbaas_zabbix.provider_factory import ProviderFactory from pyzabbix import ZabbixAPI stream = logging.StreamHandler(sys.stdout) stream.setLevel(logging.DEBUG) log = logging.getLogger('pyzabbix') log.addHandler(stream) log.setLevel(logging.DEBUG) def factory_for(**kwargs): databaseinfra = kwargs['databaseinfra'] credentials = kwargs['credentials'] del kwargs['databaseinfra'] del kwargs['credentials'] zabbix_api = ZabbixAPI if kwargs.get('zabbix_api'): zabbix_api = kwargs.get('zabbix_api') del kwargs['zabbix_api'] dbaas_api = DatabaseAsAServiceApi(databaseinfra, credentials) return ProviderFactory.factory(dbaas_api, zabbix_api=zabbix_api, **kwargs)
from dbaas_zabbix.dbaas_api import DatabaseAsAServiceApi from dbaas_zabbix.provider_factory import ProviderFactory from pyzabbix import ZabbixAPI def factory_for(**kwargs): databaseinfra = kwargs['databaseinfra'] credentials = kwargs['credentials'] del kwargs['databaseinfra'] del kwargs['credentials'] zabbix_api = ZabbixAPI if kwargs.get('zabbix_api'): zabbix_api = kwargs.get('zabbix_api') del kwargs['zabbix_api'] dbaas_api = DatabaseAsAServiceApi(databaseinfra, credentials) return ProviderFactory.factory(dbaas_api, zabbix_api=zabbix_api, **kwargs)
Revert "log integrations with zabbix through pyzabbix"
Revert "log integrations with zabbix through pyzabbix" This reverts commit 1506b6ad3e35e4a10ca36d42a75f3a572add06b7.
Python
bsd-3-clause
globocom/dbaas-zabbix,globocom/dbaas-zabbix
- import logging - import sys - from dbaas_zabbix.dbaas_api import DatabaseAsAServiceApi from dbaas_zabbix.provider_factory import ProviderFactory from pyzabbix import ZabbixAPI - stream = logging.StreamHandler(sys.stdout) - stream.setLevel(logging.DEBUG) - log = logging.getLogger('pyzabbix') - log.addHandler(stream) - log.setLevel(logging.DEBUG) def factory_for(**kwargs): databaseinfra = kwargs['databaseinfra'] credentials = kwargs['credentials'] del kwargs['databaseinfra'] del kwargs['credentials'] zabbix_api = ZabbixAPI if kwargs.get('zabbix_api'): zabbix_api = kwargs.get('zabbix_api') del kwargs['zabbix_api'] dbaas_api = DatabaseAsAServiceApi(databaseinfra, credentials) return ProviderFactory.factory(dbaas_api, zabbix_api=zabbix_api, **kwargs)
Revert "log integrations with zabbix through pyzabbix"
## Code Before: import logging import sys from dbaas_zabbix.dbaas_api import DatabaseAsAServiceApi from dbaas_zabbix.provider_factory import ProviderFactory from pyzabbix import ZabbixAPI stream = logging.StreamHandler(sys.stdout) stream.setLevel(logging.DEBUG) log = logging.getLogger('pyzabbix') log.addHandler(stream) log.setLevel(logging.DEBUG) def factory_for(**kwargs): databaseinfra = kwargs['databaseinfra'] credentials = kwargs['credentials'] del kwargs['databaseinfra'] del kwargs['credentials'] zabbix_api = ZabbixAPI if kwargs.get('zabbix_api'): zabbix_api = kwargs.get('zabbix_api') del kwargs['zabbix_api'] dbaas_api = DatabaseAsAServiceApi(databaseinfra, credentials) return ProviderFactory.factory(dbaas_api, zabbix_api=zabbix_api, **kwargs) ## Instruction: Revert "log integrations with zabbix through pyzabbix" ## Code After: from dbaas_zabbix.dbaas_api import DatabaseAsAServiceApi from dbaas_zabbix.provider_factory import ProviderFactory from pyzabbix import ZabbixAPI def factory_for(**kwargs): databaseinfra = kwargs['databaseinfra'] credentials = kwargs['credentials'] del kwargs['databaseinfra'] del kwargs['credentials'] zabbix_api = ZabbixAPI if kwargs.get('zabbix_api'): zabbix_api = kwargs.get('zabbix_api') del kwargs['zabbix_api'] dbaas_api = DatabaseAsAServiceApi(databaseinfra, credentials) return ProviderFactory.factory(dbaas_api, zabbix_api=zabbix_api, **kwargs)
- import logging - import sys - from dbaas_zabbix.dbaas_api import DatabaseAsAServiceApi from dbaas_zabbix.provider_factory import ProviderFactory from pyzabbix import ZabbixAPI - stream = logging.StreamHandler(sys.stdout) - stream.setLevel(logging.DEBUG) - log = logging.getLogger('pyzabbix') - log.addHandler(stream) - log.setLevel(logging.DEBUG) def factory_for(**kwargs): databaseinfra = kwargs['databaseinfra'] credentials = kwargs['credentials'] del kwargs['databaseinfra'] del kwargs['credentials'] zabbix_api = ZabbixAPI if kwargs.get('zabbix_api'): zabbix_api = kwargs.get('zabbix_api') del kwargs['zabbix_api'] dbaas_api = DatabaseAsAServiceApi(databaseinfra, credentials) return ProviderFactory.factory(dbaas_api, zabbix_api=zabbix_api, **kwargs)
7b3f239964c6663a9b655553202567fccead85c8
mollie/api/resources/profiles.py
mollie/api/resources/profiles.py
from ..error import IdentifierError from ..objects.profile import Profile from .base import Base class Profiles(Base): RESOURCE_ID_PREFIX = 'pfl_' def get_resource_object(self, result): return Profile(result, self.client) def get(self, profile_id, **params): if not profile_id or \ (not profile_id.startswith(self.RESOURCE_ID_PREFIX) and not profile_id == 'me'): raise IdentifierError( "Invalid profile ID: '{id}'. A profile ID should start with '{prefix}'.".format( id=profile_id, prefix=self.RESOURCE_ID_PREFIX) ) return super(Profiles, self).get(profile_id, **params)
from ..error import IdentifierError from ..objects.profile import Profile from .base import Base class Profiles(Base): RESOURCE_ID_PREFIX = 'pfl_' def get_resource_object(self, result): return Profile(result, self.client) def get(self, profile_id, **params): if not profile_id or \ (not profile_id.startswith(self.RESOURCE_ID_PREFIX) and not profile_id == 'me'): raise IdentifierError( "Invalid profile ID: '{id}'. A profile ID should start with '{prefix}' " "or it should be 'me'.".format( id=profile_id, prefix=self.RESOURCE_ID_PREFIX) ) return super(Profiles, self).get(profile_id, **params)
Add 'me' to profile IdentifierError
Add 'me' to profile IdentifierError
Python
bsd-2-clause
mollie/mollie-api-python
from ..error import IdentifierError from ..objects.profile import Profile from .base import Base class Profiles(Base): RESOURCE_ID_PREFIX = 'pfl_' def get_resource_object(self, result): return Profile(result, self.client) def get(self, profile_id, **params): if not profile_id or \ (not profile_id.startswith(self.RESOURCE_ID_PREFIX) and not profile_id == 'me'): raise IdentifierError( - "Invalid profile ID: '{id}'. A profile ID should start with '{prefix}'.".format( + "Invalid profile ID: '{id}'. A profile ID should start with '{prefix}' " + "or it should be 'me'.".format( + id=profile_id, - id=profile_id, prefix=self.RESOURCE_ID_PREFIX) + prefix=self.RESOURCE_ID_PREFIX) ) return super(Profiles, self).get(profile_id, **params)
Add 'me' to profile IdentifierError
## Code Before: from ..error import IdentifierError from ..objects.profile import Profile from .base import Base class Profiles(Base): RESOURCE_ID_PREFIX = 'pfl_' def get_resource_object(self, result): return Profile(result, self.client) def get(self, profile_id, **params): if not profile_id or \ (not profile_id.startswith(self.RESOURCE_ID_PREFIX) and not profile_id == 'me'): raise IdentifierError( "Invalid profile ID: '{id}'. A profile ID should start with '{prefix}'.".format( id=profile_id, prefix=self.RESOURCE_ID_PREFIX) ) return super(Profiles, self).get(profile_id, **params) ## Instruction: Add 'me' to profile IdentifierError ## Code After: from ..error import IdentifierError from ..objects.profile import Profile from .base import Base class Profiles(Base): RESOURCE_ID_PREFIX = 'pfl_' def get_resource_object(self, result): return Profile(result, self.client) def get(self, profile_id, **params): if not profile_id or \ (not profile_id.startswith(self.RESOURCE_ID_PREFIX) and not profile_id == 'me'): raise IdentifierError( "Invalid profile ID: '{id}'. A profile ID should start with '{prefix}' " "or it should be 'me'.".format( id=profile_id, prefix=self.RESOURCE_ID_PREFIX) ) return super(Profiles, self).get(profile_id, **params)
from ..error import IdentifierError from ..objects.profile import Profile from .base import Base class Profiles(Base): RESOURCE_ID_PREFIX = 'pfl_' def get_resource_object(self, result): return Profile(result, self.client) def get(self, profile_id, **params): if not profile_id or \ (not profile_id.startswith(self.RESOURCE_ID_PREFIX) and not profile_id == 'me'): raise IdentifierError( - "Invalid profile ID: '{id}'. A profile ID should start with '{prefix}'.".format( ? ^ -------- + "Invalid profile ID: '{id}'. A profile ID should start with '{prefix}' " ? ^ + "or it should be 'me'.".format( + id=profile_id, - id=profile_id, prefix=self.RESOURCE_ID_PREFIX) ? --------------- + prefix=self.RESOURCE_ID_PREFIX) ) return super(Profiles, self).get(profile_id, **params)
b21750ad60b84bf87f15c3d25ffa0317091a10dc
pyoracc/test/model/test_corpus.py
pyoracc/test/model/test_corpus.py
import pytest from ...model.corpus import Corpus from ..fixtures import tiny_corpus, sample_corpus, whole_corpus slow = pytest.mark.skipif( not pytest.config.getoption("--runslow"), reason="need --runslow option to run" ) def test_tiny(): corpus = Corpus(source=tiny_corpus()) assert corpus.successes == 1 assert corpus.failures == 1 @slow def test_sample(): corpus = Corpus(source=sample_corpus()) assert corpus.successes == 36 assert corpus.failures == 3 @pytest.mark.skipif(not whole_corpus(), reason="Need to set oracc_corpus_path to point " "to the whole corpus, which is not bundled with " "pyoracc") @slow def test_whole(): corpus = Corpus(source=whole_corpus()) assert corpus.successes == 2477 assert corpus.failures == 391
import pytest from ...model.corpus import Corpus from ..fixtures import tiny_corpus, sample_corpus, whole_corpus slow = pytest.mark.skipif( not pytest.config.getoption("--runslow"), reason="need --runslow option to run" ) def test_tiny(): corpus = Corpus(source=tiny_corpus()) assert corpus.successes == 1 assert corpus.failures == 1 @slow def test_sample(): corpus = Corpus(source=sample_corpus()) assert corpus.successes == 36 assert corpus.failures == 3 @pytest.mark.skipif(not whole_corpus(), reason="Need to set oracc_corpus_path to point " "to the whole corpus, which is not bundled with " "pyoracc") @slow def test_whole(): corpus = Corpus(source=whole_corpus()) # there is a total of 2868 files in the corpus assert corpus.successes == 2477 assert corpus.failures == 391
Comment about number of tests
Comment about number of tests
Python
mit
UCL/pyoracc
import pytest from ...model.corpus import Corpus from ..fixtures import tiny_corpus, sample_corpus, whole_corpus slow = pytest.mark.skipif( not pytest.config.getoption("--runslow"), reason="need --runslow option to run" ) def test_tiny(): corpus = Corpus(source=tiny_corpus()) assert corpus.successes == 1 assert corpus.failures == 1 @slow def test_sample(): corpus = Corpus(source=sample_corpus()) assert corpus.successes == 36 assert corpus.failures == 3 @pytest.mark.skipif(not whole_corpus(), reason="Need to set oracc_corpus_path to point " "to the whole corpus, which is not bundled with " "pyoracc") @slow def test_whole(): corpus = Corpus(source=whole_corpus()) + # there is a total of 2868 files in the corpus assert corpus.successes == 2477 assert corpus.failures == 391
Comment about number of tests
## Code Before: import pytest from ...model.corpus import Corpus from ..fixtures import tiny_corpus, sample_corpus, whole_corpus slow = pytest.mark.skipif( not pytest.config.getoption("--runslow"), reason="need --runslow option to run" ) def test_tiny(): corpus = Corpus(source=tiny_corpus()) assert corpus.successes == 1 assert corpus.failures == 1 @slow def test_sample(): corpus = Corpus(source=sample_corpus()) assert corpus.successes == 36 assert corpus.failures == 3 @pytest.mark.skipif(not whole_corpus(), reason="Need to set oracc_corpus_path to point " "to the whole corpus, which is not bundled with " "pyoracc") @slow def test_whole(): corpus = Corpus(source=whole_corpus()) assert corpus.successes == 2477 assert corpus.failures == 391 ## Instruction: Comment about number of tests ## Code After: import pytest from ...model.corpus import Corpus from ..fixtures import tiny_corpus, sample_corpus, whole_corpus slow = pytest.mark.skipif( not pytest.config.getoption("--runslow"), reason="need --runslow option to run" ) def test_tiny(): corpus = Corpus(source=tiny_corpus()) assert corpus.successes == 1 assert corpus.failures == 1 @slow def test_sample(): corpus = Corpus(source=sample_corpus()) assert corpus.successes == 36 assert corpus.failures == 3 @pytest.mark.skipif(not whole_corpus(), reason="Need to set oracc_corpus_path to point " "to the whole corpus, which is not bundled with " "pyoracc") @slow def test_whole(): corpus = Corpus(source=whole_corpus()) # there is a total of 2868 files in the corpus assert corpus.successes == 2477 assert corpus.failures == 391
import pytest from ...model.corpus import Corpus from ..fixtures import tiny_corpus, sample_corpus, whole_corpus slow = pytest.mark.skipif( not pytest.config.getoption("--runslow"), reason="need --runslow option to run" ) def test_tiny(): corpus = Corpus(source=tiny_corpus()) assert corpus.successes == 1 assert corpus.failures == 1 @slow def test_sample(): corpus = Corpus(source=sample_corpus()) assert corpus.successes == 36 assert corpus.failures == 3 @pytest.mark.skipif(not whole_corpus(), reason="Need to set oracc_corpus_path to point " "to the whole corpus, which is not bundled with " "pyoracc") @slow def test_whole(): corpus = Corpus(source=whole_corpus()) + # there is a total of 2868 files in the corpus assert corpus.successes == 2477 assert corpus.failures == 391
04069a5d5d6f5647394aba987ae5798629bf4a73
Ghidra/Features/Python/ghidra_scripts/ImportSymbolsScript.py
Ghidra/Features/Python/ghidra_scripts/ImportSymbolsScript.py
f = askFile("Give me a file to open", "Go baby go!") for line in file(f.absolutePath): # note, cannot use open(), since that is in GhidraScript pieces = line.split() address = toAddr(long(pieces[1], 16)) print "creating symbol", pieces[0], "at address", address createLabel(address, pieces[0], False)
from ghidra.program.model.symbol.SourceType import * import string functionManager = currentProgram.getFunctionManager() f = askFile("Give me a file to open", "Go baby go!") for line in file(f.absolutePath): # note, cannot use open(), since that is in GhidraScript pieces = line.split() name = pieces[0] address = toAddr(long(pieces[1], 16)) try: function_or_label = pieces[2] except IndexError: function_or_label = "l" if function_or_label == "f": func = functionManager.getFunctionAt(address) if func is not None: old_name = func.getName() func.setName(name, USER_DEFINED) print("Renamed function {} to {} at address {}".format(old_name, name, address)) else: func = createFunction(address, name) print("Created function {} at address {}".format(name, address)) else: print("Created label {} at address {}".format(name, address)) createLabel(address, name, False)
Add optional 3rd token to ImportSymbolsScript.py to allow importing function names.
Add optional 3rd token to ImportSymbolsScript.py to allow importing function names.

Add Python script to import functions and labels

Use print() instead of print

Merge ImportSymbolsScript.py and ImportFunctionAndLabels.py

Made third token optional
Python
apache-2.0
NationalSecurityAgency/ghidra,NationalSecurityAgency/ghidra,NationalSecurityAgency/ghidra,NationalSecurityAgency/ghidra,NationalSecurityAgency/ghidra,NationalSecurityAgency/ghidra,NationalSecurityAgency/ghidra
- + + from ghidra.program.model.symbol.SourceType import * + import string + + functionManager = currentProgram.getFunctionManager() + f = askFile("Give me a file to open", "Go baby go!") for line in file(f.absolutePath): # note, cannot use open(), since that is in GhidraScript pieces = line.split() + + name = pieces[0] address = toAddr(long(pieces[1], 16)) - print "creating symbol", pieces[0], "at address", address - createLabel(address, pieces[0], False) + try: + function_or_label = pieces[2] + except IndexError: + function_or_label = "l" + + + if function_or_label == "f": + func = functionManager.getFunctionAt(address) + + if func is not None: + old_name = func.getName() + func.setName(name, USER_DEFINED) + print("Renamed function {} to {} at address {}".format(old_name, name, address)) + else: + func = createFunction(address, name) + print("Created function {} at address {}".format(name, address)) + + else: + print("Created label {} at address {}".format(name, address)) + createLabel(address, name, False) +
Add optional 3rd token to ImportSymbolsScript.py to allow importing function names.
## Code Before: f = askFile("Give me a file to open", "Go baby go!")

for line in file(f.absolutePath): # note, cannot use open(), since that is in GhidraScript
    pieces = line.split()
    address = toAddr(long(pieces[1], 16))
    print "creating symbol", pieces[0], "at address", address
    createLabel(address, pieces[0], False)
## Instruction: Add optional 3rd token to ImportSymbolsScript.py to allow importing function names. ## Code After: from ghidra.program.model.symbol.SourceType import *
import string

functionManager = currentProgram.getFunctionManager()

f = askFile("Give me a file to open", "Go baby go!")

for line in file(f.absolutePath): # note, cannot use open(), since that is in GhidraScript
    pieces = line.split()

    name = pieces[0]
    address = toAddr(long(pieces[1], 16))

    try:
        function_or_label = pieces[2]
    except IndexError:
        function_or_label = "l"


    if function_or_label == "f":
        func = functionManager.getFunctionAt(address)

        if func is not None:
            old_name = func.getName()
            func.setName(name, USER_DEFINED)
            print("Renamed function {} to {} at address {}".format(old_name, name, address))
        else:
            func = createFunction(address, name)
            print("Created function {} at address {}".format(name, address))

    else:
        print("Created label {} at address {}".format(name, address))
        createLabel(address, name, False)
- + + from ghidra.program.model.symbol.SourceType import * + import string + + functionManager = currentProgram.getFunctionManager() + f = askFile("Give me a file to open", "Go baby go!") for line in file(f.absolutePath): # note, cannot use open(), since that is in GhidraScript pieces = line.split() + + name = pieces[0] address = toAddr(long(pieces[1], 16)) - print "creating symbol", pieces[0], "at address", address + + try: + function_or_label = pieces[2] + except IndexError: + function_or_label = "l" + + + if function_or_label == "f": + func = functionManager.getFunctionAt(address) + + if func is not None: + old_name = func.getName() + func.setName(name, USER_DEFINED) + print("Renamed function {} to {} at address {}".format(old_name, name, address)) + else: + func = createFunction(address, name) + print("Created function {} at address {}".format(name, address)) + + else: + print("Created label {} at address {}".format(name, address)) - createLabel(address, pieces[0], False) ? ^^ ------ + createLabel(address, name, False) ? ++++ ^^^
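A note on the input format implied by the new ImportSymbolsScript.py: each line of the chosen file is split on whitespace into a symbol name, a hex address (parsed with long(pieces[1], 16)), and an optional third token. Only the literal token "f" routes a line through the function-creation/renaming branch; any other token, or no third token at all, falls through to plain label creation. The sample file below is a hypothetical illustration only — the names and addresses are invented, not taken from any repository in this record:

entry_point 0x00401000 f
decrypt_config 0x00401250 f
g_license_key 0x00404010 l
string_table 0x00404100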
d5e3d6c3ca285f1037f284cfb78e279c2d1032ec
dojopuzzles/core/urls.py
dojopuzzles/core/urls.py
from django.urls import path from core import views app_name = "core" urlpatterns = [ path("home/", views.home, name="home"), path("about/", views.about, name="about"), ]
from core import views from django.urls import path app_name = "core" urlpatterns = [ path("", views.home, name="home"), path("about/", views.about, name="about"), ]
Fix route for main page
Fix route for main page
Python
mit
rennerocha/dojopuzzles
+ from core import views from django.urls import path - - from core import views - app_name = "core" urlpatterns = [ - path("home/", views.home, name="home"), + path("", views.home, name="home"), path("about/", views.about, name="about"), ]
Fix route for main page
## Code Before: from django.urls import path from core import views app_name = "core" urlpatterns = [ path("home/", views.home, name="home"), path("about/", views.about, name="about"), ] ## Instruction: Fix route for main page ## Code After: from core import views from django.urls import path app_name = "core" urlpatterns = [ path("", views.home, name="home"), path("about/", views.about, name="about"), ]
+ from core import views from django.urls import path - - from core import views - app_name = "core" urlpatterns = [ - path("home/", views.home, name="home"), ? ----- + path("", views.home, name="home"), path("about/", views.about, name="about"), ]
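Worth spelling out about this last fix: moving the pattern from "home/" to "" only puts views.home at the site root if the project-level URLconf includes the core app with an empty prefix. That project file is not part of this record, so the sketch below is an assumption about how dojopuzzles mounts the app, not its actual configuration:

# dojopuzzles/urls.py — hypothetical sketch; the real project URLconf is not shown in this record
from django.contrib import admin
from django.urls import include, path

urlpatterns = [
    path("admin/", admin.site.urls),
    path("", include("core.urls")),  # with this include, core's "" pattern answers at /
]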