Dataset schema (one entry per column, with the per-column string statistics from the source dump):

- commit: string, length 40 to 40
- old_file: string, length 4 to 118
- new_file: string, length 4 to 118
- old_contents: string, length 10 to 2.94k
- new_contents: string, length 21 to 3.18k
- subject: string, length 16 to 444
- message: string, length 17 to 2.63k
- lang: string, 1 distinct value
- license: string, 13 distinct values
- repos: string, length 5 to 43k
- ndiff: string, length 52 to 3.32k
- instruction: string, length 16 to 444
- content: string, length 133 to 4.32k
- fuzzy_diff: string, length 16 to 3.18k
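Each record below lists these fields in order: commit, old_file, new_file, old_contents, new_contents, subject, message, lang, license, repos, ndiff, instruction, content, fuzzy_diff. As a minimal sketch of how such a dump could be consumed, the snippet below assumes the records have been exported as JSON Lines with one object per row; the file name `commits.jsonl` is a hypothetical placeholder, and only the field names from the schema above are relied on.

```python
import json

# Hypothetical export path; each line holds one JSON object with the fields above.
with open("commits.jsonl") as f:
    for line in f:
        record = json.loads(line)
        # 'commit' is a 40-character hash; 'lang' is "Python" for every row shown here.
        print(record["commit"][:8], record["license"], record["subject"])
```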
347853290ebc4f5c47430ffce7d603eb4fead2d9
cpt/test/integration/update_python_reqs_test.py
cpt/test/integration/update_python_reqs_test.py
import unittest

from conans.test.utils.tools import TestClient
from cpt.test.test_client.tools import get_patched_multipackager


class PythonRequiresTest(unittest.TestCase):

    def test_python_requires(self):
        base_conanfile = """from conans import ConanFile

myvar = 123

def myfunct():
    return 234

class Pkg(ConanFile):
    pass
"""
        conanfile = """from conans import ConanFile

class Pkg(ConanFile):
    name = "pyreq"
    version = "1.0.0"
    python_requires = "pyreq_base/0.1@user/channel"

    def build(self):
        v = self.python_requires["pyreq_base"].module.myvar
        f = self.python_requires["pyreq_base"].module.myfunct()
        self.output.info("%s,%s" % (v, f))
"""
        client = TestClient()
        client.save({"conanfile_base.py": base_conanfile})
        client.run("export conanfile_base.py pyreq_base/0.1@user/channel")

        client.save({"conanfile.py": conanfile})

        mulitpackager = get_patched_multipackager(client, username="user",
                                                  channel="testing",
                                                  exclude_vcvars_precommand=True)
        mulitpackager.add({}, {})
        mulitpackager.run()

        self.assertIn("pyreq/1.0.0@user/testing: 123,234", client.out)
import unittest

from conans.test.utils.tools import TestClient
from cpt.test.test_client.tools import get_patched_multipackager


class PythonRequiresTest(unittest.TestCase):

    def test_python_requires(self):
        base_conanfile = """from conans import ConanFile

myvar = 123

def myfunct():
    return 234

class Pkg(ConanFile):
    pass
"""
        conanfile = """from conans import ConanFile

class Pkg(ConanFile):
    name = "pyreq"
    version = "1.0.0"
    python_requires = "pyreq_base/0.1@user/channel"

    def build(self):
        v = self.python_requires["pyreq_base"].module.myvar
        f = self.python_requires["pyreq_base"].module.myfunct()
        self.output.info("%s,%s" % (v, f))
"""
        client = TestClient()
        client.save({"conanfile_base.py": base_conanfile})
        client.run("export conanfile_base.py pyreq_base/0.1@user/channel")

        client.save({"conanfile.py": conanfile})

        mulitpackager = get_patched_multipackager(client, username="user",
                                                  channel="testing",
                                                  exclude_vcvars_precommand=True)
        mulitpackager.add({}, {})
        mulitpackager.run()

        self.assertIn("pyreq/1.0.0@user/", client.out)
        self.assertIn(": 123,234", client.out)
Fix pyreq test on Windows
Fix pyreq test on Windows

Signed-off-by: Uilian Ries <[email protected]>
Python
mit
conan-io/conan-package-tools
import unittest

from conans.test.utils.tools import TestClient
from cpt.test.test_client.tools import get_patched_multipackager


class PythonRequiresTest(unittest.TestCase):

    def test_python_requires(self):
        base_conanfile = """from conans import ConanFile

myvar = 123

def myfunct():
    return 234

class Pkg(ConanFile):
    pass
"""
        conanfile = """from conans import ConanFile

class Pkg(ConanFile):
    name = "pyreq"
    version = "1.0.0"
    python_requires = "pyreq_base/0.1@user/channel"

    def build(self):
        v = self.python_requires["pyreq_base"].module.myvar
        f = self.python_requires["pyreq_base"].module.myfunct()
        self.output.info("%s,%s" % (v, f))
"""
        client = TestClient()
        client.save({"conanfile_base.py": base_conanfile})
        client.run("export conanfile_base.py pyreq_base/0.1@user/channel")

        client.save({"conanfile.py": conanfile})

        mulitpackager = get_patched_multipackager(client, username="user",
                                                  channel="testing",
                                                  exclude_vcvars_precommand=True)
        mulitpackager.add({}, {})
        mulitpackager.run()

-         self.assertIn("pyreq/1.0.0@user/testing: 123,234", client.out)
+         self.assertIn("pyreq/1.0.0@user/", client.out)
+         self.assertIn(": 123,234", client.out)
Fix pyreq test on Windows
## Code Before:
import unittest

from conans.test.utils.tools import TestClient
from cpt.test.test_client.tools import get_patched_multipackager


class PythonRequiresTest(unittest.TestCase):

    def test_python_requires(self):
        base_conanfile = """from conans import ConanFile

myvar = 123

def myfunct():
    return 234

class Pkg(ConanFile):
    pass
"""
        conanfile = """from conans import ConanFile

class Pkg(ConanFile):
    name = "pyreq"
    version = "1.0.0"
    python_requires = "pyreq_base/0.1@user/channel"

    def build(self):
        v = self.python_requires["pyreq_base"].module.myvar
        f = self.python_requires["pyreq_base"].module.myfunct()
        self.output.info("%s,%s" % (v, f))
"""
        client = TestClient()
        client.save({"conanfile_base.py": base_conanfile})
        client.run("export conanfile_base.py pyreq_base/0.1@user/channel")

        client.save({"conanfile.py": conanfile})

        mulitpackager = get_patched_multipackager(client, username="user",
                                                  channel="testing",
                                                  exclude_vcvars_precommand=True)
        mulitpackager.add({}, {})
        mulitpackager.run()

        self.assertIn("pyreq/1.0.0@user/testing: 123,234", client.out)

## Instruction:
Fix pyreq test on Windows

## Code After:
import unittest

from conans.test.utils.tools import TestClient
from cpt.test.test_client.tools import get_patched_multipackager


class PythonRequiresTest(unittest.TestCase):

    def test_python_requires(self):
        base_conanfile = """from conans import ConanFile

myvar = 123

def myfunct():
    return 234

class Pkg(ConanFile):
    pass
"""
        conanfile = """from conans import ConanFile

class Pkg(ConanFile):
    name = "pyreq"
    version = "1.0.0"
    python_requires = "pyreq_base/0.1@user/channel"

    def build(self):
        v = self.python_requires["pyreq_base"].module.myvar
        f = self.python_requires["pyreq_base"].module.myfunct()
        self.output.info("%s,%s" % (v, f))
"""
        client = TestClient()
        client.save({"conanfile_base.py": base_conanfile})
        client.run("export conanfile_base.py pyreq_base/0.1@user/channel")

        client.save({"conanfile.py": conanfile})

        mulitpackager = get_patched_multipackager(client, username="user",
                                                  channel="testing",
                                                  exclude_vcvars_precommand=True)
        mulitpackager.add({}, {})
        mulitpackager.run()

        self.assertIn("pyreq/1.0.0@user/", client.out)
        self.assertIn(": 123,234", client.out)
...
        mulitpackager.run()

        self.assertIn("pyreq/1.0.0@user/", client.out)
        self.assertIn(": 123,234", client.out)
...
cd342448675f3174bf74118de0447c1b0f169f3e
python/volumeBars.py
python/volumeBars.py
from rgbmatrix import RGBMatrix
from random import randint
import numpy
import math
import time

rows = 16
chains = 1
parallel = 1
ledMatrix = RGBMatrix(rows, chains, parallel)
height = ledMatrix.height
width = ledMatrix.width
barWidth = width / 16
pi = numpy.pi

barHeights = numpy.empty([16])
for i in range(16):
    barHeights[i] = i * pi / 16

while True:
    nextFrame = ledMatrix.CreateFrameCanvas()
    heights = numpy.sin(barHeights)
    barHeights += pi / 16
    for x in range(width):
        barHeight = int(heights[int(x / barWidth)] * height)
        for y in range(height):
            if height - y <= barHeight:
                if y < 2:
                    nextFrame.SetPixel(x, y, 255, 0, 0)
                elif y < 6:
                    nextFrame.SetPixel(x, y, 200, 200, 0)
                else:
                    nextFrame.SetPixel(x, y, 0, 200, 0)
    ledMatrix.SwapOnVSync(nextFrame)
    time.sleep(0.2)
from rgbmatrix import RGBMatrix
from random import randint
import numpy
import math
import time

rows = 16
chains = 1
parallel = 1
ledMatrix = RGBMatrix(rows, chains, parallel)
height = ledMatrix.height
width = ledMatrix.width
barWidth = width / 16
pi = numpy.pi

barHeights = numpy.empty([16])
for i in range(16):
    barHeights[i] = i * pi / 16

while True:
    nextFrame = ledMatrix.CreateFrameCanvas()
    heights = numpy.empty([16])
    for i in range(len(barHeights)):
        heights[i] = (math.sin(randint(-3, 3) * x) + math.cos(randint(-3, 3) * x) + math.cos(randint(-3, 3) * x)) / 3

    barHeights += pi / 16
    for x in range(width):
        barHeight = int(heights[int(x / barWidth)] * height)
        for y in range(height):
            if height - y <= barHeight:
                if y < 2:
                    nextFrame.SetPixel(x, y, 255, 0, 0)
                elif y < 6:
                    nextFrame.SetPixel(x, y, 200, 200, 0)
                else:
                    nextFrame.SetPixel(x, y, 0, 200, 0)

    ledMatrix.SwapOnVSync(nextFrame)
    time.sleep(0.2)
Create a more random function
Create a more random function
Python
mit
DarkAce65/rpi-led-matrix,DarkAce65/rpi-led-matrix
from rgbmatrix import RGBMatrix
from random import randint
import numpy
import math
import time

rows = 16
chains = 1
parallel = 1
ledMatrix = RGBMatrix(rows, chains, parallel)
height = ledMatrix.height
width = ledMatrix.width
barWidth = width / 16
pi = numpy.pi

barHeights = numpy.empty([16])
for i in range(16):
    barHeights[i] = i * pi / 16

while True:
    nextFrame = ledMatrix.CreateFrameCanvas()
-     heights = numpy.sin(barHeights)
+     heights = numpy.empty([16])
+     for i in range(len(barHeights)):
+         heights[i] = (math.sin(randint(-3, 3) * x) + math.cos(randint(-3, 3) * x) + math.cos(randint(-3, 3) * x)) / 3
+
    barHeights += pi / 16
    for x in range(width):
        barHeight = int(heights[int(x / barWidth)] * height)
        for y in range(height):
            if height - y <= barHeight:
                if y < 2:
                    nextFrame.SetPixel(x, y, 255, 0, 0)
                elif y < 6:
                    nextFrame.SetPixel(x, y, 200, 200, 0)
                else:
                    nextFrame.SetPixel(x, y, 0, 200, 0)
+
    ledMatrix.SwapOnVSync(nextFrame)
    time.sleep(0.2)
Create a more random function
## Code Before:
from rgbmatrix import RGBMatrix
from random import randint
import numpy
import math
import time

rows = 16
chains = 1
parallel = 1
ledMatrix = RGBMatrix(rows, chains, parallel)
height = ledMatrix.height
width = ledMatrix.width
barWidth = width / 16
pi = numpy.pi

barHeights = numpy.empty([16])
for i in range(16):
    barHeights[i] = i * pi / 16

while True:
    nextFrame = ledMatrix.CreateFrameCanvas()
    heights = numpy.sin(barHeights)
    barHeights += pi / 16
    for x in range(width):
        barHeight = int(heights[int(x / barWidth)] * height)
        for y in range(height):
            if height - y <= barHeight:
                if y < 2:
                    nextFrame.SetPixel(x, y, 255, 0, 0)
                elif y < 6:
                    nextFrame.SetPixel(x, y, 200, 200, 0)
                else:
                    nextFrame.SetPixel(x, y, 0, 200, 0)
    ledMatrix.SwapOnVSync(nextFrame)
    time.sleep(0.2)

## Instruction:
Create a more random function

## Code After:
from rgbmatrix import RGBMatrix
from random import randint
import numpy
import math
import time

rows = 16
chains = 1
parallel = 1
ledMatrix = RGBMatrix(rows, chains, parallel)
height = ledMatrix.height
width = ledMatrix.width
barWidth = width / 16
pi = numpy.pi

barHeights = numpy.empty([16])
for i in range(16):
    barHeights[i] = i * pi / 16

while True:
    nextFrame = ledMatrix.CreateFrameCanvas()
    heights = numpy.empty([16])
    for i in range(len(barHeights)):
        heights[i] = (math.sin(randint(-3, 3) * x) + math.cos(randint(-3, 3) * x) + math.cos(randint(-3, 3) * x)) / 3

    barHeights += pi / 16
    for x in range(width):
        barHeight = int(heights[int(x / barWidth)] * height)
        for y in range(height):
            if height - y <= barHeight:
                if y < 2:
                    nextFrame.SetPixel(x, y, 255, 0, 0)
                elif y < 6:
                    nextFrame.SetPixel(x, y, 200, 200, 0)
                else:
                    nextFrame.SetPixel(x, y, 0, 200, 0)

    ledMatrix.SwapOnVSync(nextFrame)
    time.sleep(0.2)
...
    nextFrame = ledMatrix.CreateFrameCanvas()
    heights = numpy.empty([16])
    for i in range(len(barHeights)):
        heights[i] = (math.sin(randint(-3, 3) * x) + math.cos(randint(-3, 3) * x) + math.cos(randint(-3, 3) * x)) / 3

    barHeights += pi / 16
...
                    nextFrame.SetPixel(x, y, 0, 200, 0)

    ledMatrix.SwapOnVSync(nextFrame)
...
b9fbc9ba6ab2c379e26d6e599fcaaf6ab9b84473
server/slack.py
server/slack.py
import json
import kartlogic.rank
import logging
import prettytable
import util.web
import util.slack


def handler(event, context):
    logging.warning(event['body'])
    logging.warning(json.dumps(util.slack.parse_input(event['body'])))
    return util.web.respond_success("Successful")


def rank_individuals_by_average_score(event, context):
    # retrieve the ranking board data
    board_data = kartlogic.rank.average_individual()

    # initialize the text table
    table = prettytable.PrettyTable(['Rank', 'Player', 'Character', 'Average'])

    # add player data to table
    for index, player in enumerate(board_data):
        table.add_row([(index + 1), player['name'], player['character'], player['average']])

    # convert the entire table to a string
    table_string = '```' + table.get_string(border=True) + '```'

    # the response body that Slack expects
    slack_response = util.slack.in_channel_response(table_string)

    return util.web.respond_success_json(slack_response)
import kartlogic.rank
import prettytable
import util.web as webutil
import util.slack as slackutil


def handler(event, context):
    input_data = slackutil.slack.parse_input(event['body'])

    if slackutil.validate_slack_token(input_data) is False:
        return webutil.respond_unauthorized("Invalid Slack token")

    return webutil.respond_success("Successful")


def rank_individuals_by_average_score(event, context):
    # retrieve the ranking board data
    board_data = kartlogic.rank.average_individual()

    # initialize the text table
    table = prettytable.PrettyTable(['Rank', 'Player', 'Character', 'Average'])

    # add player data to table
    for index, player in enumerate(board_data):
        table.add_row([(index + 1), player['name'], player['character'], player['average']])

    # convert the entire table to a string
    table_string = '```' + table.get_string(border=True) + '```'

    # the response body that Slack expects
    slack_response = slackutil.in_channel_response(table_string)

    return webutil.respond_success_json(slack_response)
Add Slack token validation to handler
Add Slack token validation to handler
Python
mit
groppe/mario
- import json
import kartlogic.rank
- import logging
import prettytable
- import util.web
- import util.slack
+ import util.web as webutil
+ import util.slack as slackutil


def handler(event, context):
-     logging.warning(event['body'])
-     logging.warning(json.dumps(util.slack.parse_input(event['body'])))
+     input_data = slackutil.slack.parse_input(event['body'])
+
+     if slackutil.validate_slack_token(input_data) is False:
+         return webutil.respond_unauthorized("Invalid Slack token")
+
-     return util.web.respond_success("Successful")
+     return webutil.respond_success("Successful")


def rank_individuals_by_average_score(event, context):
    # retrieve the ranking board data
    board_data = kartlogic.rank.average_individual()

    # initialize the text table
    table = prettytable.PrettyTable(['Rank', 'Player', 'Character', 'Average'])

    # add player data to table
    for index, player in enumerate(board_data):
        table.add_row([(index + 1), player['name'], player['character'], player['average']])

    # convert the entire table to a string
    table_string = '```' + table.get_string(border=True) + '```'

    # the response body that Slack expects
-     slack_response = util.slack.in_channel_response(table_string)
+     slack_response = slackutil.in_channel_response(table_string)

-     return util.web.respond_success_json(slack_response)
+     return webutil.respond_success_json(slack_response)
Add Slack token validation to handler
## Code Before:
import json
import kartlogic.rank
import logging
import prettytable
import util.web
import util.slack


def handler(event, context):
    logging.warning(event['body'])
    logging.warning(json.dumps(util.slack.parse_input(event['body'])))
    return util.web.respond_success("Successful")


def rank_individuals_by_average_score(event, context):
    # retrieve the ranking board data
    board_data = kartlogic.rank.average_individual()

    # initialize the text table
    table = prettytable.PrettyTable(['Rank', 'Player', 'Character', 'Average'])

    # add player data to table
    for index, player in enumerate(board_data):
        table.add_row([(index + 1), player['name'], player['character'], player['average']])

    # convert the entire table to a string
    table_string = '```' + table.get_string(border=True) + '```'

    # the response body that Slack expects
    slack_response = util.slack.in_channel_response(table_string)

    return util.web.respond_success_json(slack_response)

## Instruction:
Add Slack token validation to handler

## Code After:
import kartlogic.rank
import prettytable
import util.web as webutil
import util.slack as slackutil


def handler(event, context):
    input_data = slackutil.slack.parse_input(event['body'])

    if slackutil.validate_slack_token(input_data) is False:
        return webutil.respond_unauthorized("Invalid Slack token")

    return webutil.respond_success("Successful")


def rank_individuals_by_average_score(event, context):
    # retrieve the ranking board data
    board_data = kartlogic.rank.average_individual()

    # initialize the text table
    table = prettytable.PrettyTable(['Rank', 'Player', 'Character', 'Average'])

    # add player data to table
    for index, player in enumerate(board_data):
        table.add_row([(index + 1), player['name'], player['character'], player['average']])

    # convert the entire table to a string
    table_string = '```' + table.get_string(border=True) + '```'

    # the response body that Slack expects
    slack_response = slackutil.in_channel_response(table_string)

    return webutil.respond_success_json(slack_response)
# ... existing code ...
import kartlogic.rank
import prettytable
import util.web as webutil
import util.slack as slackutil

# ... modified code ...

def handler(event, context):
    input_data = slackutil.slack.parse_input(event['body'])

    if slackutil.validate_slack_token(input_data) is False:
        return webutil.respond_unauthorized("Invalid Slack token")

    return webutil.respond_success("Successful")

...
    # the response body that Slack expects
    slack_response = slackutil.in_channel_response(table_string)

    return webutil.respond_success_json(slack_response)
# ... rest of the code ...
bbedbab40ba6fc6b958eb7bdc5b50cef58ad0240
bijgeschaafd/settings_test.py
bijgeschaafd/settings_test.py
import os
from settings_base import *

APP_ROOT = os.path.dirname(os.path.abspath(__file__))

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.dirname(APP_ROOT)+'/newsdiffs.db',
    }
}
import os
from settings_base import *

APP_ROOT = os.path.dirname(os.path.abspath(__file__))

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.dirname(APP_ROOT)+'/newsdiffs.db',
    }
}

SECRET_KEY='BULLSHIT'
Add some SECRET_KEY to the test settings in order to make Travis run.
Add some SECRET_KEY to the test settings in order to make Travis run.
Python
mit
flupzor/newsdiffs,flupzor/bijgeschaafd,flupzor/newsdiffs,flupzor/bijgeschaafd,flupzor/bijgeschaafd,flupzor/bijgeschaafd,flupzor/newsdiffs,flupzor/newsdiffs
import os
from settings_base import *

APP_ROOT = os.path.dirname(os.path.abspath(__file__))

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.dirname(APP_ROOT)+'/newsdiffs.db',
    }
}
+
+ SECRET_KEY='BULLSHIT'
Add some SECRET_KEY to the test settings in order to make Travis run.
## Code Before:
import os
from settings_base import *

APP_ROOT = os.path.dirname(os.path.abspath(__file__))

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.dirname(APP_ROOT)+'/newsdiffs.db',
    }
}

## Instruction:
Add some SECRET_KEY to the test settings in order to make Travis run.

## Code After:
import os
from settings_base import *

APP_ROOT = os.path.dirname(os.path.abspath(__file__))

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.dirname(APP_ROOT)+'/newsdiffs.db',
    }
}

SECRET_KEY='BULLSHIT'
// ... existing code ...
SECRET_KEY='BULLSHIT'
// ... rest of the code ...
1096bc339caf0ba329332633d8b9170fb8940f6f
start.py
start.py
import cursingspock
from spockbot import Client
from spockbot.plugins import default_plugins as plugins

from bat import bat, command

plugins.extend([
    ('bat', bat.BatPlugin),
    ('commands', command.CommandPlugin),
    ('curses', cursingspock.CursesPlugin),
])

# login_credentials should contain a dict with 'username' and 'password'
#from login_credentials import settings
settings = {
    'start': {'username': 'Bat'},
    'auth': {'online_mode': False},
}

client = Client(plugins=plugins, settings=settings)
client.start('localhost', 25565)
import cursingspock
from spockbot import Client
from spockbot.plugins import default_plugins

from bat import bat, command

plugins = default_plugins.copy()
plugins.extend([
    ('bat', bat.BatPlugin),
    ('commands', command.CommandPlugin),
    ('curses', cursingspock.CursesPlugin),
])

# login_credentials should contain a dict with 'username' and 'password'
#from login_credentials import settings
settings = {
    'start': {'username': 'Bat'},
    'auth': {'online_mode': False},
}

client = Client(plugins=plugins, settings=settings)
client.start('localhost', 25565)
Copy default plugins and extend
Copy default plugins and extend
Python
mit
Gjum/Bat
import cursingspock
from spockbot import Client
- from spockbot.plugins import default_plugins as plugins
+ from spockbot.plugins import default_plugins

from bat import bat, command

+ plugins = default_plugins.copy()
plugins.extend([
    ('bat', bat.BatPlugin),
    ('commands', command.CommandPlugin),
    ('curses', cursingspock.CursesPlugin),
])

# login_credentials should contain a dict with 'username' and 'password'
#from login_credentials import settings
settings = {
    'start': {'username': 'Bat'},
    'auth': {'online_mode': False},
}

client = Client(plugins=plugins, settings=settings)
client.start('localhost', 25565)
Copy default plugins and extend
## Code Before:
import cursingspock
from spockbot import Client
from spockbot.plugins import default_plugins as plugins

from bat import bat, command

plugins.extend([
    ('bat', bat.BatPlugin),
    ('commands', command.CommandPlugin),
    ('curses', cursingspock.CursesPlugin),
])

# login_credentials should contain a dict with 'username' and 'password'
#from login_credentials import settings
settings = {
    'start': {'username': 'Bat'},
    'auth': {'online_mode': False},
}

client = Client(plugins=plugins, settings=settings)
client.start('localhost', 25565)

## Instruction:
Copy default plugins and extend

## Code After:
import cursingspock
from spockbot import Client
from spockbot.plugins import default_plugins

from bat import bat, command

plugins = default_plugins.copy()
plugins.extend([
    ('bat', bat.BatPlugin),
    ('commands', command.CommandPlugin),
    ('curses', cursingspock.CursesPlugin),
])

# login_credentials should contain a dict with 'username' and 'password'
#from login_credentials import settings
settings = {
    'start': {'username': 'Bat'},
    'auth': {'online_mode': False},
}

client = Client(plugins=plugins, settings=settings)
client.start('localhost', 25565)
...
from spockbot import Client
from spockbot.plugins import default_plugins

from bat import bat, command
...
plugins = default_plugins.copy()
plugins.extend([
...
40ae333ab81ae1f4d93f3937306ddd12718b59a8
virtool/processes.py
virtool/processes.py
import virtool.db.processes


FIRST_STEPS = {
    "delete_reference": "delete_indexes",
    "clone_reference": "copy_otus",
    "import_reference": "load_file",
    "remote_reference": "download",
    "update_remote_reference": "download",
    "update_software": "",
    "install_hmms": ""
}


class ProgressTracker:

    def __init__(self, db, process_id, total, factor=1, increment=0.03, initial=0):
        self.db = db
        self.process_id = process_id
        self.total = total
        self.factor = factor
        self.increment = increment
        self.initial = initial

        self.count = 0
        self.last_reported = 0
        self.progress = self.initial

    async def add(self, value):
        count = self.count + value

        if count > self.total:
            raise ValueError("Count cannot exceed total")

        self.count = count

        self.progress = self.initial + round(self.count / self.total * self.factor, 2)

        if self.progress - self.last_reported >= self.increment:
            await virtool.db.processes.update(self.db, self.process_id, progress=self.progress)
            self.last_reported = self.progress

        return self.progress
import virtool.db.processes


FIRST_STEPS = {
    "delete_reference": "delete_indexes",
    "clone_reference": "copy_otus",
    "import_reference": "load_file",
    "remote_reference": "download",
    "update_remote_reference": "download",
    "update_software": "download",
    "install_hmms": "download"
}


class ProgressTracker:

    def __init__(self, db, process_id, total, factor=1, increment=0.03, initial=0):
        self.db = db
        self.process_id = process_id
        self.total = total
        self.factor = factor
        self.increment = increment
        self.initial = initial

        self.count = 0
        self.last_reported = 0
        self.progress = self.initial

    async def add(self, value):
        count = self.count + value

        if count > self.total:
            raise ValueError("Count cannot exceed total")

        self.count = count

        self.progress = self.initial + round(self.count / self.total * self.factor, 2)

        if self.progress - self.last_reported >= self.increment:
            await virtool.db.processes.update(self.db, self.process_id, progress=self.progress)
            self.last_reported = self.progress

        return self.progress
Make download first step for install_software process type
Make download first step for install_software process type
Python
mit
virtool/virtool,igboyes/virtool,virtool/virtool,igboyes/virtool
import virtool.db.processes


FIRST_STEPS = {
    "delete_reference": "delete_indexes",
    "clone_reference": "copy_otus",
    "import_reference": "load_file",
    "remote_reference": "download",
    "update_remote_reference": "download",
-     "update_software": "",
+     "update_software": "download",
-     "install_hmms": ""
+     "install_hmms": "download"
}


class ProgressTracker:

    def __init__(self, db, process_id, total, factor=1, increment=0.03, initial=0):
        self.db = db
        self.process_id = process_id
        self.total = total
        self.factor = factor
        self.increment = increment
        self.initial = initial

        self.count = 0
        self.last_reported = 0
        self.progress = self.initial

    async def add(self, value):
        count = self.count + value

        if count > self.total:
            raise ValueError("Count cannot exceed total")

        self.count = count

        self.progress = self.initial + round(self.count / self.total * self.factor, 2)

        if self.progress - self.last_reported >= self.increment:
            await virtool.db.processes.update(self.db, self.process_id, progress=self.progress)
            self.last_reported = self.progress

        return self.progress
Make download first step for install_software process type
## Code Before:
import virtool.db.processes


FIRST_STEPS = {
    "delete_reference": "delete_indexes",
    "clone_reference": "copy_otus",
    "import_reference": "load_file",
    "remote_reference": "download",
    "update_remote_reference": "download",
    "update_software": "",
    "install_hmms": ""
}


class ProgressTracker:

    def __init__(self, db, process_id, total, factor=1, increment=0.03, initial=0):
        self.db = db
        self.process_id = process_id
        self.total = total
        self.factor = factor
        self.increment = increment
        self.initial = initial

        self.count = 0
        self.last_reported = 0
        self.progress = self.initial

    async def add(self, value):
        count = self.count + value

        if count > self.total:
            raise ValueError("Count cannot exceed total")

        self.count = count

        self.progress = self.initial + round(self.count / self.total * self.factor, 2)

        if self.progress - self.last_reported >= self.increment:
            await virtool.db.processes.update(self.db, self.process_id, progress=self.progress)
            self.last_reported = self.progress

        return self.progress

## Instruction:
Make download first step for install_software process type

## Code After:
import virtool.db.processes


FIRST_STEPS = {
    "delete_reference": "delete_indexes",
    "clone_reference": "copy_otus",
    "import_reference": "load_file",
    "remote_reference": "download",
    "update_remote_reference": "download",
    "update_software": "download",
    "install_hmms": "download"
}


class ProgressTracker:

    def __init__(self, db, process_id, total, factor=1, increment=0.03, initial=0):
        self.db = db
        self.process_id = process_id
        self.total = total
        self.factor = factor
        self.increment = increment
        self.initial = initial

        self.count = 0
        self.last_reported = 0
        self.progress = self.initial

    async def add(self, value):
        count = self.count + value

        if count > self.total:
            raise ValueError("Count cannot exceed total")

        self.count = count

        self.progress = self.initial + round(self.count / self.total * self.factor, 2)

        if self.progress - self.last_reported >= self.increment:
            await virtool.db.processes.update(self.db, self.process_id, progress=self.progress)
            self.last_reported = self.progress

        return self.progress
// ... existing code ...
    "update_remote_reference": "download",
    "update_software": "download",
    "install_hmms": "download"
}
// ... rest of the code ...
527d460289cb574528f70a2a6c530e86627eb81a
framework/archiver/listeners.py
framework/archiver/listeners.py
from framework.tasks.handlers import enqueue_task

from framework.archiver.tasks import archive, send_success_message
from framework.archiver.utils import (
    link_archive_provider,
)
from framework.archiver import (
    ARCHIVER_SUCCESS,
    ARCHIVER_FAILURE,
)
from framework.archiver.exceptions import ArchiverCopyError

from website.project import signals as project_signals


@project_signals.after_create_registration.connect
def archive_node(src, dst, user):
    """Blinker listener for registration initiations. Enqueqes an archive task

    :param src: Node being registered
    :param dst: registration Node
    :param user: registration initiator
    """
    link_archive_provider(dst, user)
    enqueue_task(archive.si(src._id, dst._id, user._id))


@project_signals.archive_callback.connect
def archive_callback(dst):
    """Blinker listener for updates to the archive task. When no tasks are
    pending, either fail the registration or send a success email

    :param dst: registration Node
    """
    if not dst.archiving:
        return
    pending = {key: value for key, value in dst.archived_providers.iteritems() if value['status'] not in (ARCHIVER_SUCCESS, ARCHIVER_FAILURE)}
    if not len(pending):
        dst.archiving = False
        dst.save()
        if ARCHIVER_FAILURE in [value['status'] for value in dst.archived_providers.values()]:
            raise ArchiverCopyError(dst.registered_from, dst, dst.creator, dst.archived_providers)
        else:
            send_success_message.delay(dst._id)
from framework.tasks.handlers import enqueue_task

from framework.archiver.tasks import archive, send_success_message
from framework.archiver.utils import (
    link_archive_provider,
)
from framework.archiver import (
    ARCHIVER_SUCCESS,
    ARCHIVER_FAILURE,
)
from framework.archiver.exceptions import ArchiverCopyError

from website.project import signals as project_signals


@project_signals.after_create_registration.connect
def archive_node(src, dst, user):
    """Blinker listener for registration initiations. Enqueqes an archive task

    :param src: Node being registered
    :param dst: registration Node
    :param user: registration initiator
    """
    link_archive_provider(dst, user)
    enqueue_task(archive.si(src._id, dst._id, user._id))


@project_signals.archive_callback.connect
def archive_callback(dst):
    """Blinker listener for updates to the archive task. When no tasks are
    pending, either fail the registration or send a success email

    :param dst: registration Node
    """
    if not dst.archiving:
        return
    pending = [value for value in dst.archived_providers.values() if value['status'] not in (ARCHIVER_SUCCESS, ARCHIVER_FAILURE)]
    if not pending:
        dst.archiving = False
        dst.save()
        if ARCHIVER_FAILURE in [value['status'] for value in dst.archived_providers.values()]:
            raise ArchiverCopyError(dst.registered_from, dst, dst.creator, dst.archived_providers)
        else:
            send_success_message.delay(dst._id)
Use list comp instead of unnecessary dict comp
Use list comp instead of unnecessary dict comp
Python
apache-2.0
pattisdr/osf.io,ticklemepierce/osf.io,billyhunt/osf.io,wearpants/osf.io,mluo613/osf.io,ckc6cz/osf.io,doublebits/osf.io,aaxelb/osf.io,chrisseto/osf.io,amyshi188/osf.io,abought/osf.io,DanielSBrown/osf.io,adlius/osf.io,Nesiehr/osf.io,laurenrevere/osf.io,monikagrabowska/osf.io,kwierman/osf.io,amyshi188/osf.io,reinaH/osf.io,petermalcolm/osf.io,caseyrygt/osf.io,dplorimer/osf,hmoco/osf.io,chennan47/osf.io,brianjgeiger/osf.io,arpitar/osf.io,brianjgeiger/osf.io,jnayak1/osf.io,petermalcolm/osf.io,samchrisinger/osf.io,ckc6cz/osf.io,leb2dg/osf.io,mluo613/osf.io,acshi/osf.io,MerlinZhang/osf.io,jmcarp/osf.io,mluke93/osf.io,emetsger/osf.io,KAsante95/osf.io,acshi/osf.io,mluo613/osf.io,RomanZWang/osf.io,bdyetton/prettychart,hmoco/osf.io,acshi/osf.io,sloria/osf.io,TomBaxter/osf.io,jnayak1/osf.io,SSJohns/osf.io,monikagrabowska/osf.io,Nesiehr/osf.io,mluo613/osf.io,cwisecarver/osf.io,alexschiller/osf.io,felliott/osf.io,kch8qx/osf.io,adlius/osf.io,DanielSBrown/osf.io,caseyrollins/osf.io,CenterForOpenScience/osf.io,jnayak1/osf.io,samchrisinger/osf.io,asanfilippo7/osf.io,chennan47/osf.io,zamattiac/osf.io,cslzchen/osf.io,CenterForOpenScience/osf.io,hmoco/osf.io,samchrisinger/osf.io,asanfilippo7/osf.io,mluke93/osf.io,chrisseto/osf.io,cwisecarver/osf.io,monikagrabowska/osf.io,Nesiehr/osf.io,mluo613/osf.io,cwisecarver/osf.io,alexschiller/osf.io,zachjanicki/osf.io,asanfilippo7/osf.io,MerlinZhang/osf.io,rdhyee/osf.io,KAsante95/osf.io,alexschiller/osf.io,zachjanicki/osf.io,samanehsan/osf.io,jmcarp/osf.io,dplorimer/osf,HarryRybacki/osf.io,fabianvf/osf.io,danielneis/osf.io,jmcarp/osf.io,Ghalko/osf.io,caseyrygt/osf.io,samanehsan/osf.io,HalcyonChimera/osf.io,HalcyonChimera/osf.io,RomanZWang/osf.io,wearpants/osf.io,Ghalko/osf.io,emetsger/osf.io,rdhyee/osf.io,mattclark/osf.io,kch8qx/osf.io,baylee-d/osf.io,arpitar/osf.io,cslzchen/osf.io,binoculars/osf.io,erinspace/osf.io,alexschiller/osf.io,acshi/osf.io,lyndsysimon/osf.io,sloria/osf.io,caseyrygt/osf.io,binoculars/osf.io,TomHeatwole/osf.io,brianjgeiger/osf.io,HarryRybacki/osf.io,SSJohns/osf.io,amyshi188/osf.io,jinluyuan/osf.io,laurenrevere/osf.io,Nesiehr/osf.io,erinspace/osf.io,bdyetton/prettychart,acshi/osf.io,haoyuchen1992/osf.io,kwierman/osf.io,KAsante95/osf.io,SSJohns/osf.io,jeffreyliu3230/osf.io,brandonPurvis/osf.io,danielneis/osf.io,TomHeatwole/osf.io,mfraezz/osf.io,zachjanicki/osf.io,TomHeatwole/osf.io,caseyrollins/osf.io,Johnetordoff/osf.io,lyndsysimon/osf.io,zamattiac/osf.io,petermalcolm/osf.io,GageGaskins/osf.io,bdyetton/prettychart,jnayak1/osf.io,dplorimer/osf,rdhyee/osf.io,jolene-esposito/osf.io,TomBaxter/osf.io,GageGaskins/osf.io,abought/osf.io,cwisecarver/osf.io,cslzchen/osf.io,RomanZWang/osf.io,GageGaskins/osf.io,samchrisinger/osf.io,alexschiller/osf.io,jeffreyliu3230/osf.io,pattisdr/osf.io,saradbowman/osf.io,adlius/osf.io,cosenal/osf.io,samanehsan/osf.io,lyndsysimon/osf.io,chrisseto/osf.io,cosenal/osf.io,Nesiehr/osf.io,Ghalko/osf.io,RomanZWang/osf.io,crcresearch/osf.io,adlius/osf.io,njantrania/osf.io,DanielSBrown/osf.io,MerlinZhang/osf.io,danielneis/osf.io,brandonPurvis/osf.io,ticklemepierce/osf.io,reinaH/osf.io,mfraezz/osf.io,mluo613/osf.io,billyhunt/osf.io,icereval/osf.io,wearpants/osf.io,bdyetton/prettychart,monikagrabowska/osf.io,binoculars/osf.io,chrisseto/osf.io,mattclark/osf.io,chennan47/osf.io,rdhyee/osf.io,caneruguz/osf.io,aaxelb/osf.io,GageGaskins/osf.io,mfraezz/osf.io,Johnetordoff/osf.io,erinspace/osf.io,leb2dg/osf.io,ZobairAlijan/osf.io,CenterForOpenScience/osf.io,jeffreyliu3230/osf.io,doublebits/osf.io,reinaH/osf.io,fabianvf/osf.io,ZobairAlijan/osf.io,cslzchen/osf.io,doublebits/osf.io,monikagrabowska/osf.io,GageGaskins/osf.io,jinluyuan/osf.io,DanielSBrown/osf.io,doublebits/osf.io,caseyrollins/osf.io,brianjgeiger/osf.io,sbt9uc/osf.io,kch8qx/osf.io,jolene-esposito/osf.io,alexschiller/osf.io,ZobairAlijan/osf.io,njantrania/osf.io,zachjanicki/osf.io,billyhunt/osf.io,haoyuchen1992/osf.io,jolene-esposito/osf.io,cosenal/osf.io,cwisecarver/osf.io,abought/osf.io,MerlinZhang/osf.io,zamattiac/osf.io,HalcyonChimera/osf.io,mluke93/osf.io,ticklemepierce/osf.io,TomBaxter/osf.io,jinluyuan/osf.io,brandonPurvis/osf.io,jeffreyliu3230/osf.io,haoyuchen1992/osf.io,ckc6cz/osf.io,cldershem/osf.io,brandonPurvis/osf.io,fabianvf/osf.io,laurenrevere/osf.io,HarryRybacki/osf.io,caneruguz/osf.io,felliott/osf.io,crcresearch/osf.io,sbt9uc/osf.io,baylee-d/osf.io,hmoco/osf.io,njantrania/osf.io,crcresearch/osf.io,aaxelb/osf.io,sloria/osf.io,SSJohns/osf.io,kwierman/osf.io,arpitar/osf.io,ckc6cz/osf.io,billyhunt/osf.io,doublebits/osf.io,cldershem/osf.io,caseyrygt/osf.io,haoyuchen1992/osf.io,arpitar/osf.io,lyndsysimon/osf.io,felliott/osf.io,asanfilippo7/osf.io,zamattiac/osf.io,abought/osf.io,sbt9uc/osf.io,jmcarp/osf.io,jnayak1/osf.io,HalcyonChimera/osf.io,pattisdr/osf.io,HarryRybacki/osf.io,caneruguz/osf.io,samchrisinger/osf.io,baylee-d/osf.io,felliott/osf.io,cldershem/osf.io,mattclark/osf.io,kwierman/osf.io,CenterForOpenScience/osf.io,kch8qx/osf.io,billyhunt/osf.io,leb2dg/osf.io,samanehsan/osf.io,Johnetordoff/osf.io,brandonPurvis/osf.io,kch8qx/osf.io,aaxelb/osf.io,RomanZWang/osf.io
from framework.tasks.handlers import enqueue_task

from framework.archiver.tasks import archive, send_success_message
from framework.archiver.utils import (
    link_archive_provider,
)
from framework.archiver import (
    ARCHIVER_SUCCESS,
    ARCHIVER_FAILURE,
)
from framework.archiver.exceptions import ArchiverCopyError

from website.project import signals as project_signals


@project_signals.after_create_registration.connect
def archive_node(src, dst, user):
    """Blinker listener for registration initiations. Enqueqes an archive task

    :param src: Node being registered
    :param dst: registration Node
    :param user: registration initiator
    """
    link_archive_provider(dst, user)
    enqueue_task(archive.si(src._id, dst._id, user._id))


@project_signals.archive_callback.connect
def archive_callback(dst):
    """Blinker listener for updates to the archive task. When no tasks are
    pending, either fail the registration or send a success email

    :param dst: registration Node
    """
    if not dst.archiving:
        return
-     pending = {key: value for key, value in dst.archived_providers.iteritems() if value['status'] not in (ARCHIVER_SUCCESS, ARCHIVER_FAILURE)}
+     pending = [value for value in dst.archived_providers.values() if value['status'] not in (ARCHIVER_SUCCESS, ARCHIVER_FAILURE)]
-     if not len(pending):
+     if not pending:
        dst.archiving = False
        dst.save()
        if ARCHIVER_FAILURE in [value['status'] for value in dst.archived_providers.values()]:
            raise ArchiverCopyError(dst.registered_from, dst, dst.creator, dst.archived_providers)
        else:
            send_success_message.delay(dst._id)
Use list comp instead of unnecessary dict comp
## Code Before:
from framework.tasks.handlers import enqueue_task

from framework.archiver.tasks import archive, send_success_message
from framework.archiver.utils import (
    link_archive_provider,
)
from framework.archiver import (
    ARCHIVER_SUCCESS,
    ARCHIVER_FAILURE,
)
from framework.archiver.exceptions import ArchiverCopyError

from website.project import signals as project_signals


@project_signals.after_create_registration.connect
def archive_node(src, dst, user):
    """Blinker listener for registration initiations. Enqueqes an archive task

    :param src: Node being registered
    :param dst: registration Node
    :param user: registration initiator
    """
    link_archive_provider(dst, user)
    enqueue_task(archive.si(src._id, dst._id, user._id))


@project_signals.archive_callback.connect
def archive_callback(dst):
    """Blinker listener for updates to the archive task. When no tasks are
    pending, either fail the registration or send a success email

    :param dst: registration Node
    """
    if not dst.archiving:
        return
    pending = {key: value for key, value in dst.archived_providers.iteritems() if value['status'] not in (ARCHIVER_SUCCESS, ARCHIVER_FAILURE)}
    if not len(pending):
        dst.archiving = False
        dst.save()
        if ARCHIVER_FAILURE in [value['status'] for value in dst.archived_providers.values()]:
            raise ArchiverCopyError(dst.registered_from, dst, dst.creator, dst.archived_providers)
        else:
            send_success_message.delay(dst._id)

## Instruction:
Use list comp instead of unnecessary dict comp

## Code After:
from framework.tasks.handlers import enqueue_task

from framework.archiver.tasks import archive, send_success_message
from framework.archiver.utils import (
    link_archive_provider,
)
from framework.archiver import (
    ARCHIVER_SUCCESS,
    ARCHIVER_FAILURE,
)
from framework.archiver.exceptions import ArchiverCopyError

from website.project import signals as project_signals


@project_signals.after_create_registration.connect
def archive_node(src, dst, user):
    """Blinker listener for registration initiations. Enqueqes an archive task

    :param src: Node being registered
    :param dst: registration Node
    :param user: registration initiator
    """
    link_archive_provider(dst, user)
    enqueue_task(archive.si(src._id, dst._id, user._id))


@project_signals.archive_callback.connect
def archive_callback(dst):
    """Blinker listener for updates to the archive task. When no tasks are
    pending, either fail the registration or send a success email

    :param dst: registration Node
    """
    if not dst.archiving:
        return
    pending = [value for value in dst.archived_providers.values() if value['status'] not in (ARCHIVER_SUCCESS, ARCHIVER_FAILURE)]
    if not pending:
        dst.archiving = False
        dst.save()
        if ARCHIVER_FAILURE in [value['status'] for value in dst.archived_providers.values()]:
            raise ArchiverCopyError(dst.registered_from, dst, dst.creator, dst.archived_providers)
        else:
            send_success_message.delay(dst._id)
...
        return
    pending = [value for value in dst.archived_providers.values() if value['status'] not in (ARCHIVER_SUCCESS, ARCHIVER_FAILURE)]
    if not pending:
        dst.archiving = False
...
a2d3c2e0391d2deeb1d6729567c2d8812ad7e7df
exam/asserts.py
exam/asserts.py
irrelevant = object()


class ChangeWatcher(object):

    def __init__(self, thing, *args, **kwargs):
        self.thing = thing
        self.args = args
        self.kwargs = kwargs

        self.expected_before = kwargs.pop('before', irrelevant)
        self.expected_after = kwargs.pop('after', irrelevant)

    def __enter__(self):
        self.before = self.__apply()

        if not self.expected_before is irrelevant:
            check = self.before == self.expected_before
            assert check, self.__precondition_failure_msg_for('before')

    def __exit__(self, type, value, traceback):
        self.after = self.__apply()

        if not self.expected_after is irrelevant:
            check = self.after == self.expected_after
            assert check, self.__precondition_failure_msg_for('after')

        assert self.before != self.after, self.__equality_failure_message

    def __apply(self):
        return self.thing(*self.args, **self.kwargs)

    @property
    def __equality_failure_message(self):
        return 'Expected before %s != %s after' % (self.before, self.after)

    def __precondition_failure_msg_for(self, condition):
        return '%s value did not change (%s)' % (
            condition,
            getattr(self, condition)
        )


class AssertsMixin(object):
    assertChanges = ChangeWatcher
IRRELEVANT = object()


class ChangeWatcher(object):

    def __init__(self, thing, *args, **kwargs):
        self.thing = thing
        self.args = args
        self.kwargs = kwargs

        self.expected_before = kwargs.pop('before', IRRELEVANT)
        self.expected_after = kwargs.pop('after', IRRELEVANT)

    def __enter__(self):
        self.before = self.__apply()

        if not self.expected_before is IRRELEVANT:
            check = self.before == self.expected_before
            assert check, self.__precondition_failure_msg_for('before')

    def __exit__(self, type, value, traceback):
        self.after = self.__apply()

        if not self.expected_after is IRRELEVANT:
            check = self.after == self.expected_after
            assert check, self.__precondition_failure_msg_for('after')

        assert self.before != self.after, self.__equality_failure_message

    def __apply(self):
        return self.thing(*self.args, **self.kwargs)

    @property
    def __equality_failure_message(self):
        return 'Expected before %s != %s after' % (self.before, self.after)

    def __precondition_failure_msg_for(self, condition):
        return '%s value did not change (%s)' % (
            condition,
            getattr(self, condition)
        )


class AssertsMixin(object):
    assertChanges = ChangeWatcher
Make the irrelevant object a constant
Make the irrelevant object a constant
Python
mit
Fluxx/exam,gterzian/exam,Fluxx/exam,gterzian/exam
- irrelevant = object()
+ IRRELEVANT = object()


class ChangeWatcher(object):

    def __init__(self, thing, *args, **kwargs):
        self.thing = thing
        self.args = args
        self.kwargs = kwargs

-         self.expected_before = kwargs.pop('before', irrelevant)
+         self.expected_before = kwargs.pop('before', IRRELEVANT)
-         self.expected_after = kwargs.pop('after', irrelevant)
+         self.expected_after = kwargs.pop('after', IRRELEVANT)

    def __enter__(self):
        self.before = self.__apply()

-         if not self.expected_before is irrelevant:
+         if not self.expected_before is IRRELEVANT:
            check = self.before == self.expected_before
            assert check, self.__precondition_failure_msg_for('before')

    def __exit__(self, type, value, traceback):
        self.after = self.__apply()

-         if not self.expected_after is irrelevant:
+         if not self.expected_after is IRRELEVANT:
            check = self.after == self.expected_after
            assert check, self.__precondition_failure_msg_for('after')

        assert self.before != self.after, self.__equality_failure_message

    def __apply(self):
        return self.thing(*self.args, **self.kwargs)

    @property
    def __equality_failure_message(self):
        return 'Expected before %s != %s after' % (self.before, self.after)

    def __precondition_failure_msg_for(self, condition):
        return '%s value did not change (%s)' % (
            condition,
            getattr(self, condition)
        )


class AssertsMixin(object):
    assertChanges = ChangeWatcher
Make the irrelevant object a constant
## Code Before:
irrelevant = object()


class ChangeWatcher(object):

    def __init__(self, thing, *args, **kwargs):
        self.thing = thing
        self.args = args
        self.kwargs = kwargs

        self.expected_before = kwargs.pop('before', irrelevant)
        self.expected_after = kwargs.pop('after', irrelevant)

    def __enter__(self):
        self.before = self.__apply()

        if not self.expected_before is irrelevant:
            check = self.before == self.expected_before
            assert check, self.__precondition_failure_msg_for('before')

    def __exit__(self, type, value, traceback):
        self.after = self.__apply()

        if not self.expected_after is irrelevant:
            check = self.after == self.expected_after
            assert check, self.__precondition_failure_msg_for('after')

        assert self.before != self.after, self.__equality_failure_message

    def __apply(self):
        return self.thing(*self.args, **self.kwargs)

    @property
    def __equality_failure_message(self):
        return 'Expected before %s != %s after' % (self.before, self.after)

    def __precondition_failure_msg_for(self, condition):
        return '%s value did not change (%s)' % (
            condition,
            getattr(self, condition)
        )


class AssertsMixin(object):
    assertChanges = ChangeWatcher

## Instruction:
Make the irrelevant object a constant

## Code After:
IRRELEVANT = object()


class ChangeWatcher(object):

    def __init__(self, thing, *args, **kwargs):
        self.thing = thing
        self.args = args
        self.kwargs = kwargs

        self.expected_before = kwargs.pop('before', IRRELEVANT)
        self.expected_after = kwargs.pop('after', IRRELEVANT)

    def __enter__(self):
        self.before = self.__apply()

        if not self.expected_before is IRRELEVANT:
            check = self.before == self.expected_before
            assert check, self.__precondition_failure_msg_for('before')

    def __exit__(self, type, value, traceback):
        self.after = self.__apply()

        if not self.expected_after is IRRELEVANT:
            check = self.after == self.expected_after
            assert check, self.__precondition_failure_msg_for('after')

        assert self.before != self.after, self.__equality_failure_message

    def __apply(self):
        return self.thing(*self.args, **self.kwargs)

    @property
    def __equality_failure_message(self):
        return 'Expected before %s != %s after' % (self.before, self.after)

    def __precondition_failure_msg_for(self, condition):
        return '%s value did not change (%s)' % (
            condition,
            getattr(self, condition)
        )


class AssertsMixin(object):
    assertChanges = ChangeWatcher
// ... existing code ...
IRRELEVANT = object()
// ... modified code ...
        self.kwargs = kwargs

        self.expected_before = kwargs.pop('before', IRRELEVANT)
        self.expected_after = kwargs.pop('after', IRRELEVANT)
...
        if not self.expected_before is IRRELEVANT:
            check = self.before == self.expected_before
...
        if not self.expected_after is IRRELEVANT:
            check = self.after == self.expected_after
// ... rest of the code ...
8b2cb51c8913737c524e1b922aeb02c07bfb2afc
src/keybar/models/entry.py
src/keybar/models/entry.py
from django.db import models
from django.utils.translation import ugettext_lazy as _

from uuidfield import UUIDField

from keybar.utils.crypto import encrypt, get_salt


class Entry(models.Model):
    id = UUIDField(auto=True, primary_key=True)

    created_by = models.ForeignKey('keybar.User')

    title = models.TextField(_('Title'), blank=True, default='')
    url = models.URLField(blank=True, default='')
    identifier = models.TextField(_('Identifier for login'),
        help_text=_('Usually a username or email address'))
    value = models.TextField(_('The encrypted value for the entry.'),
        help_text=_('Usually a password.'))
    description = models.TextField(_('Description'), blank=True, default='')

    salt = models.BinaryField(null=True, blank=True)

    def set_value(self, password, value, salt=None):
        if salt is None:
            salt = get_salt()

        self.value = encrypt(value, password, salt)
        self.salt = salt
from django.db import models
from django.utils.translation import ugettext_lazy as _

from uuidfield import UUIDField

from keybar.utils.crypto import encrypt, decrypt, get_salt


class Entry(models.Model):
    id = UUIDField(auto=True, primary_key=True)

    created_by = models.ForeignKey('keybar.User')

    title = models.TextField(_('Title'), blank=True, default='')
    url = models.URLField(blank=True, default='')
    identifier = models.TextField(_('Identifier for login'),
        help_text=_('Usually a username or email address'))
    value = models.TextField(_('The encrypted value for the entry.'),
        help_text=_('Usually a password.'))
    description = models.TextField(_('Description'), blank=True, default='')

    salt = models.BinaryField(null=True, blank=True)

    def set_value(self, password, value, salt=None):
        if salt is None:
            salt = get_salt()

        self.value = encrypt(value, password, salt)
        self.salt = salt

    def decrypt(self, password):
        return decrypt(self.value, password, bytes(self.salt))
Add decrypt helper to Entry
Add decrypt helper to Entry
Python
bsd-3-clause
keybar/keybar
from django.db import models
from django.utils.translation import ugettext_lazy as _

from uuidfield import UUIDField

- from keybar.utils.crypto import encrypt, get_salt
+ from keybar.utils.crypto import encrypt, decrypt, get_salt


class Entry(models.Model):
    id = UUIDField(auto=True, primary_key=True)

    created_by = models.ForeignKey('keybar.User')

    title = models.TextField(_('Title'), blank=True, default='')
    url = models.URLField(blank=True, default='')
    identifier = models.TextField(_('Identifier for login'),
        help_text=_('Usually a username or email address'))
    value = models.TextField(_('The encrypted value for the entry.'),
        help_text=_('Usually a password.'))
    description = models.TextField(_('Description'), blank=True, default='')

    salt = models.BinaryField(null=True, blank=True)

    def set_value(self, password, value, salt=None):
        if salt is None:
            salt = get_salt()

        self.value = encrypt(value, password, salt)
        self.salt = salt

+     def decrypt(self, password):
+         return decrypt(self.value, password, bytes(self.salt))
+
Add decrypt helper to Entry
## Code Before:
from django.db import models
from django.utils.translation import ugettext_lazy as _

from uuidfield import UUIDField

from keybar.utils.crypto import encrypt, get_salt


class Entry(models.Model):
    id = UUIDField(auto=True, primary_key=True)

    created_by = models.ForeignKey('keybar.User')

    title = models.TextField(_('Title'), blank=True, default='')
    url = models.URLField(blank=True, default='')
    identifier = models.TextField(_('Identifier for login'),
        help_text=_('Usually a username or email address'))
    value = models.TextField(_('The encrypted value for the entry.'),
        help_text=_('Usually a password.'))
    description = models.TextField(_('Description'), blank=True, default='')

    salt = models.BinaryField(null=True, blank=True)

    def set_value(self, password, value, salt=None):
        if salt is None:
            salt = get_salt()

        self.value = encrypt(value, password, salt)
        self.salt = salt

## Instruction:
Add decrypt helper to Entry

## Code After:
from django.db import models
from django.utils.translation import ugettext_lazy as _

from uuidfield import UUIDField

from keybar.utils.crypto import encrypt, decrypt, get_salt


class Entry(models.Model):
    id = UUIDField(auto=True, primary_key=True)

    created_by = models.ForeignKey('keybar.User')

    title = models.TextField(_('Title'), blank=True, default='')
    url = models.URLField(blank=True, default='')
    identifier = models.TextField(_('Identifier for login'),
        help_text=_('Usually a username or email address'))
    value = models.TextField(_('The encrypted value for the entry.'),
        help_text=_('Usually a password.'))
    description = models.TextField(_('Description'), blank=True, default='')

    salt = models.BinaryField(null=True, blank=True)

    def set_value(self, password, value, salt=None):
        if salt is None:
            salt = get_salt()

        self.value = encrypt(value, password, salt)
        self.salt = salt

    def decrypt(self, password):
        return decrypt(self.value, password, bytes(self.salt))
...
from keybar.utils.crypto import encrypt, decrypt, get_salt
...
        self.salt = salt

    def decrypt(self, password):
        return decrypt(self.value, password, bytes(self.salt))
...
b4ce232f050de073572f64c04b170a2e790fdc24
nefertari_mongodb/serializers.py
nefertari_mongodb/serializers.py
import logging
import datetime
import decimal

import elasticsearch
from bson import ObjectId, DBRef
from nefertari.renderers import _JSONEncoder

log = logging.getLogger(__name__)


class JSONEncoder(_JSONEncoder):
    def default(self, obj):
        if isinstance(obj, (ObjectId, DBRef)):
            return str(obj)
        if isinstance(obj, decimal.Decimal):
            return float(obj)
        if isinstance(obj, (datetime.datetime, datetime.date)):
            return obj.strftime("%Y-%m-%dT%H:%M:%SZ")  # iso
        if isinstance(obj, datetime.time):
            return obj.strftime('%H:%M:%S')
        if isinstance(obj, datetime.timedelta):
            return obj.seconds

        if hasattr(obj, 'to_dict'):
            # If it got to this point, it means its a nested object.
            # outter objects would have been handled with DataProxy.
            return obj.to_dict(__nested=True)

        return super(JSONEncoder, self).default(obj)


class ESJSONSerializer(elasticsearch.serializer.JSONSerializer):
    def default(self, obj):
        if isinstance(obj, (ObjectId, DBRef)):
            return str(obj)
        if isinstance(obj, (datetime.datetime, datetime.date)):
            return obj.strftime("%Y-%m-%dT%H:%M:%SZ")  # iso
        if isinstance(obj, datetime.time):
            return obj.strftime('%H:%M:%S')
        if isinstance(obj, datetime.timedelta):
            return obj.seconds
        if isinstance(obj, decimal.Decimal):
            return float(obj)
        try:
            return super(ESJSONSerializer, self).default(obj)
        except:
            import traceback
            log.error(traceback.format_exc())
import logging
import datetime
import decimal

import elasticsearch
from bson import ObjectId, DBRef
from nefertari.renderers import _JSONEncoder

log = logging.getLogger(__name__)


class JSONEncoderMixin(object):
    def default(self, obj):
        if isinstance(obj, (ObjectId, DBRef)):
            return str(obj)
        if isinstance(obj, (datetime.datetime, datetime.date)):
            return obj.strftime("%Y-%m-%dT%H:%M:%SZ")  # iso
        if isinstance(obj, datetime.time):
            return obj.strftime('%H:%M:%S')
        if isinstance(obj, datetime.timedelta):
            return obj.seconds
        if isinstance(obj, decimal.Decimal):
            return float(obj)
        return super(JSONEncoderMixin, self).default(obj)


class JSONEncoder(JSONEncoderMixin, _JSONEncoder):
    def default(self, obj):
        if hasattr(obj, 'to_dict'):
            # If it got to this point, it means its a nested object.
            # outter objects would have been handled with DataProxy.
            return obj.to_dict(__nested=True)
        return super(JSONEncoder, self).default(obj)


class ESJSONSerializer(JSONEncoderMixin,
                       elasticsearch.serializer.JSONSerializer):
    def default(self, obj):
        try:
            return super(ESJSONSerializer, self).default(obj)
        except:
            import traceback
            log.error(traceback.format_exc())
Refactor encoders to have base class
Refactor encoders to have base class
Python
apache-2.0
brandicted/nefertari-mongodb,ramses-tech/nefertari-mongodb
import logging
import datetime
import decimal

import elasticsearch
from bson import ObjectId, DBRef
from nefertari.renderers import _JSONEncoder

log = logging.getLogger(__name__)


+ class JSONEncoderMixin(object):
- class JSONEncoder(_JSONEncoder):
-     def default(self, obj):
-         if isinstance(obj, (ObjectId, DBRef)):
-             return str(obj)
-         if isinstance(obj, decimal.Decimal):
-             return float(obj)
-         if isinstance(obj, (datetime.datetime, datetime.date)):
-             return obj.strftime("%Y-%m-%dT%H:%M:%SZ")  # iso
-         if isinstance(obj, datetime.time):
-             return obj.strftime('%H:%M:%S')
-         if isinstance(obj, datetime.timedelta):
-             return obj.seconds
-
-         if hasattr(obj, 'to_dict'):
-             # If it got to this point, it means its a nested object.
-             # outter objects would have been handled with DataProxy.
-             return obj.to_dict(__nested=True)
-
-         return super(JSONEncoder, self).default(obj)
-
-
- class ESJSONSerializer(elasticsearch.serializer.JSONSerializer):
    def default(self, obj):
        if isinstance(obj, (ObjectId, DBRef)):
            return str(obj)
        if isinstance(obj, (datetime.datetime, datetime.date)):
            return obj.strftime("%Y-%m-%dT%H:%M:%SZ")  # iso
        if isinstance(obj, datetime.time):
            return obj.strftime('%H:%M:%S')
        if isinstance(obj, datetime.timedelta):
            return obj.seconds
        if isinstance(obj, decimal.Decimal):
            return float(obj)
+         return super(JSONEncoderMixin, self).default(obj)
+
+
+ class JSONEncoder(JSONEncoderMixin, _JSONEncoder):
+     def default(self, obj):
+         if hasattr(obj, 'to_dict'):
+             # If it got to this point, it means its a nested object.
+             # outter objects would have been handled with DataProxy.
+             return obj.to_dict(__nested=True)
+         return super(JSONEncoder, self).default(obj)
+
+
+ class ESJSONSerializer(JSONEncoderMixin,
+                        elasticsearch.serializer.JSONSerializer):
+     def default(self, obj):
        try:
            return super(ESJSONSerializer, self).default(obj)
        except:
            import traceback
            log.error(traceback.format_exc())
Refactor encoders to have base class
## Code Before:
import logging
import datetime
import decimal

import elasticsearch
from bson import ObjectId, DBRef
from nefertari.renderers import _JSONEncoder

log = logging.getLogger(__name__)


class JSONEncoder(_JSONEncoder):
    def default(self, obj):
        if isinstance(obj, (ObjectId, DBRef)):
            return str(obj)
        if isinstance(obj, decimal.Decimal):
            return float(obj)
        if isinstance(obj, (datetime.datetime, datetime.date)):
            return obj.strftime("%Y-%m-%dT%H:%M:%SZ")  # iso
        if isinstance(obj, datetime.time):
            return obj.strftime('%H:%M:%S')
        if isinstance(obj, datetime.timedelta):
            return obj.seconds

        if hasattr(obj, 'to_dict'):
            # If it got to this point, it means its a nested object.
            # outter objects would have been handled with DataProxy.
            return obj.to_dict(__nested=True)

        return super(JSONEncoder, self).default(obj)


class ESJSONSerializer(elasticsearch.serializer.JSONSerializer):
    def default(self, obj):
        if isinstance(obj, (ObjectId, DBRef)):
            return str(obj)
        if isinstance(obj, (datetime.datetime, datetime.date)):
            return obj.strftime("%Y-%m-%dT%H:%M:%SZ")  # iso
        if isinstance(obj, datetime.time):
            return obj.strftime('%H:%M:%S')
        if isinstance(obj, datetime.timedelta):
            return obj.seconds
        if isinstance(obj, decimal.Decimal):
            return float(obj)
        try:
            return super(ESJSONSerializer, self).default(obj)
        except:
            import traceback
            log.error(traceback.format_exc())
## Instruction:
Refactor encoders to have base class
## Code After:
import logging
import datetime
import decimal

import elasticsearch
from bson import ObjectId, DBRef
from nefertari.renderers import _JSONEncoder

log = logging.getLogger(__name__)


class JSONEncoderMixin(object):
    def default(self, obj):
        if isinstance(obj, (ObjectId, DBRef)):
            return str(obj)
        if isinstance(obj, (datetime.datetime, datetime.date)):
            return obj.strftime("%Y-%m-%dT%H:%M:%SZ")  # iso
        if isinstance(obj, datetime.time):
            return obj.strftime('%H:%M:%S')
        if isinstance(obj, datetime.timedelta):
            return obj.seconds
        if isinstance(obj, decimal.Decimal):
            return float(obj)
        return super(JSONEncoderMixin, self).default(obj)


class JSONEncoder(JSONEncoderMixin, _JSONEncoder):
    def default(self, obj):
        if hasattr(obj, 'to_dict'):
            # If it got to this point, it means its a nested object.
            # outter objects would have been handled with DataProxy.
            return obj.to_dict(__nested=True)
        return super(JSONEncoder, self).default(obj)


class ESJSONSerializer(JSONEncoderMixin,
                       elasticsearch.serializer.JSONSerializer):
    def default(self, obj):
        try:
            return super(ESJSONSerializer, self).default(obj)
        except:
            import traceback
            log.error(traceback.format_exc())
# ... existing code ...
class JSONEncoderMixin(object):
    def default(self, obj):
# ... modified code ...
        return float(obj)
        return super(JSONEncoderMixin, self).default(obj)


class JSONEncoder(JSONEncoderMixin, _JSONEncoder):
    def default(self, obj):
        if hasattr(obj, 'to_dict'):
            # If it got to this point, it means its a nested object.
            # outter objects would have been handled with DataProxy.
            return obj.to_dict(__nested=True)
        return super(JSONEncoder, self).default(obj)


class ESJSONSerializer(JSONEncoderMixin,
                       elasticsearch.serializer.JSONSerializer):
    def default(self, obj):
        try:
# ... rest of the code ...
b1a21354735e3e4b58cf63c3fc81b6e8e2ee5ed7
concourse/scripts/builds/GpBuild.py
concourse/scripts/builds/GpBuild.py
import os
import subprocess
import sys

from GpdbBuildBase import GpdbBuildBase

class GpBuild(GpdbBuildBase):
    def __init__(self, mode):
        self.mode = 'on' if mode == 'orca' else 'off'

    def configure(self):
        return subprocess.call(["./configure",
                                "--enable-mapreduce",
                                "--with-perl",
                                "--with-libxml",
                                "--with-python",
                                "--disable-gpcloud",
                                "--prefix=/usr/local/gpdb"], cwd="gpdb_src")

    def icg(self):
        status = subprocess.call(
            "printf '\nLD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/lib\nexport \
LD_LIBRARY_PATH' >> /usr/local/gpdb/greenplum_path.sh", shell=True)
        if status:
            return status
        status = subprocess.call([
            "runuser gpadmin -c \"source /usr/local/gpdb/greenplum_path.sh \
&& make create-demo-cluster DEFAULT_QD_MAX_CONNECT=150\""],
            cwd="gpdb_src/gpAux/gpdemo", shell=True)
        if status:
            return status
        return subprocess.call([
            "runuser gpadmin -c \"source /usr/local/gpdb/greenplum_path.sh \
&& source gpAux/gpdemo/gpdemo-env.sh && PGOPTIONS='-c optimizer={0}' \
make -C src/test installcheck-good\"".format(self.mode)],
            cwd="gpdb_src", shell=True)
import os
import subprocess
import sys

from GpdbBuildBase import GpdbBuildBase

class GpBuild(GpdbBuildBase):
    def __init__(self, mode):
        self.mode = 'on' if mode == 'orca' else 'off'

    def configure(self):
        return subprocess.call(["./configure",
                                "--enable-mapreduce",
                                "--with-perl",
                                "--with-libxml",
                                "--with-python",
                                "--disable-gpcloud",
                                "--disable-pxf",
                                "--prefix=/usr/local/gpdb"], cwd="gpdb_src")

    def icg(self):
        status = subprocess.call(
            "printf '\nLD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/lib\nexport \
LD_LIBRARY_PATH' >> /usr/local/gpdb/greenplum_path.sh", shell=True)
        if status:
            return status
        status = subprocess.call([
            "runuser gpadmin -c \"source /usr/local/gpdb/greenplum_path.sh \
&& make create-demo-cluster DEFAULT_QD_MAX_CONNECT=150\""],
            cwd="gpdb_src/gpAux/gpdemo", shell=True)
        if status:
            return status
        return subprocess.call([
            "runuser gpadmin -c \"source /usr/local/gpdb/greenplum_path.sh \
&& source gpAux/gpdemo/gpdemo-env.sh && PGOPTIONS='-c optimizer={0}' \
make -C src/test installcheck-good\"".format(self.mode)],
            cwd="gpdb_src", shell=True)
Disable PXF in ORCA CI
Disable PXF in ORCA CI
Python
apache-2.0
greenplum-db/gpdb,xinzweb/gpdb,adam8157/gpdb,adam8157/gpdb,greenplum-db/gpdb,lisakowen/gpdb,yuanzhao/gpdb,yuanzhao/gpdb,lisakowen/gpdb,lisakowen/gpdb,Chibin/gpdb,lisakowen/gpdb,Chibin/gpdb,adam8157/gpdb,Chibin/gpdb,Chibin/gpdb,xinzweb/gpdb,ashwinstar/gpdb,jmcatamney/gpdb,edespino/gpdb,janebeckman/gpdb,janebeckman/gpdb,greenplum-db/gpdb,xinzweb/gpdb,xinzweb/gpdb,greenplum-db/gpdb,jmcatamney/gpdb,ashwinstar/gpdb,xinzweb/gpdb,ashwinstar/gpdb,jmcatamney/gpdb,Chibin/gpdb,janebeckman/gpdb,Chibin/gpdb,edespino/gpdb,xinzweb/gpdb,Chibin/gpdb,adam8157/gpdb,janebeckman/gpdb,Chibin/gpdb,edespino/gpdb,lisakowen/gpdb,edespino/gpdb,50wu/gpdb,janebeckman/gpdb,yuanzhao/gpdb,adam8157/gpdb,janebeckman/gpdb,ashwinstar/gpdb,adam8157/gpdb,greenplum-db/gpdb,janebeckman/gpdb,adam8157/gpdb,ashwinstar/gpdb,lisakowen/gpdb,edespino/gpdb,50wu/gpdb,adam8157/gpdb,jmcatamney/gpdb,jmcatamney/gpdb,yuanzhao/gpdb,ashwinstar/gpdb,lisakowen/gpdb,yuanzhao/gpdb,ashwinstar/gpdb,janebeckman/gpdb,janebeckman/gpdb,edespino/gpdb,greenplum-db/gpdb,edespino/gpdb,jmcatamney/gpdb,yuanzhao/gpdb,Chibin/gpdb,50wu/gpdb,greenplum-db/gpdb,greenplum-db/gpdb,yuanzhao/gpdb,ashwinstar/gpdb,xinzweb/gpdb,50wu/gpdb,jmcatamney/gpdb,janebeckman/gpdb,50wu/gpdb,xinzweb/gpdb,jmcatamney/gpdb,yuanzhao/gpdb,edespino/gpdb,edespino/gpdb,50wu/gpdb,lisakowen/gpdb,50wu/gpdb,yuanzhao/gpdb,edespino/gpdb,50wu/gpdb,Chibin/gpdb,yuanzhao/gpdb
import os
import subprocess
import sys

from GpdbBuildBase import GpdbBuildBase

class GpBuild(GpdbBuildBase):
    def __init__(self, mode):
        self.mode = 'on' if mode == 'orca' else 'off'

    def configure(self):
        return subprocess.call(["./configure",
                                "--enable-mapreduce",
                                "--with-perl",
                                "--with-libxml",
                                "--with-python",
                                "--disable-gpcloud",
+                                "--disable-pxf",
                                "--prefix=/usr/local/gpdb"], cwd="gpdb_src")

    def icg(self):
        status = subprocess.call(
            "printf '\nLD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/lib\nexport \
LD_LIBRARY_PATH' >> /usr/local/gpdb/greenplum_path.sh", shell=True)
        if status:
            return status
        status = subprocess.call([
            "runuser gpadmin -c \"source /usr/local/gpdb/greenplum_path.sh \
&& make create-demo-cluster DEFAULT_QD_MAX_CONNECT=150\""],
            cwd="gpdb_src/gpAux/gpdemo", shell=True)
        if status:
            return status
        return subprocess.call([
            "runuser gpadmin -c \"source /usr/local/gpdb/greenplum_path.sh \
&& source gpAux/gpdemo/gpdemo-env.sh && PGOPTIONS='-c optimizer={0}' \
make -C src/test installcheck-good\"".format(self.mode)],
            cwd="gpdb_src", shell=True)
Disable PXF in ORCA CI
## Code Before:
import os
import subprocess
import sys

from GpdbBuildBase import GpdbBuildBase

class GpBuild(GpdbBuildBase):
    def __init__(self, mode):
        self.mode = 'on' if mode == 'orca' else 'off'

    def configure(self):
        return subprocess.call(["./configure",
                                "--enable-mapreduce",
                                "--with-perl",
                                "--with-libxml",
                                "--with-python",
                                "--disable-gpcloud",
                                "--prefix=/usr/local/gpdb"], cwd="gpdb_src")

    def icg(self):
        status = subprocess.call(
            "printf '\nLD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/lib\nexport \
LD_LIBRARY_PATH' >> /usr/local/gpdb/greenplum_path.sh", shell=True)
        if status:
            return status
        status = subprocess.call([
            "runuser gpadmin -c \"source /usr/local/gpdb/greenplum_path.sh \
&& make create-demo-cluster DEFAULT_QD_MAX_CONNECT=150\""],
            cwd="gpdb_src/gpAux/gpdemo", shell=True)
        if status:
            return status
        return subprocess.call([
            "runuser gpadmin -c \"source /usr/local/gpdb/greenplum_path.sh \
&& source gpAux/gpdemo/gpdemo-env.sh && PGOPTIONS='-c optimizer={0}' \
make -C src/test installcheck-good\"".format(self.mode)],
            cwd="gpdb_src", shell=True)
## Instruction:
Disable PXF in ORCA CI
## Code After:
import os
import subprocess
import sys

from GpdbBuildBase import GpdbBuildBase

class GpBuild(GpdbBuildBase):
    def __init__(self, mode):
        self.mode = 'on' if mode == 'orca' else 'off'

    def configure(self):
        return subprocess.call(["./configure",
                                "--enable-mapreduce",
                                "--with-perl",
                                "--with-libxml",
                                "--with-python",
                                "--disable-gpcloud",
                                "--disable-pxf",
                                "--prefix=/usr/local/gpdb"], cwd="gpdb_src")

    def icg(self):
        status = subprocess.call(
            "printf '\nLD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/lib\nexport \
LD_LIBRARY_PATH' >> /usr/local/gpdb/greenplum_path.sh", shell=True)
        if status:
            return status
        status = subprocess.call([
            "runuser gpadmin -c \"source /usr/local/gpdb/greenplum_path.sh \
&& make create-demo-cluster DEFAULT_QD_MAX_CONNECT=150\""],
            cwd="gpdb_src/gpAux/gpdemo", shell=True)
        if status:
            return status
        return subprocess.call([
            "runuser gpadmin -c \"source /usr/local/gpdb/greenplum_path.sh \
&& source gpAux/gpdemo/gpdemo-env.sh && PGOPTIONS='-c optimizer={0}' \
make -C src/test installcheck-good\"".format(self.mode)],
            cwd="gpdb_src", shell=True)
// ... existing code ...
                                "--disable-gpcloud",
                                "--disable-pxf",
                                "--prefix=/usr/local/gpdb"], cwd="gpdb_src")
// ... rest of the code ...
c4109fadf0a66db5af0e579600a70e4b7e28493d
csdms/dakota/experiment.py
csdms/dakota/experiment.py
"""A template for describing a Dakota experiment.""" import os import importlib import inspect blocks = ['environment', 'method', 'variables', 'interface', 'responses'] class Experiment(object): """Describe parameters to create an input file for a Dakota experiment.""" def __init__(self, method='vector_parameter_study', variables='continuous_design', interface='direct', responses='response_functions', **kwargs): """Create a set of default experiment parameters.""" self.environment = self._import('environment', 'environment', **kwargs) self.method = self._import('method', method, **kwargs) self.variables = self._import('variables', variables, **kwargs) self.interface = self._import('interface', interface, **kwargs) self.responses = self._import('responses', responses, **kwargs) def _get_subpackage_namespace(self, subpackage): return os.path.splitext(self.__module__)[0] + '.' + subpackage def _import(self, subpackage, module, **kwargs): namespace = self._get_subpackage_namespace(subpackage) + '.' + module module = importlib.import_module(namespace) cls = getattr(module, module.classname) return cls(**kwargs) def __str__(self): s = '# Dakota input file\n' for section in blocks: s += str(getattr(self, section)) return s
"""A template for describing a Dakota experiment.""" import os import importlib class Experiment(object): """Describe parameters to create an input file for a Dakota experiment.""" def __init__(self, environment='environment', method='vector_parameter_study', variables='continuous_design', interface='direct', responses='response_functions', **kwargs): """Create a set of default experiment parameters.""" self._blocks = ('environment', 'method', 'variables', 'interface', 'responses') for section in self._blocks: cls = self._import(section, eval(section), **kwargs) setattr(self, section, cls) def _get_subpackage_namespace(self, subpackage): return os.path.splitext(self.__module__)[0] + '.' + subpackage def _import(self, subpackage, module, **kwargs): namespace = self._get_subpackage_namespace(subpackage) + '.' + module module = importlib.import_module(namespace) cls = getattr(module, module.classname) return cls(**kwargs) def __str__(self): s = '# Dakota input file\n' for section in self._blocks: s += str(getattr(self, section)) return s
Refactor init method with _blocks attribute
Refactor init method with _blocks attribute
Python
mit
csdms/dakota,csdms/dakota
"""A template for describing a Dakota experiment.""" import os import importlib - import inspect - - - blocks = ['environment', 'method', 'variables', 'interface', 'responses'] class Experiment(object): """Describe parameters to create an input file for a Dakota experiment.""" def __init__(self, + environment='environment', method='vector_parameter_study', variables='continuous_design', interface='direct', responses='response_functions', **kwargs): """Create a set of default experiment parameters.""" - self.environment = self._import('environment', 'environment', **kwargs) - self.method = self._import('method', method, **kwargs) - self.variables = self._import('variables', variables, **kwargs) - self.interface = self._import('interface', interface, **kwargs) - self.responses = self._import('responses', responses, **kwargs) + self._blocks = ('environment', 'method', 'variables', + 'interface', 'responses') + for section in self._blocks: + cls = self._import(section, eval(section), **kwargs) + setattr(self, section, cls) def _get_subpackage_namespace(self, subpackage): return os.path.splitext(self.__module__)[0] + '.' + subpackage def _import(self, subpackage, module, **kwargs): namespace = self._get_subpackage_namespace(subpackage) + '.' + module module = importlib.import_module(namespace) cls = getattr(module, module.classname) return cls(**kwargs) def __str__(self): s = '# Dakota input file\n' - for section in blocks: + for section in self._blocks: s += str(getattr(self, section)) return s
Refactor init method with _blocks attribute
## Code Before:
"""A template for describing a Dakota experiment."""

import os
import importlib
import inspect


blocks = ['environment', 'method', 'variables', 'interface', 'responses']


class Experiment(object):
    """Describe parameters to create an input file for a Dakota experiment."""

    def __init__(self,
                 method='vector_parameter_study',
                 variables='continuous_design',
                 interface='direct',
                 responses='response_functions',
                 **kwargs):
        """Create a set of default experiment parameters."""
        self.environment = self._import('environment', 'environment', **kwargs)
        self.method = self._import('method', method, **kwargs)
        self.variables = self._import('variables', variables, **kwargs)
        self.interface = self._import('interface', interface, **kwargs)
        self.responses = self._import('responses', responses, **kwargs)

    def _get_subpackage_namespace(self, subpackage):
        return os.path.splitext(self.__module__)[0] + '.' + subpackage

    def _import(self, subpackage, module, **kwargs):
        namespace = self._get_subpackage_namespace(subpackage) + '.' + module
        module = importlib.import_module(namespace)
        cls = getattr(module, module.classname)
        return cls(**kwargs)

    def __str__(self):
        s = '# Dakota input file\n'
        for section in blocks:
            s += str(getattr(self, section))
        return s
## Instruction:
Refactor init method with _blocks attribute
## Code After:
"""A template for describing a Dakota experiment."""

import os
import importlib


class Experiment(object):
    """Describe parameters to create an input file for a Dakota experiment."""

    def __init__(self,
                 environment='environment',
                 method='vector_parameter_study',
                 variables='continuous_design',
                 interface='direct',
                 responses='response_functions',
                 **kwargs):
        """Create a set of default experiment parameters."""
        self._blocks = ('environment', 'method', 'variables',
                        'interface', 'responses')
        for section in self._blocks:
            cls = self._import(section, eval(section), **kwargs)
            setattr(self, section, cls)

    def _get_subpackage_namespace(self, subpackage):
        return os.path.splitext(self.__module__)[0] + '.' + subpackage

    def _import(self, subpackage, module, **kwargs):
        namespace = self._get_subpackage_namespace(subpackage) + '.' + module
        module = importlib.import_module(namespace)
        cls = getattr(module, module.classname)
        return cls(**kwargs)

    def __str__(self):
        s = '# Dakota input file\n'
        for section in self._blocks:
            s += str(getattr(self, section))
        return s
...
import importlib
...
    def __init__(self,
                 environment='environment',
                 method='vector_parameter_study',
...
        """Create a set of default experiment parameters."""
        self._blocks = ('environment', 'method', 'variables',
                        'interface', 'responses')
        for section in self._blocks:
            cls = self._import(section, eval(section), **kwargs)
            setattr(self, section, cls)
...
        s = '# Dakota input file\n'
        for section in self._blocks:
            s += str(getattr(self, section))
...
c0eb0f902b0fcbea29c8a3bf70f80ca9384cce9f
scripts/remove_after_use/send_mendeley_reauth_email.py
scripts/remove_after_use/send_mendeley_reauth_email.py
import sys
import logging

from website.app import setup_django
setup_django()
from website import mails
from osf.models import OSFUser
from addons.mendeley.models import UserSettings
import progressbar

from scripts import utils as script_utils

logger = logging.getLogger(__name__)


def main(dry=True):
    user = OSFUser.load('qrgl2')
    qs = UserSettings.objects.filter(owner__is_active=True).select_related('owner')
    pbar = progressbar.ProgressBar(maxval=qs.count()).start()
    for i, each in enumerate(qs):
        user = each.owner
        logger.info('Sending email to OSFUser {}'.format(user._id))
        if not dry:
            mails.send_mail(
                mail=mails.MENDELEY_REAUTH,
                to_addr=user.username,
                can_change_preferences=False,
                user=user
            )
        pbar.update(i + 1)

if __name__ == '__main__':
    dry = '--dry' in sys.argv
    if not dry:
        script_utils.add_file_logger(logger, __file__)
    main(dry=dry)
import sys
import logging

from website.app import setup_django
setup_django()
from website import mails
from osf.models import OSFUser
from addons.mendeley.models import UserSettings
import progressbar

from scripts import utils as script_utils

logger = logging.getLogger(__name__)


def main(dry=True):
    qs = UserSettings.objects.filter(owner__is_active=True).select_related('owner').order_by('pk')
    count = qs.count()
    pbar = progressbar.ProgressBar(maxval=count).start()
    logger.info('Sending email to {} users'.format(count))
    for i, each in enumerate(qs):
        user = each.owner
        logger.info('Sending email to OSFUser {}'.format(user._id))
        if not dry:
            mails.send_mail(
                mail=mails.MENDELEY_REAUTH,
                to_addr=user.username,
                can_change_preferences=False,
                user=user
            )
        pbar.update(i + 1)
    logger.info('Sent email to {} users'.format(count))

if __name__ == '__main__':
    dry = '--dry' in sys.argv
    if not dry:
        script_utils.add_file_logger(logger, __file__)
    main(dry=dry)
Remove junk and add more logging
Remove junk and add more logging
Python
apache-2.0
cslzchen/osf.io,icereval/osf.io,brianjgeiger/osf.io,mattclark/osf.io,pattisdr/osf.io,Johnetordoff/osf.io,baylee-d/osf.io,adlius/osf.io,cslzchen/osf.io,mattclark/osf.io,saradbowman/osf.io,brianjgeiger/osf.io,CenterForOpenScience/osf.io,adlius/osf.io,mfraezz/osf.io,brianjgeiger/osf.io,CenterForOpenScience/osf.io,baylee-d/osf.io,icereval/osf.io,erinspace/osf.io,felliott/osf.io,brianjgeiger/osf.io,aaxelb/osf.io,saradbowman/osf.io,felliott/osf.io,Johnetordoff/osf.io,caseyrollins/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,pattisdr/osf.io,Johnetordoff/osf.io,aaxelb/osf.io,caseyrollins/osf.io,HalcyonChimera/osf.io,HalcyonChimera/osf.io,aaxelb/osf.io,cslzchen/osf.io,baylee-d/osf.io,felliott/osf.io,mfraezz/osf.io,pattisdr/osf.io,sloria/osf.io,felliott/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,sloria/osf.io,icereval/osf.io,sloria/osf.io,cslzchen/osf.io,caseyrollins/osf.io,erinspace/osf.io,mfraezz/osf.io,HalcyonChimera/osf.io,CenterForOpenScience/osf.io,mattclark/osf.io,erinspace/osf.io,adlius/osf.io,adlius/osf.io,HalcyonChimera/osf.io
import sys
import logging

from website.app import setup_django
setup_django()
from website import mails
from osf.models import OSFUser
from addons.mendeley.models import UserSettings
import progressbar

from scripts import utils as script_utils

logger = logging.getLogger(__name__)


def main(dry=True):
-    user = OSFUser.load('qrgl2')
-    qs = UserSettings.objects.filter(owner__is_active=True).select_related('owner')
+    qs = UserSettings.objects.filter(owner__is_active=True).select_related('owner').order_by('pk')
+    count = qs.count()
-    pbar = progressbar.ProgressBar(maxval=qs.count()).start()
+    pbar = progressbar.ProgressBar(maxval=count).start()
+    logger.info('Sending email to {} users'.format(count))
    for i, each in enumerate(qs):
        user = each.owner
        logger.info('Sending email to OSFUser {}'.format(user._id))
        if not dry:
            mails.send_mail(
                mail=mails.MENDELEY_REAUTH,
                to_addr=user.username,
                can_change_preferences=False,
                user=user
            )
        pbar.update(i + 1)
+    logger.info('Sent email to {} users'.format(count))

if __name__ == '__main__':
    dry = '--dry' in sys.argv
    if not dry:
        script_utils.add_file_logger(logger, __file__)
    main(dry=dry)
Remove junk and add more logging
## Code Before:
import sys
import logging

from website.app import setup_django
setup_django()
from website import mails
from osf.models import OSFUser
from addons.mendeley.models import UserSettings
import progressbar

from scripts import utils as script_utils

logger = logging.getLogger(__name__)


def main(dry=True):
    user = OSFUser.load('qrgl2')
    qs = UserSettings.objects.filter(owner__is_active=True).select_related('owner')
    pbar = progressbar.ProgressBar(maxval=qs.count()).start()
    for i, each in enumerate(qs):
        user = each.owner
        logger.info('Sending email to OSFUser {}'.format(user._id))
        if not dry:
            mails.send_mail(
                mail=mails.MENDELEY_REAUTH,
                to_addr=user.username,
                can_change_preferences=False,
                user=user
            )
        pbar.update(i + 1)

if __name__ == '__main__':
    dry = '--dry' in sys.argv
    if not dry:
        script_utils.add_file_logger(logger, __file__)
    main(dry=dry)
## Instruction:
Remove junk and add more logging
## Code After:
import sys
import logging

from website.app import setup_django
setup_django()
from website import mails
from osf.models import OSFUser
from addons.mendeley.models import UserSettings
import progressbar

from scripts import utils as script_utils

logger = logging.getLogger(__name__)


def main(dry=True):
    qs = UserSettings.objects.filter(owner__is_active=True).select_related('owner').order_by('pk')
    count = qs.count()
    pbar = progressbar.ProgressBar(maxval=count).start()
    logger.info('Sending email to {} users'.format(count))
    for i, each in enumerate(qs):
        user = each.owner
        logger.info('Sending email to OSFUser {}'.format(user._id))
        if not dry:
            mails.send_mail(
                mail=mails.MENDELEY_REAUTH,
                to_addr=user.username,
                can_change_preferences=False,
                user=user
            )
        pbar.update(i + 1)
    logger.info('Sent email to {} users'.format(count))

if __name__ == '__main__':
    dry = '--dry' in sys.argv
    if not dry:
        script_utils.add_file_logger(logger, __file__)
    main(dry=dry)
...
def main(dry=True):
    qs = UserSettings.objects.filter(owner__is_active=True).select_related('owner').order_by('pk')
    count = qs.count()
    pbar = progressbar.ProgressBar(maxval=count).start()
    logger.info('Sending email to {} users'.format(count))
    for i, each in enumerate(qs):
...
        pbar.update(i + 1)
    logger.info('Sent email to {} users'.format(count))
...
33f4036825c6ff4d9df0038471727648e0df100d
feder/virus_scan/engine/base.py
feder/virus_scan/engine/base.py
from django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse
from django.core.signing import TimestampSigner


class BaseEngine:
    def __init__(self):
        self.signer = TimestampSigner()

    def get_webhook_url(self):
        return "{}://{}{}?token={}".format(
            "https",
            get_current_site(None).domain,
            reverse("virus_scan:webhook"),
            self.signer.sign(self.name),
        )

    def send_scan(self, this_file, filename):
        raise NotImplementedError(
            "Provide 'send' in {name}".format(name=self.__class__.__name__)
        )

    def receive_scan(self, engine_id):
        raise NotImplementedError(
            "Provide 'receive_scan' in {name}".format(name=self.__class__.__name__)
        )
import urllib.parse

from django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse
from django.core.signing import TimestampSigner


class BaseEngine:
    def __init__(self):
        self.signer = TimestampSigner()

    def get_webhook_url(self):
        return "{}://{}{}?token={}".format(
            "https",
            get_current_site(None).domain,
            reverse("virus_scan:webhook"),
            urllib.parse.quote(self.signer.sign(self.name)),
        )

    def send_scan(self, this_file, filename):
        raise NotImplementedError(
            "Provide 'send' in {name}".format(name=self.__class__.__name__)
        )

    def receive_scan(self, engine_id):
        raise NotImplementedError(
            "Provide 'receive_scan' in {name}".format(name=self.__class__.__name__)
        )
Fix urlencode in webhook url
Fix urlencode in webhook url
Python
mit
watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder
+import urllib.parse
+
from django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse
from django.core.signing import TimestampSigner


class BaseEngine:
    def __init__(self):
        self.signer = TimestampSigner()

    def get_webhook_url(self):
        return "{}://{}{}?token={}".format(
            "https",
            get_current_site(None).domain,
            reverse("virus_scan:webhook"),
-            self.signer.sign(self.name),
+            urllib.parse.quote(self.signer.sign(self.name)),
        )

    def send_scan(self, this_file, filename):
        raise NotImplementedError(
            "Provide 'send' in {name}".format(name=self.__class__.__name__)
        )

    def receive_scan(self, engine_id):
        raise NotImplementedError(
            "Provide 'receive_scan' in {name}".format(name=self.__class__.__name__)
        )
Fix urlencode in webhook url
## Code Before:
from django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse
from django.core.signing import TimestampSigner


class BaseEngine:
    def __init__(self):
        self.signer = TimestampSigner()

    def get_webhook_url(self):
        return "{}://{}{}?token={}".format(
            "https",
            get_current_site(None).domain,
            reverse("virus_scan:webhook"),
            self.signer.sign(self.name),
        )

    def send_scan(self, this_file, filename):
        raise NotImplementedError(
            "Provide 'send' in {name}".format(name=self.__class__.__name__)
        )

    def receive_scan(self, engine_id):
        raise NotImplementedError(
            "Provide 'receive_scan' in {name}".format(name=self.__class__.__name__)
        )
## Instruction:
Fix urlencode in webhook url
## Code After:
import urllib.parse

from django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse
from django.core.signing import TimestampSigner


class BaseEngine:
    def __init__(self):
        self.signer = TimestampSigner()

    def get_webhook_url(self):
        return "{}://{}{}?token={}".format(
            "https",
            get_current_site(None).domain,
            reverse("virus_scan:webhook"),
            urllib.parse.quote(self.signer.sign(self.name)),
        )

    def send_scan(self, this_file, filename):
        raise NotImplementedError(
            "Provide 'send' in {name}".format(name=self.__class__.__name__)
        )

    def receive_scan(self, engine_id):
        raise NotImplementedError(
            "Provide 'receive_scan' in {name}".format(name=self.__class__.__name__)
        )
// ... existing code ...
import urllib.parse

from django.contrib.sites.shortcuts import get_current_site
// ... modified code ...
            reverse("virus_scan:webhook"),
            urllib.parse.quote(self.signer.sign(self.name)),
        )
// ... rest of the code ...
b8cf6f096e14ee7311c18117d57f98b1745b8105
pyuvdata/__init__.py
pyuvdata/__init__.py
from __future__ import absolute_import, division, print_function

from .uvdata import *
from .telescopes import *
from .uvcal import *
from .uvbeam import *
from . import version

# Filter annoying Cython warnings that serve no good purpose. see numpy#432
import warnings
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")

__version__ = version.version
from __future__ import absolute_import, division, print_function

# Filter annoying Cython warnings that serve no good purpose. see numpy#432
import warnings
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")

from .uvdata import *
from .telescopes import *
from .uvcal import *
from .uvbeam import *
from . import version

__version__ = version.version
Move warning filter above other imports in init
Move warning filter above other imports in init
Python
bsd-2-clause
HERA-Team/pyuvdata,HERA-Team/pyuvdata,HERA-Team/pyuvdata,HERA-Team/pyuvdata
from __future__ import absolute_import, division, print_function
+
+# Filter annoying Cython warnings that serve no good purpose. see numpy#432
+import warnings
+warnings.filterwarnings("ignore", message="numpy.dtype size changed")
+warnings.filterwarnings("ignore", message="numpy.ufunc size changed")

from .uvdata import *
from .telescopes import *
from .uvcal import *
from .uvbeam import *
from . import version

-# Filter annoying Cython warnings that serve no good purpose. see numpy#432
-import warnings
-warnings.filterwarnings("ignore", message="numpy.dtype size changed")
-warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
-
__version__ = version.version
Move warning filter above other imports in init
## Code Before:
from __future__ import absolute_import, division, print_function

from .uvdata import *
from .telescopes import *
from .uvcal import *
from .uvbeam import *
from . import version

# Filter annoying Cython warnings that serve no good purpose. see numpy#432
import warnings
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")

__version__ = version.version
## Instruction:
Move warning filter above other imports in init
## Code After:
from __future__ import absolute_import, division, print_function

# Filter annoying Cython warnings that serve no good purpose. see numpy#432
import warnings
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")

from .uvdata import *
from .telescopes import *
from .uvcal import *
from .uvbeam import *
from . import version

__version__ = version.version
# ... existing code ...
from __future__ import absolute_import, division, print_function

# Filter annoying Cython warnings that serve no good purpose. see numpy#432
import warnings
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
# ... modified code ...
__version__ = version.version
# ... rest of the code ...
dbfe5fcb87762d68580756d6466bc61fa8ab4a56
histomicstk/preprocessing/color_deconvolution/utils.py
histomicstk/preprocessing/color_deconvolution/utils.py
import numpy

from .stain_color_map import stain_color_map


def get_stain_vector(args, index):
    """Get the stain corresponding to args.stain_$index and
    args.stain_$index_vector. If the former is not "custom", the
    latter must be None.

    """
    args = vars(args)
    stain = args['stain_' + str(index)]
    stain_vector = args['stain_' + str(index) + '_vector']
    if stain == 'custom':
        if stain_vector is None:
            raise ValueError('If "custom" is chosen for a stain, '
                             'a stain vector must be provided.')
        return stain_vector
    else:
        if stain_vector is None:
            return stain_color_map[stain]
        raise ValueError('Unless "custom" is chosen for a stain, '
                         'no stain vector may be provided.')


def get_stain_matrix(args):
    """Get the stain matrix corresponding to the args.stain_$index and
    args.stain_$index_vector arguments for values of index 1, 2, 3.
    Return a numpy array of column vectors.

    """
    return numpy.array([get_stain_vector(args, i) for i in 1, 2, 3]).T


__all__ = (
    'get_stain_vector',
)
import numpy

from .stain_color_map import stain_color_map


def get_stain_vector(args, index):
    """Get the stain corresponding to args.stain_$index and
    args.stain_$index_vector. If the former is not "custom", the
    latter must be None.

    """
    args = vars(args)
    stain = args['stain_' + str(index)]
    stain_vector = args['stain_' + str(index) + '_vector']
    if stain == 'custom':
        if stain_vector is None:
            raise ValueError('If "custom" is chosen for a stain, '
                             'a stain vector must be provided.')
        return stain_vector
    else:
        if stain_vector is None:
            return stain_color_map[stain]
        raise ValueError('Unless "custom" is chosen for a stain, '
                         'no stain vector may be provided.')


def get_stain_matrix(args, count=3):
    """Get the stain matrix corresponding to the args.stain_$index and
    args.stain_$index_vector arguments for values of index 1 to count.
    Return a numpy array of column vectors.

    """
    return numpy.array([get_stain_vector(args, i+1) for i in range(count)]).T


__all__ = (
    'get_stain_vector',
)
Enhance get_stain_matrix to take any desired number of vectors
Enhance get_stain_matrix to take any desired number of vectors
Python
apache-2.0
DigitalSlideArchive/HistomicsTK,DigitalSlideArchive/HistomicsTK
import numpy

from .stain_color_map import stain_color_map


def get_stain_vector(args, index):
    """Get the stain corresponding to args.stain_$index and
    args.stain_$index_vector. If the former is not "custom", the
    latter must be None.

    """
    args = vars(args)
    stain = args['stain_' + str(index)]
    stain_vector = args['stain_' + str(index) + '_vector']
    if stain == 'custom':
        if stain_vector is None:
            raise ValueError('If "custom" is chosen for a stain, '
                             'a stain vector must be provided.')
        return stain_vector
    else:
        if stain_vector is None:
            return stain_color_map[stain]
        raise ValueError('Unless "custom" is chosen for a stain, '
                         'no stain vector may be provided.')


-def get_stain_matrix(args):
+def get_stain_matrix(args, count=3):
    """Get the stain matrix corresponding to the args.stain_$index and
-    args.stain_$index_vector arguments for values of index 1, 2, 3.
+    args.stain_$index_vector arguments for values of index 1 to count.
    Return a numpy array of column vectors.

    """
-    return numpy.array([get_stain_vector(args, i) for i in 1, 2, 3]).T
+    return numpy.array([get_stain_vector(args, i+1) for i in range(count)]).T


__all__ = (
    'get_stain_vector',
)
Enhance get_stain_matrix to take any desired number of vectors
## Code Before:
import numpy

from .stain_color_map import stain_color_map


def get_stain_vector(args, index):
    """Get the stain corresponding to args.stain_$index and
    args.stain_$index_vector. If the former is not "custom", the
    latter must be None.

    """
    args = vars(args)
    stain = args['stain_' + str(index)]
    stain_vector = args['stain_' + str(index) + '_vector']
    if stain == 'custom':
        if stain_vector is None:
            raise ValueError('If "custom" is chosen for a stain, '
                             'a stain vector must be provided.')
        return stain_vector
    else:
        if stain_vector is None:
            return stain_color_map[stain]
        raise ValueError('Unless "custom" is chosen for a stain, '
                         'no stain vector may be provided.')


def get_stain_matrix(args):
    """Get the stain matrix corresponding to the args.stain_$index and
    args.stain_$index_vector arguments for values of index 1, 2, 3.
    Return a numpy array of column vectors.

    """
    return numpy.array([get_stain_vector(args, i) for i in 1, 2, 3]).T


__all__ = (
    'get_stain_vector',
)
## Instruction:
Enhance get_stain_matrix to take any desired number of vectors
## Code After:
import numpy

from .stain_color_map import stain_color_map


def get_stain_vector(args, index):
    """Get the stain corresponding to args.stain_$index and
    args.stain_$index_vector. If the former is not "custom", the
    latter must be None.

    """
    args = vars(args)
    stain = args['stain_' + str(index)]
    stain_vector = args['stain_' + str(index) + '_vector']
    if stain == 'custom':
        if stain_vector is None:
            raise ValueError('If "custom" is chosen for a stain, '
                             'a stain vector must be provided.')
        return stain_vector
    else:
        if stain_vector is None:
            return stain_color_map[stain]
        raise ValueError('Unless "custom" is chosen for a stain, '
                         'no stain vector may be provided.')


def get_stain_matrix(args, count=3):
    """Get the stain matrix corresponding to the args.stain_$index and
    args.stain_$index_vector arguments for values of index 1 to count.
    Return a numpy array of column vectors.

    """
    return numpy.array([get_stain_vector(args, i+1) for i in range(count)]).T


__all__ = (
    'get_stain_vector',
)
# ... existing code ...
def get_stain_matrix(args, count=3):
    """Get the stain matrix corresponding to the args.stain_$index and
    args.stain_$index_vector arguments for values of index 1 to count.
    Return a numpy array of column vectors.
# ... modified code ...
    """
    return numpy.array([get_stain_vector(args, i+1) for i in range(count)]).T
# ... rest of the code ...
63f40971f8bc4858b32b41595d14315d2261169f
proselint/checks/garner/mondegreens.py
proselint/checks/garner/mondegreens.py
from tools import memoize, preferred_forms_check


@memoize
def check(text):
    """Suggest the preferred forms."""
    err = "garner.mondegreens"
    msg = "'{}' is the preferred form."

    list = [
        ["a girl with colitis goes by",   "a girl with kaleidascope eyes"],
        ["a partridge in a pear tree",    "a part-red gingerbread tree"],
        ["attorney and not a republic",   "attorney and notary public"],
        ["beck and call",                 "beckon call"],
        ["for all intents and purposes",  "for all intensive purposes"],
        ["laid him on the green",         "Lady Mondegreen"],
        ["Olive, the other reindeer",     "all of the other reindeer"],
        ["to the manner born",            "to the manor born"],
    ]

    return preferred_forms_check(text, list, err, msg)
from tools import memoize, preferred_forms_check


@memoize
def check(text):
    """Suggest the preferred forms."""
    err = "garner.mondegreens"
    msg = "'{}' is the preferred form."

    list = [
        ["a girl with kaleidascope eyes",  "a girl with colitis goes by"],
        ["a partridge in a pear tree",     "a part-red gingerbread tree"],
        ["attorney and not a republic",    "attorney and notary public"],
        ["beck and call",                  "beckon call"],
        ["for all intents and purposes",   "for all intensive purposes"],
        ["laid him on the green",          "Lady Mondegreen"],
        ["Olive, the other reindeer",      "all of the other reindeer"],
        ["to the manner born",             "to the manor born"],
    ]

    return preferred_forms_check(text, list, err, msg)
Fix bug in mondegreen rule
Fix bug in mondegreen rule

(The correct versions should all be in the left column.)
Python
bsd-3-clause
jstewmon/proselint,jstewmon/proselint,amperser/proselint,amperser/proselint,jstewmon/proselint,amperser/proselint,amperser/proselint,amperser/proselint
from tools import memoize, preferred_forms_check


@memoize
def check(text):
    """Suggest the preferred forms."""
    err = "garner.mondegreens"
    msg = "'{}' is the preferred form."

    list = [
-        ["a girl with colitis goes by",   "a girl with kaleidascope eyes"],
+        ["a girl with kaleidascope eyes",  "a girl with colitis goes by"],
-        ["a partridge in a pear tree",    "a part-red gingerbread tree"],
+        ["a partridge in a pear tree",     "a part-red gingerbread tree"],
-        ["attorney and not a republic",   "attorney and notary public"],
+        ["attorney and not a republic",    "attorney and notary public"],
-        ["beck and call",                 "beckon call"],
+        ["beck and call",                  "beckon call"],
-        ["for all intents and purposes",  "for all intensive purposes"],
+        ["for all intents and purposes",   "for all intensive purposes"],
-        ["laid him on the green",         "Lady Mondegreen"],
+        ["laid him on the green",          "Lady Mondegreen"],
-        ["Olive, the other reindeer",     "all of the other reindeer"],
+        ["Olive, the other reindeer",      "all of the other reindeer"],
-        ["to the manner born",            "to the manor born"],
+        ["to the manner born",             "to the manor born"],
    ]

    return preferred_forms_check(text, list, err, msg)
Fix bug in mondegreen rule
## Code Before:
from tools import memoize, preferred_forms_check


@memoize
def check(text):
    """Suggest the preferred forms."""
    err = "garner.mondegreens"
    msg = "'{}' is the preferred form."

    list = [
        ["a girl with colitis goes by",   "a girl with kaleidascope eyes"],
        ["a partridge in a pear tree",    "a part-red gingerbread tree"],
        ["attorney and not a republic",   "attorney and notary public"],
        ["beck and call",                 "beckon call"],
        ["for all intents and purposes",  "for all intensive purposes"],
        ["laid him on the green",         "Lady Mondegreen"],
        ["Olive, the other reindeer",     "all of the other reindeer"],
        ["to the manner born",            "to the manor born"],
    ]

    return preferred_forms_check(text, list, err, msg)
## Instruction:
Fix bug in mondegreen rule
## Code After:
from tools import memoize, preferred_forms_check


@memoize
def check(text):
    """Suggest the preferred forms."""
    err = "garner.mondegreens"
    msg = "'{}' is the preferred form."

    list = [
        ["a girl with kaleidascope eyes",  "a girl with colitis goes by"],
        ["a partridge in a pear tree",     "a part-red gingerbread tree"],
        ["attorney and not a republic",    "attorney and notary public"],
        ["beck and call",                  "beckon call"],
        ["for all intents and purposes",   "for all intensive purposes"],
        ["laid him on the green",          "Lady Mondegreen"],
        ["Olive, the other reindeer",      "all of the other reindeer"],
        ["to the manner born",             "to the manor born"],
    ]

    return preferred_forms_check(text, list, err, msg)
# ... existing code ...
    list = [
        ["a girl with kaleidascope eyes",  "a girl with colitis goes by"],
        ["a partridge in a pear tree",     "a part-red gingerbread tree"],
        ["attorney and not a republic",    "attorney and notary public"],
        ["beck and call",                  "beckon call"],
        ["for all intents and purposes",   "for all intensive purposes"],
        ["laid him on the green",          "Lady Mondegreen"],
        ["Olive, the other reindeer",      "all of the other reindeer"],
        ["to the manner born",             "to the manor born"],
    ]
# ... rest of the code ...
994b9fbc9372b0c54f840a239f8b4a1cc89315ee
src/waldur_mastermind/invoices/filters.py
src/waldur_mastermind/invoices/filters.py
import django_filters

from waldur_core.core import filters as core_filters

from . import models


class InvoiceFilter(django_filters.FilterSet):
    customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
    customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
    state = django_filters.MultipleChoiceFilter(choices=models.Invoice.States.CHOICES)

    class Meta(object):
        model = models.Invoice
        fields = ('year', 'month')
import django_filters

from waldur_core.core import filters as core_filters

from . import models


class InvoiceFilter(django_filters.FilterSet):
    customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
    customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
    state = django_filters.MultipleChoiceFilter(choices=models.Invoice.States.CHOICES)
    o = django_filters.OrderingFilter(fields=(('year', 'month'),))

    class Meta(object):
        model = models.Invoice
        fields = ('year', 'month')
Allow to filter invoices by date
Allow to filter invoices by date

[WAL-2340]
Python
mit
opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur
import django_filters

from waldur_core.core import filters as core_filters

from . import models


class InvoiceFilter(django_filters.FilterSet):
    customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
    customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
    state = django_filters.MultipleChoiceFilter(choices=models.Invoice.States.CHOICES)
+    o = django_filters.OrderingFilter(fields=(('year', 'month'),))

    class Meta(object):
        model = models.Invoice
        fields = ('year', 'month')
Allow to filter invoices by date
## Code Before:
import django_filters

from waldur_core.core import filters as core_filters

from . import models


class InvoiceFilter(django_filters.FilterSet):
    customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
    customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
    state = django_filters.MultipleChoiceFilter(choices=models.Invoice.States.CHOICES)

    class Meta(object):
        model = models.Invoice
        fields = ('year', 'month')
## Instruction:
Allow to filter invoices by date
## Code After:
import django_filters

from waldur_core.core import filters as core_filters

from . import models


class InvoiceFilter(django_filters.FilterSet):
    customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
    customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
    state = django_filters.MultipleChoiceFilter(choices=models.Invoice.States.CHOICES)
    o = django_filters.OrderingFilter(fields=(('year', 'month'),))

    class Meta(object):
        model = models.Invoice
        fields = ('year', 'month')
...
    state = django_filters.MultipleChoiceFilter(choices=models.Invoice.States.CHOICES)
    o = django_filters.OrderingFilter(fields=(('year', 'month'),))
...
86d4aa3e5895d5f7ac029df82c63e2b1e29e8c2d
spc/types.py
spc/types.py
from collections import namedtuple

IntegerType = namedtuple('IntegerType', [])
Integer = IntegerType()

ByteType = namedtuple('ByteType', [])
Byte = ByteType()

PointerTo = namedtuple('PointerTo', ['type'])
ArrayOf = namedtuple('ArrayOf', ['type', 'count'])
FunctionPointer = namedtuple('FunctionPointer', ['return_type', 'params'])
TypeName = namedtuple('TypeName', ['name'])

# Structure is a bit of an oddity - it can't actually be used in 'raw form'
# by the user, but is always aliased in a declare block
Struct = namedtuple('Struct', ['fields'])

# This is used merely to record that a function has been declared - the
# actual reified type is FunctionPointer
FunctionDecl = namedtuple('FunctionDecl', ['return_type', 'params'])

AliasDef = namedtuple('AliasDef', ['type'])

# Raw types are types which can be used as variables
RAW_TYPES = (types.IntegerType, types.ByteType, types.TypeName,
             types.PointerTo, types.ArrayOf, types.FunctionPointer)

def decay_if_array(type_obj):
    """
    Decays arrays types into pointers.
    """
    if isinstance(type_obj, types.ArrayOf):
        return type_obj.PointerTo(type_obj.type)
    else:
        return type_obj

def func_decl_to_ptr(func_decl):
    """
    Converts a function declaration to a pointer.
    """
    return FunctionPointer(*func_decl)
from collections import namedtuple

IntegerType = namedtuple('IntegerType', [])
Integer = IntegerType()

ByteType = namedtuple('ByteType', [])
Byte = ByteType()

PointerTo = namedtuple('PointerTo', ['type'])
ArrayOf = namedtuple('ArrayOf', ['type', 'count'])
FunctionPointer = namedtuple('FunctionPointer', ['return_type', 'params'])
TypeName = namedtuple('TypeName', ['name'])

# Structure is a bit of an oddity - it can't actually be used in 'raw form'
# by the user, but is always aliased in a declare block.
#
# Also, fields is an OrderedDict, since the order of fields matters for layout,
Struct = namedtuple('Struct', ['fields'])

# This is used merely to record that a function has been declared - the
# actual reified type is FunctionPointer
FunctionDecl = namedtuple('FunctionDecl', ['return_type', 'params'])

AliasDef = namedtuple('AliasDef', ['type'])

# Raw types are types which can be used as variables
RAW_TYPES = (types.IntegerType, types.ByteType, types.TypeName,
             types.PointerTo, types.ArrayOf, types.FunctionPointer)

def decay_if_array(type_obj):
    """
    Decays arrays types into pointers.
    """
    if isinstance(type_obj, types.ArrayOf):
        return type_obj.PointerTo(type_obj.type)
    else:
        return type_obj

def func_decl_to_ptr(func_decl):
    """
    Converts a function declaration to a pointer.
    """
    return FunctionPointer(*func_decl)
Add note that Struct's field collection is an OrderedDict
Add note that Struct's field collection is an OrderedDict
Python
mit
adamnew123456/spc,adamnew123456/spc
from collections import namedtuple

IntegerType = namedtuple('IntegerType', [])
Integer = IntegerType()

ByteType = namedtuple('ByteType', [])
Byte = ByteType()

PointerTo = namedtuple('PointerTo', ['type'])
ArrayOf = namedtuple('ArrayOf', ['type', 'count'])
FunctionPointer = namedtuple('FunctionPointer', ['return_type', 'params'])
TypeName = namedtuple('TypeName', ['name'])

# Structure is a bit of an oddity - it can't actually be used in 'raw form'
-# by the user, but is always aliased in a declare block
+# by the user, but is always aliased in a declare block.
+#
+# Also, fields is an OrderedDict, since the order of fields matters for layout,
Struct = namedtuple('Struct', ['fields'])

# This is used merely to record that a function has been declared - the
# actual reified type is FunctionPointer
FunctionDecl = namedtuple('FunctionDecl', ['return_type', 'params'])

AliasDef = namedtuple('AliasDef', ['type'])

# Raw types are types which can be used as variables
RAW_TYPES = (types.IntegerType, types.ByteType, types.TypeName,
             types.PointerTo, types.ArrayOf, types.FunctionPointer)

def decay_if_array(type_obj):
    """
    Decays arrays types into pointers.
    """
    if isinstance(type_obj, types.ArrayOf):
        return type_obj.PointerTo(type_obj.type)
    else:
        return type_obj

def func_decl_to_ptr(func_decl):
    """
    Converts a function declaration to a pointer.
    """
    return FunctionPointer(*func_decl)
Add note that Struct's field collection is an OrderedDict
## Code Before:
from collections import namedtuple

IntegerType = namedtuple('IntegerType', [])
Integer = IntegerType()

ByteType = namedtuple('ByteType', [])
Byte = ByteType()

PointerTo = namedtuple('PointerTo', ['type'])
ArrayOf = namedtuple('ArrayOf', ['type', 'count'])
FunctionPointer = namedtuple('FunctionPointer', ['return_type', 'params'])
TypeName = namedtuple('TypeName', ['name'])

# Structure is a bit of an oddity - it can't actually be used in 'raw form'
# by the user, but is always aliased in a declare block
Struct = namedtuple('Struct', ['fields'])

# This is used merely to record that a function has been declared - the
# actual reified type is FunctionPointer
FunctionDecl = namedtuple('FunctionDecl', ['return_type', 'params'])

AliasDef = namedtuple('AliasDef', ['type'])

# Raw types are types which can be used as variables
RAW_TYPES = (types.IntegerType, types.ByteType, types.TypeName,
             types.PointerTo, types.ArrayOf, types.FunctionPointer)

def decay_if_array(type_obj):
    """
    Decays arrays types into pointers.
    """
    if isinstance(type_obj, types.ArrayOf):
        return type_obj.PointerTo(type_obj.type)
    else:
        return type_obj

def func_decl_to_ptr(func_decl):
    """
    Converts a function declaration to a pointer.
    """
    return FunctionPointer(*func_decl)
## Instruction:
Add note that Struct's field collection is an OrderedDict
## Code After:
from collections import namedtuple

IntegerType = namedtuple('IntegerType', [])
Integer = IntegerType()

ByteType = namedtuple('ByteType', [])
Byte = ByteType()

PointerTo = namedtuple('PointerTo', ['type'])
ArrayOf = namedtuple('ArrayOf', ['type', 'count'])
FunctionPointer = namedtuple('FunctionPointer', ['return_type', 'params'])
TypeName = namedtuple('TypeName', ['name'])

# Structure is a bit of an oddity - it can't actually be used in 'raw form'
# by the user, but is always aliased in a declare block.
#
# Also, fields is an OrderedDict, since the order of fields matters for layout,
Struct = namedtuple('Struct', ['fields'])

# This is used merely to record that a function has been declared - the
# actual reified type is FunctionPointer
FunctionDecl = namedtuple('FunctionDecl', ['return_type', 'params'])

AliasDef = namedtuple('AliasDef', ['type'])

# Raw types are types which can be used as variables
RAW_TYPES = (types.IntegerType, types.ByteType, types.TypeName,
             types.PointerTo, types.ArrayOf, types.FunctionPointer)

def decay_if_array(type_obj):
    """
    Decays arrays types into pointers.
    """
    if isinstance(type_obj, types.ArrayOf):
        return type_obj.PointerTo(type_obj.type)
    else:
        return type_obj

def func_decl_to_ptr(func_decl):
    """
    Converts a function declaration to a pointer.
    """
    return FunctionPointer(*func_decl)
// ... existing code ...
# Structure is a bit of an oddity - it can't actually be used in 'raw form'
# by the user, but is always aliased in a declare block.
#
# Also, fields is an OrderedDict, since the order of fields matters for layout,
Struct = namedtuple('Struct', ['fields'])
// ... rest of the code ...
595419eaa5b5f411e477357872c7dd28067c9210
src/books/models.py
src/books/models.py
from django.db import models
from datetime import date
from django.utils import timezone

# Create your models here.


class Book(models.Model):
    title = models.CharField(max_length=200)
    author = models.CharField(max_length=200)
    year = models.DateTimeField('year published',
                                help_text="Please use the following format: <em>YYYY-MM-DD</em>.")
    pages = models.IntegerField(default=0)
    isbn_10 = models.IntegerField(default=0)
    isbn_13 = models.IntegerField(default=0)
    description = models.TextField()
    cover_image = models.ImageField('Cover Image',
                                    upload_to='cover_pics/%Y-%m-%d/',
                                    null=True,
                                    blank=True)
    add_date = models.DateTimeField('date added', default=datetime.now)

    def __str__(self):
        return self.title + " by " + self.author

    def was_added_recently(self):
        return self.add_date >= timezone.now() - datetime.timedelta(days=30)
from django.db import models
from datetime import datetime
from django.utils import timezone

# Create your models here.


class Book(models.Model):
    title = models.CharField(max_length=200)
    author = models.CharField(max_length=200)
    year = models.DateTimeField('year published',
                                help_text="Please use the following format: <em>YYYY-MM-DD</em>.")
    pages = models.IntegerField(default=0)
    isbn_10 = models.IntegerField(default=0)
    isbn_13 = models.IntegerField(default=0)
    description = models.TextField()
    cover_image = models.ImageField('cover Image',
                                    upload_to='cover_pics/%Y-%m-%d/',
                                    null=True,
                                    blank=True)
    date_added = models.DateTimeField(default=datetime.now)

    def __str__(self):
        return self.title + " by " + self.author

    def was_added_recently(self):
        return self.date_added >= timezone.now() - datetime.timedelta(days=30)
Fix date error in books model
Fix date error in books model

Fix typos and the auto addition of date to the date_added field.
Python
mit
melkisedek/sen_project,melkisedek/sen_project,melkisedek/sen_project
from django.db import models
-from datetime import date
+from datetime import datetime
from django.utils import timezone

# Create your models here.


class Book(models.Model):
    title = models.CharField(max_length=200)
    author = models.CharField(max_length=200)
    year = models.DateTimeField('year published',
                                help_text="Please use the following format: <em>YYYY-MM-DD</em>.")
    pages = models.IntegerField(default=0)
    isbn_10 = models.IntegerField(default=0)
    isbn_13 = models.IntegerField(default=0)
    description = models.TextField()
-    cover_image = models.ImageField('Cover Image',
+    cover_image = models.ImageField('cover Image',
                                    upload_to='cover_pics/%Y-%m-%d/',
                                    null=True,
                                    blank=True)
-    add_date = models.DateTimeField('date added', default=datetime.now)
+    date_added = models.DateTimeField(default=datetime.now)

    def __str__(self):
        return self.title + " by " + self.author

    def was_added_recently(self):
-        return self.add_date >= timezone.now() - datetime.timedelta(days=30)
+        return self.date_added >= timezone.now() - datetime.timedelta(days=30)
Fix date error in books model
## Code Before:
from django.db import models
from datetime import date
from django.utils import timezone

# Create your models here.


class Book(models.Model):
    title = models.CharField(max_length=200)
    author = models.CharField(max_length=200)
    year = models.DateTimeField('year published',
                                help_text="Please use the following format: <em>YYYY-MM-DD</em>.")
    pages = models.IntegerField(default=0)
    isbn_10 = models.IntegerField(default=0)
    isbn_13 = models.IntegerField(default=0)
    description = models.TextField()
    cover_image = models.ImageField('Cover Image',
                                    upload_to='cover_pics/%Y-%m-%d/',
                                    null=True,
                                    blank=True)
    add_date = models.DateTimeField('date added', default=datetime.now)

    def __str__(self):
        return self.title + " by " + self.author

    def was_added_recently(self):
        return self.add_date >= timezone.now() - datetime.timedelta(days=30)
## Instruction:
Fix date error in books model
## Code After:
from django.db import models
from datetime import datetime
from django.utils import timezone

# Create your models here.


class Book(models.Model):
    title = models.CharField(max_length=200)
    author = models.CharField(max_length=200)
    year = models.DateTimeField('year published',
                                help_text="Please use the following format: <em>YYYY-MM-DD</em>.")
    pages = models.IntegerField(default=0)
    isbn_10 = models.IntegerField(default=0)
    isbn_13 = models.IntegerField(default=0)
    description = models.TextField()
    cover_image = models.ImageField('cover Image',
                                    upload_to='cover_pics/%Y-%m-%d/',
                                    null=True,
                                    blank=True)
    date_added = models.DateTimeField(default=datetime.now)

    def __str__(self):
        return self.title + " by " + self.author

    def was_added_recently(self):
        return self.date_added >= timezone.now() - datetime.timedelta(days=30)
// ... existing code ...
from django.db import models
from datetime import datetime
from django.utils import timezone
// ... modified code ...
    description = models.TextField()
    cover_image = models.ImageField('cover Image',
                                    upload_to='cover_pics/%Y-%m-%d/',
...
                                    blank=True)
    date_added = models.DateTimeField(default=datetime.now)

    def __str__(self):
...
    def was_added_recently(self):
        return self.date_added >= timezone.now() - datetime.timedelta(days=30)
// ... rest of the code ...
a40c617ea605bd667a9906f6c9400fc9562d7c0a
salt/daemons/flo/reactor.py
salt/daemons/flo/reactor.py
''' Start the reactor! ''' # Import salt libs import salt.utils.reactor # Import ioflo libs import ioflo.base.deeding @ioflo.base.deeding.deedify( 'SaltRaetReactorFork', ioinit={ 'opts': '.salt.opts', 'proc_mgr': '.salt.usr.proc_mgr'}) def reactor_fork(self): ''' Add a reactor object to the process manager ''' self.proc_mgr.add_process( salt.utils.reactor.Reactor, args=(self.opts.value,))
''' Start the reactor! ''' # Import salt libs import salt.utils.reactor import salt.utils.event # Import ioflo libs import ioflo.base.deeding @ioflo.base.deeding.deedify( 'SaltRaetReactorFork', ioinit={ 'opts': '.salt.opts', 'proc_mgr': '.salt.usr.proc_mgr'}) def reactor_fork(self): ''' Add a reactor object to the process manager ''' self.proc_mgr.add_process( salt.utils.reactor.Reactor, args=(self.opts.value,)) @ioflo.base.deeding.deedify( 'SaltRaetEventReturnFork', ioinit={ 'opts': '.salt.opts', 'proc_mgr': '.salt.usr.proc_mgr'}) def event_return_fork(self): ''' Add a reactor object to the process manager ''' self.proc_mgr.add_process( salt.utils.event.EventReturn, args=(self.opts.value,))
Add event return fork behavior
Add event return fork behavior
Python
apache-2.0
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
''' Start the reactor! ''' # Import salt libs import salt.utils.reactor + import salt.utils.event # Import ioflo libs import ioflo.base.deeding @ioflo.base.deeding.deedify( 'SaltRaetReactorFork', ioinit={ 'opts': '.salt.opts', 'proc_mgr': '.salt.usr.proc_mgr'}) def reactor_fork(self): ''' Add a reactor object to the process manager ''' self.proc_mgr.add_process( salt.utils.reactor.Reactor, args=(self.opts.value,)) + + @ioflo.base.deeding.deedify( + 'SaltRaetEventReturnFork', + ioinit={ + 'opts': '.salt.opts', + 'proc_mgr': '.salt.usr.proc_mgr'}) + def event_return_fork(self): + ''' + Add a reactor object to the process manager + ''' + self.proc_mgr.add_process( + salt.utils.event.EventReturn, + args=(self.opts.value,)) +
Add event return fork behavior
## Code Before: ''' Start the reactor! ''' # Import salt libs import salt.utils.reactor # Import ioflo libs import ioflo.base.deeding @ioflo.base.deeding.deedify( 'SaltRaetReactorFork', ioinit={ 'opts': '.salt.opts', 'proc_mgr': '.salt.usr.proc_mgr'}) def reactor_fork(self): ''' Add a reactor object to the process manager ''' self.proc_mgr.add_process( salt.utils.reactor.Reactor, args=(self.opts.value,)) ## Instruction: Add event return fork behavior ## Code After: ''' Start the reactor! ''' # Import salt libs import salt.utils.reactor import salt.utils.event # Import ioflo libs import ioflo.base.deeding @ioflo.base.deeding.deedify( 'SaltRaetReactorFork', ioinit={ 'opts': '.salt.opts', 'proc_mgr': '.salt.usr.proc_mgr'}) def reactor_fork(self): ''' Add a reactor object to the process manager ''' self.proc_mgr.add_process( salt.utils.reactor.Reactor, args=(self.opts.value,)) @ioflo.base.deeding.deedify( 'SaltRaetEventReturnFork', ioinit={ 'opts': '.salt.opts', 'proc_mgr': '.salt.usr.proc_mgr'}) def event_return_fork(self): ''' Add a reactor object to the process manager ''' self.proc_mgr.add_process( salt.utils.event.EventReturn, args=(self.opts.value,))
# ... existing code ... import salt.utils.reactor import salt.utils.event # Import ioflo libs # ... modified code ... args=(self.opts.value,)) @ioflo.base.deeding.deedify( 'SaltRaetEventReturnFork', ioinit={ 'opts': '.salt.opts', 'proc_mgr': '.salt.usr.proc_mgr'}) def event_return_fork(self): ''' Add a reactor object to the process manager ''' self.proc_mgr.add_process( salt.utils.event.EventReturn, args=(self.opts.value,)) # ... rest of the code ...
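The record above registers a second forked behavior through the same deedify-plus-process-manager pattern. A minimal standalone sketch of that fork-and-track pattern using only the standard library; ProcMgr and event_return are hypothetical stand-ins, not salt or ioflo APIs:

import multiprocessing

class ProcMgr:
    # hypothetical stand-in for salt's process manager
    def add_process(self, target, args=()):
        proc = multiprocessing.Process(target=target, args=args)
        proc.start()
        return proc

def event_return(opts):
    print('event returner running with opts:', opts)

if __name__ == '__main__':
    ProcMgr().add_process(event_return, args=({'id': 'minion1'},)).join()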
4c12f68e5d86acb4152acf5ace6a02b6968db925
setup.py
setup.py
from distutils.core import setup setup(name='icalendar', version='0.10', description='iCalendar support module', package_dir = {'': 'src'}, packages=['icalendar'], )
from distutils.core import setup f = open('version.txt', 'r') version = f.read().strip() f.close() setup(name='icalendar', version=version, description='iCalendar support module', package_dir = {'': 'src'}, packages=['icalendar'], )
Tweak so that version information is picked up from version.txt.
Tweak so that version information is picked up from version.txt.
Python
bsd-2-clause
nylas/icalendar,geier/icalendar,untitaker/icalendar
from distutils.core import setup + f = open('version.txt', 'r') + version = f.read().strip() + f.close() + setup(name='icalendar', - version='0.10', + version=version, description='iCalendar support module', package_dir = {'': 'src'}, packages=['icalendar'], )
Tweak so that version information is picked up from version.txt.
## Code Before: from distutils.core import setup setup(name='icalendar', version='0.10', description='iCalendar support module', package_dir = {'': 'src'}, packages=['icalendar'], ) ## Instruction: Tweak so that version information is picked up from version.txt. ## Code After: from distutils.core import setup f = open('version.txt', 'r') version = f.read().strip() f.close() setup(name='icalendar', version=version, description='iCalendar support module', package_dir = {'': 'src'}, packages=['icalendar'], )
... f = open('version.txt', 'r') version = f.read().strip() f.close() setup(name='icalendar', version=version, description='iCalendar support module', ...
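The open/read/close triple in the record works, but a context manager or pathlib keeps the file handle from leaking if read() raises. A minimal equivalent, assuming version.txt sits next to setup.py:

from pathlib import Path

version = Path('version.txt').read_text().strip()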
424588f4cdad2dd063b15895198611703b187bec
pynpact/tests/steps/conftest.py
pynpact/tests/steps/conftest.py
import pytest import taskqueue @pytest.fixture(scope="session") def async_executor(request): taskqueue.LISTEN_ADDRESS = ('127.0.1.1', 57129) sm = taskqueue.get_ServerManager(make_server=True) sm.start() request.addfinalizer(sm.shutdown) return sm.Server() class NullExecutor(object): "An executor that doens't actually execute anything, just keeps track" tasks = None def __init__(self): self.tasks = {} def enqueue(self, callable, tid=None, after=None): if tid is None: tid = randomid() if after is not None: for aid in after: assert aid in self.tasks, \ "The NullExecutor can't be after a task that doesn't exist yet" if tid not in self.tasks: self.tasks[tid] = callable return tid @pytest.fixture def null_executor(request): return NullExecutor()
import pytest def taskqueue_executor(): import taskqueue taskqueue.LISTEN_ADDRESS = ('127.0.1.1', 57129) sm = taskqueue.get_ServerManager(make_server=True) sm.start() request.addfinalizer(sm.shutdown) return sm.Server() @pytest.fixture(scope="session") def async_executor(request): from pynpact.executors import GeventExecutor return GeventExecutor() class NullExecutor(object): "An executor that doens't actually execute anything, just keeps track" tasks = None def __init__(self): self.tasks = {} def enqueue(self, callable, tid=None, after=None): if tid is None: tid = randomid() if after is not None: for aid in after: assert aid in self.tasks, \ "The NullExecutor can't be after a task that doesn't exist yet" if tid not in self.tasks: self.tasks[tid] = callable return tid @pytest.fixture def null_executor(request): return NullExecutor()
Make pynpact tests use GeventExecutor
Make pynpact tests use GeventExecutor We've almost completely deprecated taskqueue at this point; lets test the new pieces instead of th old.
Python
bsd-3-clause
NProfileAnalysisComputationalTool/npact,NProfileAnalysisComputationalTool/npact,NProfileAnalysisComputationalTool/npact,NProfileAnalysisComputationalTool/npact,NProfileAnalysisComputationalTool/npact
import pytest - import taskqueue - @pytest.fixture(scope="session") - def async_executor(request): + def taskqueue_executor(): + import taskqueue taskqueue.LISTEN_ADDRESS = ('127.0.1.1', 57129) sm = taskqueue.get_ServerManager(make_server=True) sm.start() request.addfinalizer(sm.shutdown) return sm.Server() + + + @pytest.fixture(scope="session") + def async_executor(request): + from pynpact.executors import GeventExecutor + return GeventExecutor() class NullExecutor(object): "An executor that doens't actually execute anything, just keeps track" tasks = None def __init__(self): self.tasks = {} def enqueue(self, callable, tid=None, after=None): if tid is None: tid = randomid() if after is not None: for aid in after: assert aid in self.tasks, \ "The NullExecutor can't be after a task that doesn't exist yet" if tid not in self.tasks: self.tasks[tid] = callable return tid @pytest.fixture def null_executor(request): return NullExecutor()
Make pynpact tests use GeventExecutor
## Code Before: import pytest import taskqueue @pytest.fixture(scope="session") def async_executor(request): taskqueue.LISTEN_ADDRESS = ('127.0.1.1', 57129) sm = taskqueue.get_ServerManager(make_server=True) sm.start() request.addfinalizer(sm.shutdown) return sm.Server() class NullExecutor(object): "An executor that doens't actually execute anything, just keeps track" tasks = None def __init__(self): self.tasks = {} def enqueue(self, callable, tid=None, after=None): if tid is None: tid = randomid() if after is not None: for aid in after: assert aid in self.tasks, \ "The NullExecutor can't be after a task that doesn't exist yet" if tid not in self.tasks: self.tasks[tid] = callable return tid @pytest.fixture def null_executor(request): return NullExecutor() ## Instruction: Make pynpact tests use GeventExecutor ## Code After: import pytest def taskqueue_executor(): import taskqueue taskqueue.LISTEN_ADDRESS = ('127.0.1.1', 57129) sm = taskqueue.get_ServerManager(make_server=True) sm.start() request.addfinalizer(sm.shutdown) return sm.Server() @pytest.fixture(scope="session") def async_executor(request): from pynpact.executors import GeventExecutor return GeventExecutor() class NullExecutor(object): "An executor that doens't actually execute anything, just keeps track" tasks = None def __init__(self): self.tasks = {} def enqueue(self, callable, tid=None, after=None): if tid is None: tid = randomid() if after is not None: for aid in after: assert aid in self.tasks, \ "The NullExecutor can't be after a task that doesn't exist yet" if tid not in self.tasks: self.tasks[tid] = callable return tid @pytest.fixture def null_executor(request): return NullExecutor()
... import pytest ... def taskqueue_executor(): import taskqueue taskqueue.LISTEN_ADDRESS = ('127.0.1.1', 57129) ... return sm.Server() @pytest.fixture(scope="session") def async_executor(request): from pynpact.executors import GeventExecutor return GeventExecutor() ...
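One thing worth flagging in the record above: the extracted taskqueue_executor helper still calls request.addfinalizer(sm.shutdown) but no longer receives request, so it would raise NameError if ever called. In pytest, request is only injected into fixtures that declare it as a parameter. A minimal sketch of the finalizer pattern; the resource dict is a placeholder for the real server object:

import pytest

@pytest.fixture(scope='session')
def server(request):
    resource = {'running': True}  # placeholder for sm.Server()
    request.addfinalizer(lambda: resource.update(running=False))
    return resource

def test_server(server):
    assert server['running']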
805c6097b3dc7e7e2468235a9c28d159cb99f187
satchless/cart/__init__.py
satchless/cart/__init__.py
from django.conf import settings from django.core.exceptions import ImproperlyConfigured from .handler import AddToCartHandler add_to_cart_handler = AddToCartHandler('cart') if not getattr(settings, 'SATCHLESS_DEFAULT_CURRENCY', None): raise ImproperlyConfigured('You need to configure ' 'SATCHLESS_DEFAULT_CURRENCY')
class InvalidQuantityException(Exception): def __init__(self, reason, quantity_delta): self.reason = reason self.quantity_delta = quantity_delta def __str__(self): return self.reason
Add cart quantity exception, remove old handler
Add cart quantity exception, remove old handler
Python
bsd-3-clause
taedori81/satchless
+ class InvalidQuantityException(Exception): - from django.conf import settings - from django.core.exceptions import ImproperlyConfigured - from .handler import AddToCartHandler + def __init__(self, reason, quantity_delta): + self.reason = reason + self.quantity_delta = quantity_delta - add_to_cart_handler = AddToCartHandler('cart') + def __str__(self): + return self.reason + - if not getattr(settings, 'SATCHLESS_DEFAULT_CURRENCY', None): - raise ImproperlyConfigured('You need to configure ' - 'SATCHLESS_DEFAULT_CURRENCY')
Add cart quantity exception, remove old handler
## Code Before: from django.conf import settings from django.core.exceptions import ImproperlyConfigured from .handler import AddToCartHandler add_to_cart_handler = AddToCartHandler('cart') if not getattr(settings, 'SATCHLESS_DEFAULT_CURRENCY', None): raise ImproperlyConfigured('You need to configure ' 'SATCHLESS_DEFAULT_CURRENCY') ## Instruction: Add cart quantity exception, remove old handler ## Code After: class InvalidQuantityException(Exception): def __init__(self, reason, quantity_delta): self.reason = reason self.quantity_delta = quantity_delta def __str__(self): return self.reason
// ... existing code ... class InvalidQuantityException(Exception): def __init__(self, reason, quantity_delta): self.reason = reason self.quantity_delta = quantity_delta def __str__(self): return self.reason // ... rest of the code ...
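A short usage sketch for the exception introduced above. Note that the class never calls Exception.__init__, so exc.args stays empty, which matters if these exceptions ever cross a pickle boundary:

class InvalidQuantityException(Exception):
    def __init__(self, reason, quantity_delta):
        self.reason = reason
        self.quantity_delta = quantity_delta

    def __str__(self):
        return self.reason

try:
    raise InvalidQuantityException('only 2 items in stock', -3)
except InvalidQuantityException as exc:
    print(exc, exc.quantity_delta)  # only 2 items in stock -3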
0905772851b4911466eb8f31dd4853aefb88e478
manage.py
manage.py
import os from app import create_app, db from app.models import User, Category from flask_script import Manager from flask_migrate import Migrate, MigrateCommand app = create_app(os.getenv('FLASK_CONFIG') or 'default') manager = Manager(app) migrate = Migrate(app, db) manager.add_command('db', MigrateCommand) #pylint: disable-msg=E1101 @manager.command def adduser(email, username, admin=False): """ Register a new user""" from getpass import getpass password = getpass() password2 = getpass(prompt='Confirm: ') if password != password2: import sys sys.exit("Error: Passwords do not match!") db.create_all() category = Category.get_by_name('Almenn frétt') if category is None: category = Category(name='Almenn frétt', name_en='General News', active=True) db.session.add(category) user = User(email=email, username=username, password=password, is_admin=admin) db.session.add(user) db.session.commit() print('User {0} was registered successfully!'.format(username)) if __name__ == '__main__': manager.run()
import os from app import create_app, db from app.models import User, Category from flask_script import Manager, Server from flask_migrate import Migrate, MigrateCommand app = create_app(os.getenv('FLASK_CONFIG') or 'default') manager = Manager(app) migrate = Migrate(app, db) manager.add_command('db', MigrateCommand) manager.add_command('runserver', Server(host='127.0.0.1')) #pylint: disable-msg=E1101 @manager.command def adduser(email, username, admin=False): """ Register a new user""" from getpass import getpass password = getpass() password2 = getpass(prompt='Confirm: ') if password != password2: import sys sys.exit("Error: Passwords do not match!") db.create_all() category = Category.get_by_name('Almenn frétt') if category is None: category = Category(name='Almenn frétt', name_en='General News', active=True) db.session.add(category) user = User(email=email, username=username, password=password, is_admin=admin) db.session.add(user) db.session.commit() print('User {0} was registered successfully!'.format(username)) if __name__ == '__main__': manager.run()
Change the runserver command to run a server at a host ip of 127.0.0.1 to easily change the xternal visibility of the application later
Change the runserver command to run a server at a host ip of 127.0.0.1 to easily change the xternal visibility of the application later
Python
mit
finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is
import os from app import create_app, db from app.models import User, Category - from flask_script import Manager + from flask_script import Manager, Server from flask_migrate import Migrate, MigrateCommand app = create_app(os.getenv('FLASK_CONFIG') or 'default') manager = Manager(app) migrate = Migrate(app, db) manager.add_command('db', MigrateCommand) + manager.add_command('runserver', Server(host='127.0.0.1')) #pylint: disable-msg=E1101 @manager.command def adduser(email, username, admin=False): """ Register a new user""" from getpass import getpass password = getpass() password2 = getpass(prompt='Confirm: ') if password != password2: import sys sys.exit("Error: Passwords do not match!") db.create_all() category = Category.get_by_name('Almenn frétt') if category is None: category = Category(name='Almenn frétt', name_en='General News', active=True) db.session.add(category) user = User(email=email, username=username, password=password, is_admin=admin) db.session.add(user) db.session.commit() print('User {0} was registered successfully!'.format(username)) if __name__ == '__main__': manager.run()
Change the runserver command to run a server at a host ip of 127.0.0.1 to easily change the xternal visibility of the application later
## Code Before: import os from app import create_app, db from app.models import User, Category from flask_script import Manager from flask_migrate import Migrate, MigrateCommand app = create_app(os.getenv('FLASK_CONFIG') or 'default') manager = Manager(app) migrate = Migrate(app, db) manager.add_command('db', MigrateCommand) #pylint: disable-msg=E1101 @manager.command def adduser(email, username, admin=False): """ Register a new user""" from getpass import getpass password = getpass() password2 = getpass(prompt='Confirm: ') if password != password2: import sys sys.exit("Error: Passwords do not match!") db.create_all() category = Category.get_by_name('Almenn frétt') if category is None: category = Category(name='Almenn frétt', name_en='General News', active=True) db.session.add(category) user = User(email=email, username=username, password=password, is_admin=admin) db.session.add(user) db.session.commit() print('User {0} was registered successfully!'.format(username)) if __name__ == '__main__': manager.run() ## Instruction: Change the runserver command to run a server at a host ip of 127.0.0.1 to easily change the xternal visibility of the application later ## Code After: import os from app import create_app, db from app.models import User, Category from flask_script import Manager, Server from flask_migrate import Migrate, MigrateCommand app = create_app(os.getenv('FLASK_CONFIG') or 'default') manager = Manager(app) migrate = Migrate(app, db) manager.add_command('db', MigrateCommand) manager.add_command('runserver', Server(host='127.0.0.1')) #pylint: disable-msg=E1101 @manager.command def adduser(email, username, admin=False): """ Register a new user""" from getpass import getpass password = getpass() password2 = getpass(prompt='Confirm: ') if password != password2: import sys sys.exit("Error: Passwords do not match!") db.create_all() category = Category.get_by_name('Almenn frétt') if category is None: category = Category(name='Almenn frétt', name_en='General News', active=True) db.session.add(category) user = User(email=email, username=username, password=password, is_admin=admin) db.session.add(user) db.session.commit() print('User {0} was registered successfully!'.format(username)) if __name__ == '__main__': manager.run()
// ... existing code ... from flask_script import Manager, Server from flask_migrate import Migrate, MigrateCommand // ... modified code ... manager.add_command('db', MigrateCommand) manager.add_command('runserver', Server(host='127.0.0.1')) // ... rest of the code ...
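The commit above pins the dev server to 127.0.0.1 so that external visibility can later be changed in one place. A sketch of the externally visible variant, assuming Flask and Flask-Script are installed; the app here is a throwaway example, not the project's:

from flask import Flask
from flask_script import Manager, Server

app = Flask(__name__)
manager = Manager(app)
# 0.0.0.0 binds all interfaces; 127.0.0.1 keeps the dev server local-only
manager.add_command('runserver', Server(host='0.0.0.0', port=5000))

if __name__ == '__main__':
    manager.run()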
5e57dce84ffe7be7e699af1e2be953d5a65d8435
tests/test_module.py
tests/test_module.py
import sys import dill import test_mixins as module module.a = 1234 pik_mod = dill.dumps(module) module.a = 0 # remove module del sys.modules[module.__name__] del module module = dill.loads(pik_mod) assert module.a == 1234 assert module.double_add(1, 2, 3) == 2 * module.fx
import sys import dill import test_mixins as module cached = (module.__cached__ if hasattr(module, "__cached__") else module.__file__ + "c") module.a = 1234 pik_mod = dill.dumps(module) module.a = 0 # remove module del sys.modules[module.__name__] del module module = dill.loads(pik_mod) assert hasattr(module, "a") and module.a == 1234 assert module.double_add(1, 2, 3) == 2 * module.fx # clean up import os os.remove(cached) if os.path.exists("__pycache__") and not os.listdir("__pycache__"): os.removedirs("__pycache__")
Add code to clean up
Add code to clean up
Python
bsd-3-clause
wxiang7/dill,mindw/dill
import sys import dill import test_mixins as module + + cached = (module.__cached__ if hasattr(module, "__cached__") + else module.__file__ + "c") module.a = 1234 pik_mod = dill.dumps(module) module.a = 0 # remove module del sys.modules[module.__name__] del module module = dill.loads(pik_mod) - assert module.a == 1234 + assert hasattr(module, "a") and module.a == 1234 assert module.double_add(1, 2, 3) == 2 * module.fx + # clean up + import os + os.remove(cached) + if os.path.exists("__pycache__") and not os.listdir("__pycache__"): + os.removedirs("__pycache__") +
Add code to clean up
## Code Before: import sys import dill import test_mixins as module module.a = 1234 pik_mod = dill.dumps(module) module.a = 0 # remove module del sys.modules[module.__name__] del module module = dill.loads(pik_mod) assert module.a == 1234 assert module.double_add(1, 2, 3) == 2 * module.fx ## Instruction: Add code to clean up ## Code After: import sys import dill import test_mixins as module cached = (module.__cached__ if hasattr(module, "__cached__") else module.__file__ + "c") module.a = 1234 pik_mod = dill.dumps(module) module.a = 0 # remove module del sys.modules[module.__name__] del module module = dill.loads(pik_mod) assert hasattr(module, "a") and module.a == 1234 assert module.double_add(1, 2, 3) == 2 * module.fx # clean up import os os.remove(cached) if os.path.exists("__pycache__") and not os.listdir("__pycache__"): os.removedirs("__pycache__")
// ... existing code ... import test_mixins as module cached = (module.__cached__ if hasattr(module, "__cached__") else module.__file__ + "c") // ... modified code ... module = dill.loads(pik_mod) assert hasattr(module, "a") and module.a == 1234 assert module.double_add(1, 2, 3) == 2 * module.fx # clean up import os os.remove(cached) if os.path.exists("__pycache__") and not os.listdir("__pycache__"): os.removedirs("__pycache__") // ... rest of the code ...
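The __cached__ fallback above hand-builds the old-style module.pyc path, which is where Python 2 wrote bytecode; on Python 3 it lands under __pycache__ instead. importlib.util.cache_from_source computes the correct location either way. A sketch, reusing this test's module file name:

import importlib.util

# e.g. '__pycache__/test_mixins.cpython-312.pyc' on CPython 3.12
print(importlib.util.cache_from_source('test_mixins.py'))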
eefff91804317f4fb2c518446ab8e2072af4d87f
app/models.py
app/models.py
from django.db import models import mongoengine from mongoengine import Document, EmbeddedDocument from mongoengine.fields import * # Create your models here. class Greeting(models.Model): when = models.DateTimeField('date created', auto_now_add=True) MONGODB_URI = 'mongodb+srv://fikaadmin:[email protected]/fikanotedb?retryWrites=true&w=majority' mongoengine.connect('fikanotedb', host=MONGODB_URI) class Shownote(EmbeddedDocument): url = URLField() title = StringField() date = DateTimeField() class FikanoteDB(Document): title = StringField() number = IntField() person = ListField(StringField()) agenda = StringField() date = DateTimeField() shownotes = ListField(EmbeddedDocumentField(Shownote)) meta = {'collection': 'fikanotedb'} class AgendaDB(Document): url = URLField() title = StringField() date = DateTimeField() meta = {'collection': 'agendadb'}
from django.db import models import mongoengine from mongoengine import Document, EmbeddedDocument from mongoengine.fields import * import os # Create your models here. class Greeting(models.Model): when = models.DateTimeField('date created', auto_now_add=True) USER = os.getenv('DATABASE_USER') PASWORD = os.getenv('DATABASE_PASSWORD') MONGODB_URI = "mongodb+srv://{}:{}@fikanotedb.ltkpy.mongodb.net/fikanotedb?retryWrites=true&w=majority".format(USER, PASWORD) mongoengine.connect('fikanotedb', host=MONGODB_URI) class Shownote(EmbeddedDocument): url = URLField() title = StringField() date = DateTimeField() class FikanoteDB(Document): title = StringField() number = IntField() person = ListField(StringField()) agenda = StringField() date = DateTimeField() shownotes = ListField(EmbeddedDocumentField(Shownote)) meta = {'collection': 'fikanotedb'} class AgendaDB(Document): url = URLField() title = StringField() date = DateTimeField() meta = {'collection': 'agendadb'}
Remove username and password from repository
Remove username and password from repository
Python
mit
gmkou/FikaNote,gmkou/FikaNote,gmkou/FikaNote
from django.db import models import mongoengine from mongoengine import Document, EmbeddedDocument from mongoengine.fields import * + import os # Create your models here. class Greeting(models.Model): when = models.DateTimeField('date created', auto_now_add=True) + USER = os.getenv('DATABASE_USER') + PASWORD = os.getenv('DATABASE_PASSWORD') - MONGODB_URI = 'mongodb+srv://fikaadmin:[email protected]/fikanotedb?retryWrites=true&w=majority' + MONGODB_URI = "mongodb+srv://{}:{}@fikanotedb.ltkpy.mongodb.net/fikanotedb?retryWrites=true&w=majority".format(USER, PASWORD) mongoengine.connect('fikanotedb', host=MONGODB_URI) class Shownote(EmbeddedDocument): url = URLField() title = StringField() date = DateTimeField() class FikanoteDB(Document): title = StringField() number = IntField() person = ListField(StringField()) agenda = StringField() date = DateTimeField() shownotes = ListField(EmbeddedDocumentField(Shownote)) meta = {'collection': 'fikanotedb'} class AgendaDB(Document): url = URLField() title = StringField() date = DateTimeField() meta = {'collection': 'agendadb'}
Remove username and password from repository
## Code Before: from django.db import models import mongoengine from mongoengine import Document, EmbeddedDocument from mongoengine.fields import * # Create your models here. class Greeting(models.Model): when = models.DateTimeField('date created', auto_now_add=True) MONGODB_URI = 'mongodb+srv://fikaadmin:[email protected]/fikanotedb?retryWrites=true&w=majority' mongoengine.connect('fikanotedb', host=MONGODB_URI) class Shownote(EmbeddedDocument): url = URLField() title = StringField() date = DateTimeField() class FikanoteDB(Document): title = StringField() number = IntField() person = ListField(StringField()) agenda = StringField() date = DateTimeField() shownotes = ListField(EmbeddedDocumentField(Shownote)) meta = {'collection': 'fikanotedb'} class AgendaDB(Document): url = URLField() title = StringField() date = DateTimeField() meta = {'collection': 'agendadb'} ## Instruction: Remove username and password from repository ## Code After: from django.db import models import mongoengine from mongoengine import Document, EmbeddedDocument from mongoengine.fields import * import os # Create your models here. class Greeting(models.Model): when = models.DateTimeField('date created', auto_now_add=True) USER = os.getenv('DATABASE_USER') PASWORD = os.getenv('DATABASE_PASSWORD') MONGODB_URI = "mongodb+srv://{}:{}@fikanotedb.ltkpy.mongodb.net/fikanotedb?retryWrites=true&w=majority".format(USER, PASWORD) mongoengine.connect('fikanotedb', host=MONGODB_URI) class Shownote(EmbeddedDocument): url = URLField() title = StringField() date = DateTimeField() class FikanoteDB(Document): title = StringField() number = IntField() person = ListField(StringField()) agenda = StringField() date = DateTimeField() shownotes = ListField(EmbeddedDocumentField(Shownote)) meta = {'collection': 'fikanotedb'} class AgendaDB(Document): url = URLField() title = StringField() date = DateTimeField() meta = {'collection': 'agendadb'}
// ... existing code ... import os // ... modified code ... USER = os.getenv('DATABASE_USER') PASWORD = os.getenv('DATABASE_PASSWORD') MONGODB_URI = "mongodb+srv://{}:{}@fikanotedb.ltkpy.mongodb.net/fikanotedb?retryWrites=true&w=majority".format(USER, PASWORD) mongoengine.connect('fikanotedb', host=MONGODB_URI) // ... rest of the code ...
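Moving the credentials into environment variables is the point of the commit above (the committed helper spells its variable PASWORD, so copy carefully). One extra wrinkle with MongoDB URIs: user names and passwords containing reserved characters must be percent-escaped, which urllib.parse.quote_plus handles. A standalone sketch with a placeholder host:

import os
from urllib.parse import quote_plus

user = quote_plus(os.getenv('DATABASE_USER', ''))
password = quote_plus(os.getenv('DATABASE_PASSWORD', ''))
# the host below is a placeholder, not the project's real cluster
uri = 'mongodb+srv://{}:{}@cluster.example.mongodb.net/db'.format(user, password)
print(uri)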
fef12d2a5cce5c1db488a4bb11b9c21b83a66cab
avocado/export/_json.py
avocado/export/_json.py
import json import inspect from _base import BaseExporter class JSONGeneratorEncoder(json.JSONEncoder): "Handle generator objects and expressions." def default(self, obj): if inspect.isgenerator(obj): return list(obj) return super(JSONGeneratorEncoder, self).default(obj) class JSONExporter(BaseExporter): file_extension = 'json' content_type = 'application/json' preferred_formats = ('number', 'string') def write(self, iterable, buff=None): buff = self.get_file_obj(buff) encoder = JSONGeneratorEncoder() for chunk in encoder.iterencode(self.read(iterable)): buff.write(chunk) return buff
import inspect from django.core.serializers.json import DjangoJSONEncoder from _base import BaseExporter class JSONGeneratorEncoder(DjangoJSONEncoder): "Handle generator objects and expressions." def default(self, obj): if inspect.isgenerator(obj): return list(obj) return super(JSONGeneratorEncoder, self).default(obj) class JSONExporter(BaseExporter): file_extension = 'json' content_type = 'application/json' preferred_formats = ('number', 'string') def write(self, iterable, buff=None): buff = self.get_file_obj(buff) encoder = JSONGeneratorEncoder() for chunk in encoder.iterencode(self.read(iterable)): buff.write(chunk) return buff
Update JSONGeneratorEncoder to subclass DjangoJSONEncoder This handles Decimals and datetimes
Update JSONGeneratorEncoder to subclass DjangoJSONEncoder This handles Decimals and datetimes
Python
bsd-2-clause
murphyke/avocado,murphyke/avocado,murphyke/avocado,murphyke/avocado
- import json import inspect + from django.core.serializers.json import DjangoJSONEncoder from _base import BaseExporter - class JSONGeneratorEncoder(json.JSONEncoder): + class JSONGeneratorEncoder(DjangoJSONEncoder): "Handle generator objects and expressions." def default(self, obj): if inspect.isgenerator(obj): return list(obj) return super(JSONGeneratorEncoder, self).default(obj) class JSONExporter(BaseExporter): file_extension = 'json' content_type = 'application/json' preferred_formats = ('number', 'string') def write(self, iterable, buff=None): buff = self.get_file_obj(buff) encoder = JSONGeneratorEncoder() for chunk in encoder.iterencode(self.read(iterable)): buff.write(chunk) return buff
Update JSONGeneratorEncoder to subclass DjangoJSONEncoder This handles Decimals and datetimes
## Code Before: import json import inspect from _base import BaseExporter class JSONGeneratorEncoder(json.JSONEncoder): "Handle generator objects and expressions." def default(self, obj): if inspect.isgenerator(obj): return list(obj) return super(JSONGeneratorEncoder, self).default(obj) class JSONExporter(BaseExporter): file_extension = 'json' content_type = 'application/json' preferred_formats = ('number', 'string') def write(self, iterable, buff=None): buff = self.get_file_obj(buff) encoder = JSONGeneratorEncoder() for chunk in encoder.iterencode(self.read(iterable)): buff.write(chunk) return buff ## Instruction: Update JSONGeneratorEncoder to subclass DjangoJSONEncoder This handles Decimals and datetimes ## Code After: import inspect from django.core.serializers.json import DjangoJSONEncoder from _base import BaseExporter class JSONGeneratorEncoder(DjangoJSONEncoder): "Handle generator objects and expressions." def default(self, obj): if inspect.isgenerator(obj): return list(obj) return super(JSONGeneratorEncoder, self).default(obj) class JSONExporter(BaseExporter): file_extension = 'json' content_type = 'application/json' preferred_formats = ('number', 'string') def write(self, iterable, buff=None): buff = self.get_file_obj(buff) encoder = JSONGeneratorEncoder() for chunk in encoder.iterencode(self.read(iterable)): buff.write(chunk) return buff
... import inspect from django.core.serializers.json import DjangoJSONEncoder from _base import BaseExporter ... class JSONGeneratorEncoder(DjangoJSONEncoder): "Handle generator objects and expressions." ...
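Why the subclass matters: the stdlib json.JSONEncoder raises TypeError on Decimal and datetime values, while DjangoJSONEncoder serializes Decimals as strings and datetimes in ISO 8601 form. A quick check, assuming Django is installed:

import datetime
import decimal
import json

from django.core.serializers.json import DjangoJSONEncoder

payload = {'price': decimal.Decimal('9.99'),
           'when': datetime.datetime(2013, 1, 1, 12, 0)}
print(json.dumps(payload, cls=DjangoJSONEncoder))
# {"price": "9.99", "when": "2013-01-01T12:00:00"}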
5a889dee78335d3c7d758c1df16d774160049b12
djangoprojects/django_rest_framework/tutorial/snippets/views.py
djangoprojects/django_rest_framework/tutorial/snippets/views.py
from snippets.models import Snippet from snippets.serializers import SnippetSerializer from rest_framework import generics from rest_framework import permissions from django.contrib.auth.models import User from snippets.serializers import UserSerializer from snippets.permissions import IsOwnerOrReadOnly class UserList(generics.ListAPIView): queryset = User.objects.all() serializer_class = UserSerializer class UserDetail(generics.RetrieveAPIView): queryset = User.objects.all() serializer_class = UserSerializer class SnippetList(generics.ListCreateAPIView): queryset = Snippet.objects.all() serializer_class = SnippetSerializer permission_classes = (permissions.IsAuthenticatedOrReadOnly,) def perform_create(self, serializer): serializer.save(owner=self.request.user) class SnippetDetail(generics.RetrieveUpdateDestroyAPIView): queryset = Snippet.objects.all() serializer_class = SnippetSerializer permission_classes = (permissions.IsAuthenticatedOrReadOnly, IsOwnerOrReadOnly)
from snippets.models import Snippet from snippets.serializers import SnippetSerializer from rest_framework import generics from rest_framework import permissions from django.contrib.auth.models import User from snippets.serializers import UserSerializer from snippets.permissions import IsOwnerOrReadOnly from rest_framework.decorators import api_view from rest_framework.response import Response from rest_framework.reverse import reverse @api_view(("GET", )) def api_root(request, format=None): return Response({ "users": reverse("user-list", request=request, format=format), "snippets": reverse("snippet-list", request=request, format=format) }) class UserList(generics.ListAPIView): queryset = User.objects.all() serializer_class = UserSerializer class UserDetail(generics.RetrieveAPIView): queryset = User.objects.all() serializer_class = UserSerializer class SnippetList(generics.ListCreateAPIView): queryset = Snippet.objects.all() serializer_class = SnippetSerializer permission_classes = (permissions.IsAuthenticatedOrReadOnly,) def perform_create(self, serializer): serializer.save(owner=self.request.user) class SnippetDetail(generics.RetrieveUpdateDestroyAPIView): queryset = Snippet.objects.all() serializer_class = SnippetSerializer permission_classes = (permissions.IsAuthenticatedOrReadOnly, IsOwnerOrReadOnly)
Add an api view for the API root
Add an api view for the API root not plugged yet
Python
unlicense
bertrandvidal/stuff,bertrandvidal/stuff,bertrandvidal/stuff,bertrandvidal/stuff
from snippets.models import Snippet from snippets.serializers import SnippetSerializer from rest_framework import generics from rest_framework import permissions from django.contrib.auth.models import User from snippets.serializers import UserSerializer from snippets.permissions import IsOwnerOrReadOnly + from rest_framework.decorators import api_view + from rest_framework.response import Response + from rest_framework.reverse import reverse + + + @api_view(("GET", )) + def api_root(request, format=None): + return Response({ + "users": reverse("user-list", request=request, format=format), + "snippets": reverse("snippet-list", request=request, format=format) + }) + class UserList(generics.ListAPIView): queryset = User.objects.all() serializer_class = UserSerializer class UserDetail(generics.RetrieveAPIView): queryset = User.objects.all() serializer_class = UserSerializer class SnippetList(generics.ListCreateAPIView): queryset = Snippet.objects.all() serializer_class = SnippetSerializer permission_classes = (permissions.IsAuthenticatedOrReadOnly,) def perform_create(self, serializer): serializer.save(owner=self.request.user) class SnippetDetail(generics.RetrieveUpdateDestroyAPIView): queryset = Snippet.objects.all() serializer_class = SnippetSerializer permission_classes = (permissions.IsAuthenticatedOrReadOnly, IsOwnerOrReadOnly)
Add an api view for the API root
## Code Before: from snippets.models import Snippet from snippets.serializers import SnippetSerializer from rest_framework import generics from rest_framework import permissions from django.contrib.auth.models import User from snippets.serializers import UserSerializer from snippets.permissions import IsOwnerOrReadOnly class UserList(generics.ListAPIView): queryset = User.objects.all() serializer_class = UserSerializer class UserDetail(generics.RetrieveAPIView): queryset = User.objects.all() serializer_class = UserSerializer class SnippetList(generics.ListCreateAPIView): queryset = Snippet.objects.all() serializer_class = SnippetSerializer permission_classes = (permissions.IsAuthenticatedOrReadOnly,) def perform_create(self, serializer): serializer.save(owner=self.request.user) class SnippetDetail(generics.RetrieveUpdateDestroyAPIView): queryset = Snippet.objects.all() serializer_class = SnippetSerializer permission_classes = (permissions.IsAuthenticatedOrReadOnly, IsOwnerOrReadOnly) ## Instruction: Add an api view for the API root ## Code After: from snippets.models import Snippet from snippets.serializers import SnippetSerializer from rest_framework import generics from rest_framework import permissions from django.contrib.auth.models import User from snippets.serializers import UserSerializer from snippets.permissions import IsOwnerOrReadOnly from rest_framework.decorators import api_view from rest_framework.response import Response from rest_framework.reverse import reverse @api_view(("GET", )) def api_root(request, format=None): return Response({ "users": reverse("user-list", request=request, format=format), "snippets": reverse("snippet-list", request=request, format=format) }) class UserList(generics.ListAPIView): queryset = User.objects.all() serializer_class = UserSerializer class UserDetail(generics.RetrieveAPIView): queryset = User.objects.all() serializer_class = UserSerializer class SnippetList(generics.ListCreateAPIView): queryset = Snippet.objects.all() serializer_class = SnippetSerializer permission_classes = (permissions.IsAuthenticatedOrReadOnly,) def perform_create(self, serializer): serializer.save(owner=self.request.user) class SnippetDetail(generics.RetrieveUpdateDestroyAPIView): queryset = Snippet.objects.all() serializer_class = SnippetSerializer permission_classes = (permissions.IsAuthenticatedOrReadOnly, IsOwnerOrReadOnly)
// ... existing code ... from snippets.permissions import IsOwnerOrReadOnly from rest_framework.decorators import api_view from rest_framework.response import Response from rest_framework.reverse import reverse @api_view(("GET", )) def api_root(request, format=None): return Response({ "users": reverse("user-list", request=request, format=format), "snippets": reverse("snippet-list", request=request, format=format) }) // ... rest of the code ...
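The reverse('user-list') and reverse('snippet-list') calls above only resolve once URL patterns with those names exist. A sketch of the matching urls.py; the paths and names follow the DRF tutorial conventions the record already uses:

from django.urls import path
from snippets import views

urlpatterns = [
    path('', views.api_root),
    path('users/', views.UserList.as_view(), name='user-list'),
    path('snippets/', views.SnippetList.as_view(), name='snippet-list'),
]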
6603657df4626a9e2c82a3658c63314c7a9537f4
src/experimental/os_walk_with_filechecker.py
src/experimental/os_walk_with_filechecker.py
import os, sys walk_dir = sys.argv[1] print("walk directory: " + walk_dir) print("Walk directory (absolute) = " + os.path.abspath(walk_dir)) print("\n\n\n\n\n\n\n\n\n") for root, subdirs, files in os.walk(walk_dir): print(root) #list_file_path = os.path.join(root, "dirlist.txt") #print("dirlist = ", list_file_path) #for subdir in subdirs: # print(subdir) for filename in files: file_path = os.path.join(root, filename) print(file_path + filename)
import os, sys, datetime #dt = datetime.datetime(1970,1,1).total_seconds() # print(dt) walk_dir = sys.argv[1] with open("fsscan.scan", "w") as f: print("SCANFROM" + walk_dir) for root, subdirs, files in os.walk(walk_dir): f.write(root + "\n") for filename in files: file_path = os.path.join(root, filename) f.write(file_path + filename + "\n")
Add write dump to file
Add write dump to file
Python
bsd-3-clause
paulkramme/btsoot
- import os, sys + import os, sys, datetime + + #dt = datetime.datetime(1970,1,1).total_seconds() + # print(dt) walk_dir = sys.argv[1] - print("walk directory: " + walk_dir) + with open("fsscan.scan", "w") as f: + print("SCANFROM" + walk_dir) - print("Walk directory (absolute) = " + os.path.abspath(walk_dir)) - print("\n\n\n\n\n\n\n\n\n") - for root, subdirs, files in os.walk(walk_dir): + for root, subdirs, files in os.walk(walk_dir): + f.write(root + "\n") - print(root) - #list_file_path = os.path.join(root, "dirlist.txt") - #print("dirlist = ", list_file_path) - - #for subdir in subdirs: - # print(subdir) for filename in files: file_path = os.path.join(root, filename) - print(file_path + filename) + f.write(file_path + filename + "\n")
Add write dump to file
## Code Before: import os, sys walk_dir = sys.argv[1] print("walk directory: " + walk_dir) print("Walk directory (absolute) = " + os.path.abspath(walk_dir)) print("\n\n\n\n\n\n\n\n\n") for root, subdirs, files in os.walk(walk_dir): print(root) #list_file_path = os.path.join(root, "dirlist.txt") #print("dirlist = ", list_file_path) #for subdir in subdirs: # print(subdir) for filename in files: file_path = os.path.join(root, filename) print(file_path + filename) ## Instruction: Add write dump to file ## Code After: import os, sys, datetime #dt = datetime.datetime(1970,1,1).total_seconds() # print(dt) walk_dir = sys.argv[1] with open("fsscan.scan", "w") as f: print("SCANFROM" + walk_dir) for root, subdirs, files in os.walk(walk_dir): f.write(root + "\n") for filename in files: file_path = os.path.join(root, filename) f.write(file_path + filename + "\n")
# ... existing code ... import os, sys, datetime #dt = datetime.datetime(1970,1,1).total_seconds() # print(dt) # ... modified code ... with open("fsscan.scan", "w") as f: print("SCANFROM" + walk_dir) for root, subdirs, files in os.walk(walk_dir): f.write(root + "\n") ... file_path = os.path.join(root, filename) f.write(file_path + filename + "\n") # ... rest of the code ...
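One detail both versions of the walker share: os.path.join(root, filename) already ends with the file name, so file_path + filename writes every name twice. A corrected standalone sketch of the dump loop:

import os
import sys

walk_dir = sys.argv[1]
with open('fsscan.scan', 'w') as f:
    f.write('SCANFROM ' + walk_dir + '\n')
    for root, _subdirs, files in os.walk(walk_dir):
        f.write(root + '\n')
        for filename in files:
            # join(root, filename) already includes the name once
            f.write(os.path.join(root, filename) + '\n')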
1b40a51e371d10cc37f4d8f8c7557dbc741d690f
butterfly/ImageLayer/HDF5.py
butterfly/ImageLayer/HDF5.py
from Datasource import Datasource import numpy as np import h5py class HDF5(Datasource): pass @classmethod def load_tile(ds, query): Sk,Sj,Si = query.all_scales path = query.OUTPUT.INFO.PATH.VALUE (K0,J0,I0),(K1,J1,I1) = query.source_bounds with h5py.File(path) as fd: vol = fd[fd.keys()[0]] return vol[::Sk,::Sj,::Si]
from Datasource import Datasource import numpy as np import h5py class HDF5(Datasource): pass @classmethod def load_tile(ds, query): Sk,Sj,Si = query.all_scales path = query.OUTPUT.INFO.PATH.VALUE z0,y0,x0 = query.index_zyx*query.blocksize z1,y1,x1 = query.index_zyx*query.blocksize + query.blocksize with h5py.File(path) as fd: vol = fd[fd.keys()[0]] return vol[z0:z1:Sk,y0:y1:Sj,x0:x1:Si]
Fix loading a whole tile into memory.
Fix loading a whole tile into memory.
Python
mit
Rhoana/butterfly,Rhoana/butterfly,Rhoana/butterfly2,Rhoana/butterfly,Rhoana/butterfly
from Datasource import Datasource import numpy as np import h5py class HDF5(Datasource): pass @classmethod def load_tile(ds, query): Sk,Sj,Si = query.all_scales path = query.OUTPUT.INFO.PATH.VALUE - (K0,J0,I0),(K1,J1,I1) = query.source_bounds + z0,y0,x0 = query.index_zyx*query.blocksize + z1,y1,x1 = query.index_zyx*query.blocksize + query.blocksize with h5py.File(path) as fd: vol = fd[fd.keys()[0]] - return vol[::Sk,::Sj,::Si] + return vol[z0:z1:Sk,y0:y1:Sj,x0:x1:Si]
Fix loading a whole tile into memory.
## Code Before: from Datasource import Datasource import numpy as np import h5py class HDF5(Datasource): pass @classmethod def load_tile(ds, query): Sk,Sj,Si = query.all_scales path = query.OUTPUT.INFO.PATH.VALUE (K0,J0,I0),(K1,J1,I1) = query.source_bounds with h5py.File(path) as fd: vol = fd[fd.keys()[0]] return vol[::Sk,::Sj,::Si] ## Instruction: Fix loading a whole tile into memory. ## Code After: from Datasource import Datasource import numpy as np import h5py class HDF5(Datasource): pass @classmethod def load_tile(ds, query): Sk,Sj,Si = query.all_scales path = query.OUTPUT.INFO.PATH.VALUE z0,y0,x0 = query.index_zyx*query.blocksize z1,y1,x1 = query.index_zyx*query.blocksize + query.blocksize with h5py.File(path) as fd: vol = fd[fd.keys()[0]] return vol[z0:z1:Sk,y0:y1:Sj,x0:x1:Si]
// ... existing code ... path = query.OUTPUT.INFO.PATH.VALUE z0,y0,x0 = query.index_zyx*query.blocksize z1,y1,x1 = query.index_zyx*query.blocksize + query.blocksize // ... modified code ... vol = fd[fd.keys()[0]] return vol[z0:z1:Sk,y0:y1:Sj,x0:x1:Si] // ... rest of the code ...
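The fix above reads only the requested block instead of striding across the whole dataset. A self-contained sketch of the same bounded-slice-with-stride pattern on a throwaway file; all names here are illustrative:

import h5py
import numpy as np

with h5py.File('demo.h5', 'w') as fd:
    fd.create_dataset('vol', data=np.arange(4 ** 3).reshape(4, 4, 4))

with h5py.File('demo.h5', 'r') as fd:
    vol = fd[list(fd.keys())[0]]
    # [z0:z1:Sk, y0:y1:Sj, x0:x1:Si]: only this block is read from disk
    block = vol[0:2:1, 0:2:1, 0:2:1]
    print(block.shape)  # (2, 2, 2)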
5363224395b26528465417ff550d6a2163cbe8e6
spacy/zh/__init__.py
spacy/zh/__init__.py
from ..language import Language from ..tokenizer import Tokenizer from ..tagger import Tagger class CharacterTokenizer(Tokenizer): def __call__(self, text): return self.tokens_from_list(list(text)) class Chinese(Language): lang = u'zh' def __call__(self, text): doc = self.tokenizer.tokens_from_list(list(text)) self.tagger(doc) self.merge_characters(doc) return doc def merge_characters(self, doc): start = 0 chunks = [] for token in doc: if token.tag_ != 'CHAR': chunk = doc[start : token.i + 1] chunks.append(chunk) start = token.i + 1 text = doc.text for chunk in chunks: chunk.merge(chunk[-1].tag_, chunk.text, u'')
import jieba from ..language import Language from ..tokens import Doc class Chinese(Language): lang = u'zh' def make_doc(self, text): words = list(jieba.cut(text, cut_all=True)) return Doc(self.vocab, words=words, spaces=[False]*len(words))
Add draft Jieba tokenizer for Chinese
Add draft Jieba tokenizer for Chinese
Python
mit
spacy-io/spaCy,explosion/spaCy,oroszgy/spaCy.hu,explosion/spaCy,recognai/spaCy,honnibal/spaCy,banglakit/spaCy,explosion/spaCy,Gregory-Howard/spaCy,aikramer2/spaCy,recognai/spaCy,banglakit/spaCy,recognai/spaCy,oroszgy/spaCy.hu,oroszgy/spaCy.hu,spacy-io/spaCy,aikramer2/spaCy,aikramer2/spaCy,explosion/spaCy,honnibal/spaCy,raphael0202/spaCy,spacy-io/spaCy,raphael0202/spaCy,honnibal/spaCy,raphael0202/spaCy,aikramer2/spaCy,honnibal/spaCy,spacy-io/spaCy,recognai/spaCy,Gregory-Howard/spaCy,recognai/spaCy,spacy-io/spaCy,oroszgy/spaCy.hu,raphael0202/spaCy,banglakit/spaCy,recognai/spaCy,explosion/spaCy,raphael0202/spaCy,oroszgy/spaCy.hu,Gregory-Howard/spaCy,aikramer2/spaCy,explosion/spaCy,banglakit/spaCy,Gregory-Howard/spaCy,raphael0202/spaCy,oroszgy/spaCy.hu,Gregory-Howard/spaCy,banglakit/spaCy,spacy-io/spaCy,banglakit/spaCy,Gregory-Howard/spaCy,aikramer2/spaCy
+ import jieba + from ..language import Language + from ..tokens import Doc - from ..tokenizer import Tokenizer - from ..tagger import Tagger - - - class CharacterTokenizer(Tokenizer): - def __call__(self, text): - return self.tokens_from_list(list(text)) class Chinese(Language): lang = u'zh' - def __call__(self, text): + def make_doc(self, text): + words = list(jieba.cut(text, cut_all=True)) + return Doc(self.vocab, words=words, spaces=[False]*len(words)) - doc = self.tokenizer.tokens_from_list(list(text)) - self.tagger(doc) - self.merge_characters(doc) - return doc - def merge_characters(self, doc): - start = 0 - chunks = [] - for token in doc: - if token.tag_ != 'CHAR': - chunk = doc[start : token.i + 1] - chunks.append(chunk) - start = token.i + 1 - text = doc.text - for chunk in chunks: - chunk.merge(chunk[-1].tag_, chunk.text, u'') -
Add draft Jieba tokenizer for Chinese
## Code Before: from ..language import Language from ..tokenizer import Tokenizer from ..tagger import Tagger class CharacterTokenizer(Tokenizer): def __call__(self, text): return self.tokens_from_list(list(text)) class Chinese(Language): lang = u'zh' def __call__(self, text): doc = self.tokenizer.tokens_from_list(list(text)) self.tagger(doc) self.merge_characters(doc) return doc def merge_characters(self, doc): start = 0 chunks = [] for token in doc: if token.tag_ != 'CHAR': chunk = doc[start : token.i + 1] chunks.append(chunk) start = token.i + 1 text = doc.text for chunk in chunks: chunk.merge(chunk[-1].tag_, chunk.text, u'') ## Instruction: Add draft Jieba tokenizer for Chinese ## Code After: import jieba from ..language import Language from ..tokens import Doc class Chinese(Language): lang = u'zh' def make_doc(self, text): words = list(jieba.cut(text, cut_all=True)) return Doc(self.vocab, words=words, spaces=[False]*len(words))
# ... existing code ... import jieba from ..language import Language from ..tokens import Doc # ... modified code ... def make_doc(self, text): words = list(jieba.cut(text, cut_all=True)) return Doc(self.vocab, words=words, spaces=[False]*len(words)) # ... rest of the code ...
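A note on the tokenizer above: cut_all=True is jieba's full mode, which emits every dictionary word it can find, so the resulting segments overlap; for a Doc of non-overlapping tokens, accurate mode (cut_all=False, the default) is usually the better fit. A quick comparison, assuming jieba is installed:

import jieba

text = '我来到北京清华大学'
print(list(jieba.cut(text, cut_all=True)))   # full mode: overlapping segments
print(list(jieba.cut(text, cut_all=False)))  # accurate mode: a partition of the text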
73b1273de8f8e17adf51893bdbd24d2067866297
bootstrap/helpers.py
bootstrap/helpers.py
from byceps.services.orga.models import OrgaFlag from byceps.services.user.models.user import User from byceps.services.user import service as user_service from .util import add_to_database # -------------------------------------------------------------------- # # users @add_to_database def create_user(screen_name, email_address, *, enabled=False): user = user_service.build_user(screen_name, email_address) user.enabled = enabled return user def find_user(screen_name): return User.query.filter_by(screen_name=screen_name).one_or_none() def get_user(screen_name): return User.query.filter_by(screen_name=screen_name).one() # -------------------------------------------------------------------- # # orgas @add_to_database def promote_orga(brand, user): return OrgaFlag(brand.id, user.id)
from byceps.services.user.models.user import User from byceps.services.user import service as user_service from .util import add_to_database # -------------------------------------------------------------------- # # users @add_to_database def create_user(screen_name, email_address, *, enabled=False): user = user_service.build_user(screen_name, email_address) user.enabled = enabled return user def find_user(screen_name): return User.query.filter_by(screen_name=screen_name).one_or_none() def get_user(screen_name): return User.query.filter_by(screen_name=screen_name).one()
Remove bootstrap helper to promote a user to organizer as there is a service function (and a script calling it) for that
Remove bootstrap helper to promote a user to organizer as there is a service function (and a script calling it) for that
Python
bsd-3-clause
m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps,m-ober/byceps
- from byceps.services.orga.models import OrgaFlag from byceps.services.user.models.user import User from byceps.services.user import service as user_service from .util import add_to_database # -------------------------------------------------------------------- # # users @add_to_database def create_user(screen_name, email_address, *, enabled=False): user = user_service.build_user(screen_name, email_address) user.enabled = enabled return user def find_user(screen_name): return User.query.filter_by(screen_name=screen_name).one_or_none() def get_user(screen_name): return User.query.filter_by(screen_name=screen_name).one() - - # -------------------------------------------------------------------- # - # orgas - - - @add_to_database - def promote_orga(brand, user): - return OrgaFlag(brand.id, user.id) -
Remove bootstrap helper to promote a user to organizer as there is a service function (and a script calling it) for that
## Code Before: from byceps.services.orga.models import OrgaFlag from byceps.services.user.models.user import User from byceps.services.user import service as user_service from .util import add_to_database # -------------------------------------------------------------------- # # users @add_to_database def create_user(screen_name, email_address, *, enabled=False): user = user_service.build_user(screen_name, email_address) user.enabled = enabled return user def find_user(screen_name): return User.query.filter_by(screen_name=screen_name).one_or_none() def get_user(screen_name): return User.query.filter_by(screen_name=screen_name).one() # -------------------------------------------------------------------- # # orgas @add_to_database def promote_orga(brand, user): return OrgaFlag(brand.id, user.id) ## Instruction: Remove bootstrap helper to promote a user to organizer as there is a service function (and a script calling it) for that ## Code After: from byceps.services.user.models.user import User from byceps.services.user import service as user_service from .util import add_to_database # -------------------------------------------------------------------- # # users @add_to_database def create_user(screen_name, email_address, *, enabled=False): user = user_service.build_user(screen_name, email_address) user.enabled = enabled return user def find_user(screen_name): return User.query.filter_by(screen_name=screen_name).one_or_none() def get_user(screen_name): return User.query.filter_by(screen_name=screen_name).one()
# ... existing code ... from byceps.services.user.models.user import User # ... modified code ... return User.query.filter_by(screen_name=screen_name).one() # ... rest of the code ...
402056a272c94d3d28da62b08cac14ace18c835a
test/python_api/default-constructor/sb_address.py
test/python_api/default-constructor/sb_address.py
import sys import lldb def fuzz_obj(obj): obj.GetFileAddress() obj.GetLoadAddress(lldb.SBTarget()) obj.OffsetAddress(sys.maxint) obj.GetDescription(lldb.SBStream()) obj.Clear()
import sys import lldb def fuzz_obj(obj): obj.GetFileAddress() obj.GetLoadAddress(lldb.SBTarget()) obj.SetLoadAddress(0xffff, lldb.SBTarget()) obj.OffsetAddress(sys.maxint) obj.GetDescription(lldb.SBStream()) obj.Clear()
Add new API for SBAddress to the fuzz test:
Add new API for SBAddress to the fuzz test: SetLoadAddress (lldb::addr_t load_addr, lldb::SBTarget &target); git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@135793 91177308-0d34-0410-b5e6-96231b3b80d8
Python
apache-2.0
llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb
import sys import lldb def fuzz_obj(obj): obj.GetFileAddress() obj.GetLoadAddress(lldb.SBTarget()) + obj.SetLoadAddress(0xffff, lldb.SBTarget()) obj.OffsetAddress(sys.maxint) obj.GetDescription(lldb.SBStream()) obj.Clear()
Add new API for SBAddress to the fuzz test:
## Code Before: import sys import lldb def fuzz_obj(obj): obj.GetFileAddress() obj.GetLoadAddress(lldb.SBTarget()) obj.OffsetAddress(sys.maxint) obj.GetDescription(lldb.SBStream()) obj.Clear() ## Instruction: Add new API for SBAddress to the fuzz test: ## Code After: import sys import lldb def fuzz_obj(obj): obj.GetFileAddress() obj.GetLoadAddress(lldb.SBTarget()) obj.SetLoadAddress(0xffff, lldb.SBTarget()) obj.OffsetAddress(sys.maxint) obj.GetDescription(lldb.SBStream()) obj.Clear()
// ... existing code ... obj.GetLoadAddress(lldb.SBTarget()) obj.SetLoadAddress(0xffff, lldb.SBTarget()) obj.OffsetAddress(sys.maxint) // ... rest of the code ...
c83dcddd7451d53214cf02f9ad72e280970a2dc8
hydromet/catchments.py
hydromet/catchments.py
import os import numpy as np from catchment_cutter import get_grid_cells def create_grids(catchments, in_directory, out_directory, grid_file): """ Create grid files for the supplied catchments. :param catchments: List of catchment IDs. :type catchments: Array(string) :param in_directory: Path to catchment boundary (geojson) directory. :type in_directory: string :param out_directory: Output directory for catchment grid csv files. :type out_directory: string :param grid_file: Path to input grid file for coordinates to match boundaries against. :type grid_file: string """ for catchment in catchments: boundary = os.path.join(in_directory, catchment + '.json') cells = np.asarray(get_grid_cells(boundary, grid_file)) np.savetxt(os.path.join(out_directory, catchment + '.csv'), cells, fmt="%.2f", delimiter=',')
import os import numpy as np from catchment_tools import get_grid_cells def create_grids(catchments, in_directory, out_directory, grid_file): """ Create grid files for the supplied catchments. :param catchments: List of catchment IDs. :type catchments: Array(string) :param in_directory: Path to catchment boundary (geojson) directory. :type in_directory: string :param out_directory: Output directory for catchment grid csv files. :type out_directory: string :param grid_file: Path to input grid file for coordinates to match boundaries against. :type grid_file: string """ for catchment in catchments: boundary = os.path.join(in_directory, catchment + '.json') cells = np.asarray(get_grid_cells(boundary, grid_file, 0.3)) np.savetxt(os.path.join(out_directory, catchment + '.csv'), cells, fmt="%.2f", delimiter=',')
Use updated catchment tools import
Use updated catchment tools import
Python
bsd-3-clause
amacd31/hydromet-toolkit,amacd31/hydromet-toolkit
import os import numpy as np - from catchment_cutter import get_grid_cells + from catchment_tools import get_grid_cells def create_grids(catchments, in_directory, out_directory, grid_file): """ Create grid files for the supplied catchments. :param catchments: List of catchment IDs. :type catchments: Array(string) :param in_directory: Path to catchment boundary (geojson) directory. :type in_directory: string :param out_directory: Output directory for catchment grid csv files. :type out_directory: string :param grid_file: Path to input grid file for coordinates to match boundaries against. :type grid_file: string """ for catchment in catchments: boundary = os.path.join(in_directory, catchment + '.json') - cells = np.asarray(get_grid_cells(boundary, grid_file)) + cells = np.asarray(get_grid_cells(boundary, grid_file, 0.3)) np.savetxt(os.path.join(out_directory, catchment + '.csv'), cells, fmt="%.2f", delimiter=',')
Use updated catchment tools import
## Code Before: import os import numpy as np from catchment_cutter import get_grid_cells def create_grids(catchments, in_directory, out_directory, grid_file): """ Create grid files for the supplied catchments. :param catchments: List of catchment IDs. :type catchments: Array(string) :param in_directory: Path to catchment boundary (geojson) directory. :type in_directory: string :param out_directory: Output directory for catchment grid csv files. :type out_directory: string :param grid_file: Path to input grid file for coordinates to match boundaries against. :type grid_file: string """ for catchment in catchments: boundary = os.path.join(in_directory, catchment + '.json') cells = np.asarray(get_grid_cells(boundary, grid_file)) np.savetxt(os.path.join(out_directory, catchment + '.csv'), cells, fmt="%.2f", delimiter=',') ## Instruction: Use updated catchment tools import ## Code After: import os import numpy as np from catchment_tools import get_grid_cells def create_grids(catchments, in_directory, out_directory, grid_file): """ Create grid files for the supplied catchments. :param catchments: List of catchment IDs. :type catchments: Array(string) :param in_directory: Path to catchment boundary (geojson) directory. :type in_directory: string :param out_directory: Output directory for catchment grid csv files. :type out_directory: string :param grid_file: Path to input grid file for coordinates to match boundaries against. :type grid_file: string """ for catchment in catchments: boundary = os.path.join(in_directory, catchment + '.json') cells = np.asarray(get_grid_cells(boundary, grid_file, 0.3)) np.savetxt(os.path.join(out_directory, catchment + '.csv'), cells, fmt="%.2f", delimiter=',')
# ... existing code ... from catchment_tools import get_grid_cells # ... modified code ... boundary = os.path.join(in_directory, catchment + '.json') cells = np.asarray(get_grid_cells(boundary, grid_file, 0.3)) # ... rest of the code ...
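A quick driver sketch for the updated call, assuming catchment_tools is importable and that the new third argument to get_grid_cells (the 0.3 above) is a minimum cell-coverage fraction; the record does not document its meaning, so that reading is a guess. The module name, catchment IDs and paths are all invented for illustration.

```python
from grids import create_grids  # hypothetical module holding the function above

# Hypothetical gauge IDs; each needs a matching <id>.json boundary file.
catchments = ['410730', '410761']

create_grids(
    catchments,
    in_directory='boundaries/',   # geojson catchment boundaries
    out_directory='grids/',       # one <id>.csv is written per catchment
    grid_file='grid_coords.txt',  # hypothetical grid coordinate file
)
```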
e72fa5ab59a8c904d525a33652424b0acf5c9de4
cms/widgets.py
cms/widgets.py
from django.forms.widgets import TextInput class RichTextInput(TextInput): template_name = 'cms/forms/widgets/rich_text.html'
from django.forms.widgets import Textarea class RichTextInput(Textarea): template_name = 'cms/forms/widgets/rich_text.html'
Switch TextInput for Textarea for RichText widget base class
Switch TextInput for Textarea for RichText widget base class
Python
agpl-3.0
Inboxen/Inboxen,Inboxen/Inboxen,Inboxen/Inboxen,Inboxen/Inboxen
- from django.forms.widgets import TextInput + from django.forms.widgets import Textarea - class RichTextInput(TextInput): + class RichTextInput(Textarea): template_name = 'cms/forms/widgets/rich_text.html'
Switch TextInput for Textarea for RichText widget base class
## Code Before: from django.forms.widgets import TextInput class RichTextInput(TextInput): template_name = 'cms/forms/widgets/rich_text.html' ## Instruction: Switch TextInput for Textarea for RichText widget base class ## Code After: from django.forms.widgets import Textarea class RichTextInput(Textarea): template_name = 'cms/forms/widgets/rich_text.html'
# ... existing code ... from django.forms.widgets import Textarea # ... modified code ... class RichTextInput(Textarea): template_name = 'cms/forms/widgets/rich_text.html' # ... rest of the code ...
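A sketch of attaching the widget to a form, assuming cms/forms/widgets/rich_text.html is on the template path; the form and field names are invented. The swap to Textarea presumably matters because Textarea treats the bound value as element content rather than a value attribute, which suits multi-line rich text.

```python
from django import forms

from cms.widgets import RichTextInput


class PageForm(forms.Form):
    # Rendered through cms/forms/widgets/rich_text.html; the Textarea base
    # class supplies widget context suited to multi-line values.
    body = forms.CharField(widget=RichTextInput())
```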
ff308a17c79fe2c27dcb2a1f888ee1332f6fdc11
events.py
events.py
import Natural.util as util import sublime, sublime_plugin class PerformEventListener(sublime_plugin.EventListener): """Suggest subroutine completions for the perform statement.""" def on_query_completions(self, view, prefix, points): if not util.is_natural_file(view): return None texts = util.text_preceding_points(view, points) if all([text.strip().endswith('perform') for text in texts]): subroutines = util.find_text_by_selector(view, 'entity.name.function.natural') if not subroutines: return None subroutines.sort() completions = [[sub, sub] for sub in subroutines] return (completions, sublime.INHIBIT_WORD_COMPLETIONS)
import Natural.util as util import sublime, sublime_plugin class PerformEventListener(sublime_plugin.EventListener): """Suggest subroutine completions for the perform statement.""" def on_query_completions(self, view, prefix, points): if not util.is_natural_file(view): return None texts = util.text_preceding_points(view, points) if all([text.strip().endswith('perform') for text in texts]): subroutines = util.find_text_by_selector(view, 'entity.name.function.natural') if not subroutines: return None subroutines.sort() completions = [[sub, sub] for sub in subroutines] return (completions, sublime.INHIBIT_WORD_COMPLETIONS) class AddRulerToColumn72Listener(sublime_plugin.EventListener): """Add a ruler to column 72 when a Natural file is opened. If the user has other rulers, they're not messed with.""" def on_load(self, view): if not util.is_natural_file(view): return rulers = view.settings().get('rulers') if 72 not in rulers: rulers.append(72) rulers.sort() view.settings().set('rulers', rulers)
Add a ruler to column 72
Add a ruler to column 72
Python
mit
andref/Unnatural-Sublime-Package
import Natural.util as util import sublime, sublime_plugin class PerformEventListener(sublime_plugin.EventListener): """Suggest subroutine completions for the perform statement.""" def on_query_completions(self, view, prefix, points): if not util.is_natural_file(view): return None texts = util.text_preceding_points(view, points) if all([text.strip().endswith('perform') for text in texts]): subroutines = util.find_text_by_selector(view, 'entity.name.function.natural') if not subroutines: return None subroutines.sort() completions = [[sub, sub] for sub in subroutines] return (completions, sublime.INHIBIT_WORD_COMPLETIONS) + + class AddRulerToColumn72Listener(sublime_plugin.EventListener): + """Add a ruler to column 72 when a Natural file is opened. If the user has + other rulers, they're not messed with.""" + + def on_load(self, view): + if not util.is_natural_file(view): + return + rulers = view.settings().get('rulers') + if 72 not in rulers: + rulers.append(72) + rulers.sort() + view.settings().set('rulers', rulers) +
Add a ruler to column 72
## Code Before: import Natural.util as util import sublime, sublime_plugin class PerformEventListener(sublime_plugin.EventListener): """Suggest subroutine completions for the perform statement.""" def on_query_completions(self, view, prefix, points): if not util.is_natural_file(view): return None texts = util.text_preceding_points(view, points) if all([text.strip().endswith('perform') for text in texts]): subroutines = util.find_text_by_selector(view, 'entity.name.function.natural') if not subroutines: return None subroutines.sort() completions = [[sub, sub] for sub in subroutines] return (completions, sublime.INHIBIT_WORD_COMPLETIONS) ## Instruction: Add a ruler to column 72 ## Code After: import Natural.util as util import sublime, sublime_plugin class PerformEventListener(sublime_plugin.EventListener): """Suggest subroutine completions for the perform statement.""" def on_query_completions(self, view, prefix, points): if not util.is_natural_file(view): return None texts = util.text_preceding_points(view, points) if all([text.strip().endswith('perform') for text in texts]): subroutines = util.find_text_by_selector(view, 'entity.name.function.natural') if not subroutines: return None subroutines.sort() completions = [[sub, sub] for sub in subroutines] return (completions, sublime.INHIBIT_WORD_COMPLETIONS) class AddRulerToColumn72Listener(sublime_plugin.EventListener): """Add a ruler to column 72 when a Natural file is opened. If the user has other rulers, they're not messed with.""" def on_load(self, view): if not util.is_natural_file(view): return rulers = view.settings().get('rulers') if 72 not in rulers: rulers.append(72) rulers.sort() view.settings().set('rulers', rulers)
// ... existing code ... return (completions, sublime.INHIBIT_WORD_COMPLETIONS) class AddRulerToColumn72Listener(sublime_plugin.EventListener): """Add a ruler to column 72 when a Natural file is opened. If the user has other rulers, they're not messed with.""" def on_load(self, view): if not util.is_natural_file(view): return rulers = view.settings().get('rulers') if 72 not in rulers: rulers.append(72) rulers.sort() view.settings().set('rulers', rulers) // ... rest of the code ...
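A toy illustration of the merge logic in on_load, with a plain list standing in for Sublime's view.settings() API, to show that an existing ruler survives and 72 lands in sorted order.

```python
rulers = [80]  # pretend the user already had a ruler at column 80
if 72 not in rulers:
    rulers.append(72)
    rulers.sort()
assert rulers == [72, 80]  # the user's ruler is kept, 72 is slotted in
```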
9b4f83ec89c76d8a5b5d0502e2903e2821078271
logger.py
logger.py
import sys import serial def log_serial(filename, device='/dev/ttyACM0', baud=9600): ser = serial.Serial(device, baud) outfile = open(filename, 'w') while True: line = ser.readline() outfile.write(line) if __name__ == '__main__': filename = sys.argv[1] log_serial(filename)
import sys import serial def log_serial(filename, device='/dev/ttyACM0', baud=9600): ser = serial.Serial(device, baud) outfile = open(filename, 'w') while True: line = ser.readline() print(line) outfile.write(line) if __name__ == '__main__': filename = sys.argv[1] log_serial(filename)
Print lines that are logged
Print lines that are logged
Python
mit
wapcaplet/ardiff
import sys import serial def log_serial(filename, device='/dev/ttyACM0', baud=9600): ser = serial.Serial(device, baud) outfile = open(filename, 'w') while True: line = ser.readline() + print(line) outfile.write(line) if __name__ == '__main__': filename = sys.argv[1] log_serial(filename)
Print lines that are logged
## Code Before: import sys import serial def log_serial(filename, device='/dev/ttyACM0', baud=9600): ser = serial.Serial(device, baud) outfile = open(filename, 'w') while True: line = ser.readline() outfile.write(line) if __name__ == '__main__': filename = sys.argv[1] log_serial(filename) ## Instruction: Print lines that are logged ## Code After: import sys import serial def log_serial(filename, device='/dev/ttyACM0', baud=9600): ser = serial.Serial(device, baud) outfile = open(filename, 'w') while True: line = ser.readline() print(line) outfile.write(line) if __name__ == '__main__': filename = sys.argv[1] log_serial(filename)
... line = ser.readline() print(line) outfile.write(line) ...
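The script above is Python 2 style: under Python 3, ser.readline() returns bytes, so print(line) shows a b'...' repr and outfile.write(line) raises TypeError on a text-mode file. A hedged Python 3 variant, assuming pyserial and a device that emits roughly ASCII text:

```python
import sys

import serial  # pyserial


def log_serial(filename, device='/dev/ttyACM0', baud=9600):
    ser = serial.Serial(device, baud)
    with open(filename, 'w') as outfile:
        while True:
            # readline() yields bytes on Python 3; decode before text I/O.
            line = ser.readline().decode('ascii', errors='replace')
            print(line, end='')  # the line already carries its newline
            outfile.write(line)
            outfile.flush()      # keep the file current during long captures


if __name__ == '__main__':
    log_serial(sys.argv[1])
```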
3fe4f1788d82719eac70ffe0fbbbae4dbe85f00b
evexml/forms.py
evexml/forms.py
from django import forms from django.forms.fields import IntegerField, CharField import evelink.account class AddAPIForm(forms.Form): key_id = IntegerField() v_code = CharField(max_length=64, min_length=1) def clean(self): self._clean() return super(AddAPIForm, self).clean() def _clean(self): """Check the access mask and characters of the supplied keypair. """ key_id = self.cleaned_data.get('key_id') v_code = self.cleaned_data.get('v_code') if key_id and v_code: api = evelink.api.API(api_key=(key_id, v_code)) account = evelink.account.Account(api) try: key_info = account.key_info().result except evelink.api.APIError as error: self.add_error(None, error.message) return if key_info['type'] != 'account': self.add_error(None, 'The API key should select Character: All')
from django import forms from django.forms.fields import IntegerField, CharField import evelink.account class AddAPIForm(forms.Form): key_id = IntegerField() v_code = CharField(max_length=64, min_length=1) def clean(self): self._clean() return super(AddAPIForm, self).clean() def _clean(self): """Check the access mask and characters of the supplied keypair. """ key_id = self.cleaned_data.get('key_id') v_code = self.cleaned_data.get('v_code') if not (key_id and v_code): return api = evelink.api.API(api_key=(key_id, v_code)) account = evelink.account.Account(api) try: key_info = account.key_info().result except evelink.api.APIError as error: self.add_error(None, error.message) return if key_info['type'] != 'account': self.add_error(None, 'The API key should select Character: All') if key_info['access_mask'] != 4294967295: self.add_error(None, 'The API key should have full access') if key_info['expire_ts']: self.add_error(None, 'The API key should have no expiry checked')
Implement checks to pass tests
Implement checks to pass tests
Python
mit
randomic/aniauth-tdd,randomic/aniauth-tdd
from django import forms from django.forms.fields import IntegerField, CharField import evelink.account class AddAPIForm(forms.Form): key_id = IntegerField() v_code = CharField(max_length=64, min_length=1) def clean(self): self._clean() return super(AddAPIForm, self).clean() def _clean(self): """Check the access mask and characters of the supplied keypair. """ key_id = self.cleaned_data.get('key_id') v_code = self.cleaned_data.get('v_code') - if key_id and v_code: + if not (key_id and v_code): - api = evelink.api.API(api_key=(key_id, v_code)) - account = evelink.account.Account(api) - try: - key_info = account.key_info().result - except evelink.api.APIError as error: - self.add_error(None, error.message) - return + return - if key_info['type'] != 'account': - self.add_error(None, 'The API key should select Character: All') + api = evelink.api.API(api_key=(key_id, v_code)) + account = evelink.account.Account(api) + try: + key_info = account.key_info().result + except evelink.api.APIError as error: + self.add_error(None, error.message) + return + if key_info['type'] != 'account': + self.add_error(None, 'The API key should select Character: All') + if key_info['access_mask'] != 4294967295: + self.add_error(None, 'The API key should have full access') + if key_info['expire_ts']: + self.add_error(None, 'The API key should have no expiry checked') +
Implement checks to pass tests
## Code Before: from django import forms from django.forms.fields import IntegerField, CharField import evelink.account class AddAPIForm(forms.Form): key_id = IntegerField() v_code = CharField(max_length=64, min_length=1) def clean(self): self._clean() return super(AddAPIForm, self).clean() def _clean(self): """Check the access mask and characters of the supplied keypair. """ key_id = self.cleaned_data.get('key_id') v_code = self.cleaned_data.get('v_code') if key_id and v_code: api = evelink.api.API(api_key=(key_id, v_code)) account = evelink.account.Account(api) try: key_info = account.key_info().result except evelink.api.APIError as error: self.add_error(None, error.message) return if key_info['type'] != 'account': self.add_error(None, 'The API key should select Character: All') ## Instruction: Implement checks to pass tests ## Code After: from django import forms from django.forms.fields import IntegerField, CharField import evelink.account class AddAPIForm(forms.Form): key_id = IntegerField() v_code = CharField(max_length=64, min_length=1) def clean(self): self._clean() return super(AddAPIForm, self).clean() def _clean(self): """Check the access mask and characters of the supplied keypair. """ key_id = self.cleaned_data.get('key_id') v_code = self.cleaned_data.get('v_code') if not (key_id and v_code): return api = evelink.api.API(api_key=(key_id, v_code)) account = evelink.account.Account(api) try: key_info = account.key_info().result except evelink.api.APIError as error: self.add_error(None, error.message) return if key_info['type'] != 'account': self.add_error(None, 'The API key should select Character: All') if key_info['access_mask'] != 4294967295: self.add_error(None, 'The API key should have full access') if key_info['expire_ts']: self.add_error(None, 'The API key should have no expiry checked')
# ... existing code ... v_code = self.cleaned_data.get('v_code') if not (key_id and v_code): return api = evelink.api.API(api_key=(key_id, v_code)) account = evelink.account.Account(api) try: key_info = account.key_info().result except evelink.api.APIError as error: self.add_error(None, error.message) return if key_info['type'] != 'account': self.add_error(None, 'The API key should select Character: All') if key_info['access_mask'] != 4294967295: self.add_error(None, 'The API key should have full access') if key_info['expire_ts']: self.add_error(None, 'The API key should have no expiry checked') # ... rest of the code ...
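Exercising the form looks like any other Django form; the key pair below is made up, and because _clean() makes a live evelink call, tests would normally stub account.key_info() rather than hit the EVE API.

```python
from evexml.forms import AddAPIForm  # the module above; Django settings must be configured

form = AddAPIForm(data={'key_id': 1234567, 'v_code': 'a' * 64})  # made-up key pair
if form.is_valid():
    print('keypair accepted')
else:
    # Errors raised via add_error(None, ...) land under '__all__'.
    print(form.errors)
```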
a268a886e0e3cab1057810f488feb6c2227414d3
users/serializers.py
users/serializers.py
from django.conf import settings from rest_framework import serializers from rest_framework.exceptions import ValidationError from users.models import User class UserSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = User fields = ( 'username', 'email', 'gravatar', 'password', 'password_repeat', settings.DRF_URL_FIELD_NAME, ) extra_kwargs = { settings.DRF_URL_FIELD_NAME: { "view_name": "users:user-detail", }, } password = serializers.CharField( write_only=True, required=True, allow_blank=False, min_length=6, max_length=32, ) password_repeat = serializers.CharField( write_only=True, required=True, allow_blank=False, min_length=6, max_length=32, ) def create(self, validated_data): if validated_data['password'] != validated_data['password']: raise ValidationError( detail={ "password_repeat": "Tow password doesn't match", } ) validated_data.pop('password_repeat') password = validated_data.pop('password') user = super(UserSerializer, self).create( validated_data, ) user.set_password(password) user.save() return user
from copy import deepcopy from django.conf import settings from django.contrib.auth import login from rest_framework import serializers from rest_framework.exceptions import ValidationError from users.models import User class UserSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = User fields = ( 'username', 'email', 'gravatar', 'password', 'password_repeat', settings.DRF_URL_FIELD_NAME, ) extra_kwargs = { settings.DRF_URL_FIELD_NAME: { "view_name": "users:user-detail", }, } password = serializers.CharField( write_only=True, required=True, allow_blank=False, min_length=6, max_length=32, ) password_repeat = serializers.CharField( write_only=True, required=True, allow_blank=False, min_length=6, max_length=32, ) def create(self, validated_data): if validated_data['password'] != validated_data['password']: raise ValidationError( detail={ "password_repeat": "Tow password doesn't match", } ) validated_data.pop('password_repeat') password = validated_data.pop('password') user = super(UserSerializer, self).create( validated_data, ) user.set_password(password) user.save() login( self.context['request'], user=user, backend=settings.AUTHENTICATION_BACKENDS[0], ) return user
Fix bug in register after login
Fix: Fix bug in register after login
Python
bsd-2-clause
pinry/pinry,lapo-luchini/pinry,pinry/pinry,lapo-luchini/pinry,lapo-luchini/pinry,pinry/pinry,pinry/pinry,lapo-luchini/pinry
+ from copy import deepcopy + from django.conf import settings + from django.contrib.auth import login from rest_framework import serializers from rest_framework.exceptions import ValidationError from users.models import User class UserSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = User fields = ( 'username', 'email', 'gravatar', 'password', 'password_repeat', settings.DRF_URL_FIELD_NAME, ) extra_kwargs = { settings.DRF_URL_FIELD_NAME: { "view_name": "users:user-detail", }, } password = serializers.CharField( write_only=True, required=True, allow_blank=False, min_length=6, max_length=32, ) password_repeat = serializers.CharField( write_only=True, required=True, allow_blank=False, min_length=6, max_length=32, ) def create(self, validated_data): if validated_data['password'] != validated_data['password']: raise ValidationError( detail={ "password_repeat": "Tow password doesn't match", } ) validated_data.pop('password_repeat') password = validated_data.pop('password') user = super(UserSerializer, self).create( validated_data, ) user.set_password(password) user.save() + login( + self.context['request'], + user=user, + backend=settings.AUTHENTICATION_BACKENDS[0], + ) return user
Fix bug in register after login
## Code Before: from django.conf import settings from rest_framework import serializers from rest_framework.exceptions import ValidationError from users.models import User class UserSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = User fields = ( 'username', 'email', 'gravatar', 'password', 'password_repeat', settings.DRF_URL_FIELD_NAME, ) extra_kwargs = { settings.DRF_URL_FIELD_NAME: { "view_name": "users:user-detail", }, } password = serializers.CharField( write_only=True, required=True, allow_blank=False, min_length=6, max_length=32, ) password_repeat = serializers.CharField( write_only=True, required=True, allow_blank=False, min_length=6, max_length=32, ) def create(self, validated_data): if validated_data['password'] != validated_data['password']: raise ValidationError( detail={ "password_repeat": "Tow password doesn't match", } ) validated_data.pop('password_repeat') password = validated_data.pop('password') user = super(UserSerializer, self).create( validated_data, ) user.set_password(password) user.save() return user ## Instruction: Fix bug in register aftter login ## Code After: from copy import deepcopy from django.conf import settings from django.contrib.auth import login from rest_framework import serializers from rest_framework.exceptions import ValidationError from users.models import User class UserSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = User fields = ( 'username', 'email', 'gravatar', 'password', 'password_repeat', settings.DRF_URL_FIELD_NAME, ) extra_kwargs = { settings.DRF_URL_FIELD_NAME: { "view_name": "users:user-detail", }, } password = serializers.CharField( write_only=True, required=True, allow_blank=False, min_length=6, max_length=32, ) password_repeat = serializers.CharField( write_only=True, required=True, allow_blank=False, min_length=6, max_length=32, ) def create(self, validated_data): if validated_data['password'] != validated_data['password']: raise ValidationError( detail={ "password_repeat": "Tow password doesn't match", } ) validated_data.pop('password_repeat') password = validated_data.pop('password') user = super(UserSerializer, self).create( validated_data, ) user.set_password(password) user.save() login( self.context['request'], user=user, backend=settings.AUTHENTICATION_BACKENDS[0], ) return user
... from copy import deepcopy from django.conf import settings from django.contrib.auth import login from rest_framework import serializers ... user.save() login( self.context['request'], user=user, backend=settings.AUTHENTICATION_BACKENDS[0], ) return user ...
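Worth noticing in this record: the guard compares validated_data['password'] to itself, so the mismatch branch can never fire in either version of the file. Below is a sketch of the check the error message suggests was intended, written as a standalone helper; this is an inference from the field names, not something the commit states.

```python
from rest_framework.exceptions import ValidationError


def check_passwords_match(validated_data):
    # Compare the two distinct fields; the committed code compares
    # 'password' against itself, which is never unequal.
    if validated_data['password'] != validated_data['password_repeat']:
        raise ValidationError(
            detail={"password_repeat": "Two passwords don't match"},
        )
```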
834b7ff81d6e2777d3952bb588a53f12f5ace5f5
setup.py
setup.py
from distutils.core import setup # If we did a straight `import regobj` here we wouldn't be able # to build on non-win32 machines. regobj = {} try: execfile("regobj.py",regobj) except ImportError: pass VERSION = regobj["__version__"] NAME = "regobj" DESCRIPTION = "Pythonic object-based access to the Windows Registry." LONG_DESC = regobj["__doc__"] AUTHOR = "Ryan Kelly" AUTHOR_EMAIL = "[email protected]" URL="https://github.com/rfk/regobj" LICENSE = "MIT" KEYWORDS = "windows registry" setup(name=NAME, version=VERSION, author=AUTHOR, author_email=AUTHOR_EMAIL, url=URL, description=DESCRIPTION, long_description=LONG_DESC, license=LICENSE, keywords=KEYWORDS, py_modules=["regobj"], )
from distutils.core import setup # If we did a straight `import regobj` here we wouldn't be able # to build on non-win32 machines. regobj = {} try: execfile("regobj.py",regobj) except ImportError: pass VERSION = regobj["__version__"] NAME = "regobj" DESCRIPTION = "Pythonic object-based access to the Windows Registry." LONG_DESC = regobj["__doc__"] AUTHOR = "Ryan Kelly" AUTHOR_EMAIL = "[email protected]" URL="https://github.com/rfk/regobj" LICENSE = "MIT" KEYWORDS = "windows registry" setup(name=NAME, version=VERSION, author=AUTHOR, author_email=AUTHOR_EMAIL, url=URL, description=DESCRIPTION, long_description=LONG_DESC, license=LICENSE, keywords=KEYWORDS, py_modules=["regobj"], classifiers=[c.strip() for c in """ Intended Audience :: Developers License :: OSI Approved :: MIT License Programming Language :: Python :: 2 Programming Language :: Python :: 3 Topic :: Software Development :: Libraries :: Python Modules """.split('\n') if c.strip()], )
Add a Python 3 classifier recommended by community
Add a Python 3 classifier recommended by community
Python
mit
rfk/regobj
from distutils.core import setup # If we did a straight `import regobj` here we wouldn't be able # to build on non-win32 machines. regobj = {} try: execfile("regobj.py",regobj) except ImportError: pass VERSION = regobj["__version__"] NAME = "regobj" DESCRIPTION = "Pythonic object-based access to the Windows Registry." LONG_DESC = regobj["__doc__"] AUTHOR = "Ryan Kelly" AUTHOR_EMAIL = "[email protected]" URL="https://github.com/rfk/regobj" LICENSE = "MIT" KEYWORDS = "windows registry" setup(name=NAME, version=VERSION, author=AUTHOR, author_email=AUTHOR_EMAIL, url=URL, description=DESCRIPTION, long_description=LONG_DESC, license=LICENSE, keywords=KEYWORDS, py_modules=["regobj"], + classifiers=[c.strip() for c in """ + Intended Audience :: Developers + License :: OSI Approved :: MIT License + Programming Language :: Python :: 2 + Programming Language :: Python :: 3 + Topic :: Software Development :: Libraries :: Python Modules + """.split('\n') if c.strip()], )
Add a Python 3 classifier recommended by community
## Code Before: from distutils.core import setup # If we did a straight `import regobj` here we wouldn't be able # to build on non-win32 machines. regobj = {} try: execfile("regobj.py",regobj) except ImportError: pass VERSION = regobj["__version__"] NAME = "regobj" DESCRIPTION = "Pythonic object-based access to the Windows Registry." LONG_DESC = regobj["__doc__"] AUTHOR = "Ryan Kelly" AUTHOR_EMAIL = "[email protected]" URL="https://github.com/rfk/regobj" LICENSE = "MIT" KEYWORDS = "windows registry" setup(name=NAME, version=VERSION, author=AUTHOR, author_email=AUTHOR_EMAIL, url=URL, description=DESCRIPTION, long_description=LONG_DESC, license=LICENSE, keywords=KEYWORDS, py_modules=["regobj"], ) ## Instruction: Add a Python 3 classifier recommended by community ## Code After: from distutils.core import setup # If we did a straight `import regobj` here we wouldn't be able # to build on non-win32 machines. regobj = {} try: execfile("regobj.py",regobj) except ImportError: pass VERSION = regobj["__version__"] NAME = "regobj" DESCRIPTION = "Pythonic object-based access to the Windows Registry." LONG_DESC = regobj["__doc__"] AUTHOR = "Ryan Kelly" AUTHOR_EMAIL = "[email protected]" URL="https://github.com/rfk/regobj" LICENSE = "MIT" KEYWORDS = "windows registry" setup(name=NAME, version=VERSION, author=AUTHOR, author_email=AUTHOR_EMAIL, url=URL, description=DESCRIPTION, long_description=LONG_DESC, license=LICENSE, keywords=KEYWORDS, py_modules=["regobj"], classifiers=[c.strip() for c in """ Intended Audience :: Developers License :: OSI Approved :: MIT License Programming Language :: Python :: 2 Programming Language :: Python :: 3 Topic :: Software Development :: Libraries :: Python Modules """.split('\n') if c.strip()], )
# ... existing code ... py_modules=["regobj"], classifiers=[c.strip() for c in """ Intended Audience :: Developers License :: OSI Approved :: MIT License Programming Language :: Python :: 2 Programming Language :: Python :: 3 Topic :: Software Development :: Libraries :: Python Modules """.split('\n') if c.strip()], ) # ... rest of the code ...
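For reference, the classifiers comprehension just strips indentation and drops blank lines, so it evaluates to the plain list below; spelling it out makes the trove entries easy to grep.

```python
classifiers = [
    "Intended Audience :: Developers",
    "License :: OSI Approved :: MIT License",
    "Programming Language :: Python :: 2",
    "Programming Language :: Python :: 3",
    "Topic :: Software Development :: Libraries :: Python Modules",
]
```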
08542b47b127d6bcf128bdedb5f25956f909784e
website_snippet_anchor/__openerp__.py
website_snippet_anchor/__openerp__.py
{ "name": "Set Snippet's Anchor", "summary": "Allow to reach a concrete section in the page", "version": "8.0.1.0.0", "category": "Website", "website": "http://www.antiun.com", "author": "Antiun Ingeniería S.L., Odoo Community Association (OCA)", "license": "AGPL-3", "application": False, "installable": True, "external_dependencies": { "python": [], "bin": [], }, "depends": [ "website", ], "data": [ "views/assets.xml", "views/snippets.xml", ], }
{ "name": "Set Snippet's Anchor", "summary": "Allow to reach a concrete section in the page", "version": "8.0.1.0.0", "category": "Website", "website": "http://www.antiun.com", "author": "Antiun Ingeniería S.L., Odoo Community Association (OCA)", "license": "AGPL-3", "application": False, "installable": True, "depends": [ "website", ], "data": [ "views/assets.xml", "views/snippets.xml", ], }
Remove unused keys from manifest.
Remove unused keys from manifest.
Python
agpl-3.0
pedrobaeza/website,brain-tec/website,LasLabs/website,gfcapalbo/website,gfcapalbo/website,acsone/website,LasLabs/website,LasLabs/website,open-synergy/website,pedrobaeza/website,brain-tec/website,pedrobaeza/website,nuobit/website,nuobit/website,nuobit/website,gfcapalbo/website,Endika/website,pedrobaeza/website,Yajo/website,gfcapalbo/website,Endika/website,open-synergy/website,Antiun/website,kaerdsar/website,open-synergy/website,open-synergy/website,brain-tec/website,Yajo/website,acsone/website,nuobit/website,acsone/website,kaerdsar/website,Endika/website,acsone/website,LasLabs/website,Antiun/website,Endika/website,Yajo/website,Antiun/website,kaerdsar/website,brain-tec/website,Yajo/website,Antiun/website
{ "name": "Set Snippet's Anchor", "summary": "Allow to reach a concrete section in the page", "version": "8.0.1.0.0", "category": "Website", "website": "http://www.antiun.com", "author": "Antiun Ingeniería S.L., Odoo Community Association (OCA)", "license": "AGPL-3", "application": False, "installable": True, - "external_dependencies": { - "python": [], - "bin": [], - }, "depends": [ "website", ], "data": [ "views/assets.xml", "views/snippets.xml", ], }
Remove unused keys from manifest.
## Code Before: { "name": "Set Snippet's Anchor", "summary": "Allow to reach a concrete section in the page", "version": "8.0.1.0.0", "category": "Website", "website": "http://www.antiun.com", "author": "Antiun Ingeniería S.L., Odoo Community Association (OCA)", "license": "AGPL-3", "application": False, "installable": True, "external_dependencies": { "python": [], "bin": [], }, "depends": [ "website", ], "data": [ "views/assets.xml", "views/snippets.xml", ], } ## Instruction: Remove unused keys from manifest. ## Code After: { "name": "Set Snippet's Anchor", "summary": "Allow to reach a concrete section in the page", "version": "8.0.1.0.0", "category": "Website", "website": "http://www.antiun.com", "author": "Antiun Ingeniería S.L., Odoo Community Association (OCA)", "license": "AGPL-3", "application": False, "installable": True, "depends": [ "website", ], "data": [ "views/assets.xml", "views/snippets.xml", ], }
... "installable": True, "depends": [ ...
8a36070c76d1552e2d2e61c1e5c47202cc28b329
basket/news/backends/common.py
basket/news/backends/common.py
from functools import wraps from time import time from django_statsd.clients import statsd class UnauthorizedException(Exception): """Failure to log into the email server.""" pass class NewsletterException(Exception): """Error when trying to talk to the the email server.""" def __init__(self, msg=None, error_code=None, status_code=None): self.error_code = error_code self.status_code = status_code super(NewsletterException, self).__init__(msg) class NewsletterNoResultsException(NewsletterException): """ No results were returned from the mail server (but the request didn't report any errors) """ pass def get_timer_decorator(prefix): """ Decorator for timing and counting requests to the API """ def decorator(f): @wraps(f) def wrapped(*args, **kwargs): starttime = time() e = None try: resp = f(*args, **kwargs) except NewsletterException as e: # noqa pass except Exception: raise totaltime = int((time() - starttime) * 1000) statsd.timing(prefix + '.timing', totaltime) statsd.timing(prefix + '.{}.timing'.format(f.__name__), totaltime) statsd.incr(prefix + '.count') statsd.incr(prefix + '.{}.count'.format(f.__name__)) if e: raise else: return resp return wrapped return decorator
from functools import wraps from time import time from django_statsd.clients import statsd class UnauthorizedException(Exception): """Failure to log into the email server.""" pass class NewsletterException(Exception): """Error when trying to talk to the the email server.""" def __init__(self, msg=None, error_code=None, status_code=None): self.error_code = error_code self.status_code = status_code super(NewsletterException, self).__init__(msg) class NewsletterNoResultsException(NewsletterException): """ No results were returned from the mail server (but the request didn't report any errors) """ pass def get_timer_decorator(prefix): """ Decorator for timing and counting requests to the API """ def decorator(f): @wraps(f) def wrapped(*args, **kwargs): starttime = time() def record_timing(): totaltime = int((time() - starttime) * 1000) statsd.timing(prefix + '.timing', totaltime) statsd.timing(prefix + '.{}.timing'.format(f.__name__), totaltime) statsd.incr(prefix + '.count') statsd.incr(prefix + '.{}.count'.format(f.__name__)) try: resp = f(*args, **kwargs) except NewsletterException: record_timing() raise record_timing() return resp return wrapped return decorator
Refactor the timing decorator to be less confusing
Refactor the timing decorator to be less confusing Also means that we don't have to ignore a flake8 error.
Python
mpl-2.0
glogiotatidis/basket,glogiotatidis/basket,glogiotatidis/basket
from functools import wraps from time import time from django_statsd.clients import statsd class UnauthorizedException(Exception): """Failure to log into the email server.""" pass class NewsletterException(Exception): """Error when trying to talk to the the email server.""" def __init__(self, msg=None, error_code=None, status_code=None): self.error_code = error_code self.status_code = status_code super(NewsletterException, self).__init__(msg) class NewsletterNoResultsException(NewsletterException): """ No results were returned from the mail server (but the request didn't report any errors) """ pass def get_timer_decorator(prefix): """ Decorator for timing and counting requests to the API """ def decorator(f): @wraps(f) def wrapped(*args, **kwargs): starttime = time() - e = None + + def record_timing(): + totaltime = int((time() - starttime) * 1000) + statsd.timing(prefix + '.timing', totaltime) + statsd.timing(prefix + '.{}.timing'.format(f.__name__), totaltime) + statsd.incr(prefix + '.count') + statsd.incr(prefix + '.{}.count'.format(f.__name__)) + try: resp = f(*args, **kwargs) - except NewsletterException as e: # noqa + except NewsletterException: + record_timing() - pass - except Exception: raise + record_timing() - totaltime = int((time() - starttime) * 1000) - statsd.timing(prefix + '.timing', totaltime) - statsd.timing(prefix + '.{}.timing'.format(f.__name__), totaltime) - statsd.incr(prefix + '.count') - statsd.incr(prefix + '.{}.count'.format(f.__name__)) - if e: - raise - else: - return resp + return resp return wrapped return decorator
Refactor the timing decorator to be less confusing
## Code Before: from functools import wraps from time import time from django_statsd.clients import statsd class UnauthorizedException(Exception): """Failure to log into the email server.""" pass class NewsletterException(Exception): """Error when trying to talk to the the email server.""" def __init__(self, msg=None, error_code=None, status_code=None): self.error_code = error_code self.status_code = status_code super(NewsletterException, self).__init__(msg) class NewsletterNoResultsException(NewsletterException): """ No results were returned from the mail server (but the request didn't report any errors) """ pass def get_timer_decorator(prefix): """ Decorator for timing and counting requests to the API """ def decorator(f): @wraps(f) def wrapped(*args, **kwargs): starttime = time() e = None try: resp = f(*args, **kwargs) except NewsletterException as e: # noqa pass except Exception: raise totaltime = int((time() - starttime) * 1000) statsd.timing(prefix + '.timing', totaltime) statsd.timing(prefix + '.{}.timing'.format(f.__name__), totaltime) statsd.incr(prefix + '.count') statsd.incr(prefix + '.{}.count'.format(f.__name__)) if e: raise else: return resp return wrapped return decorator ## Instruction: Refactor the timing decorator to be less confusing ## Code After: from functools import wraps from time import time from django_statsd.clients import statsd class UnauthorizedException(Exception): """Failure to log into the email server.""" pass class NewsletterException(Exception): """Error when trying to talk to the the email server.""" def __init__(self, msg=None, error_code=None, status_code=None): self.error_code = error_code self.status_code = status_code super(NewsletterException, self).__init__(msg) class NewsletterNoResultsException(NewsletterException): """ No results were returned from the mail server (but the request didn't report any errors) """ pass def get_timer_decorator(prefix): """ Decorator for timing and counting requests to the API """ def decorator(f): @wraps(f) def wrapped(*args, **kwargs): starttime = time() def record_timing(): totaltime = int((time() - starttime) * 1000) statsd.timing(prefix + '.timing', totaltime) statsd.timing(prefix + '.{}.timing'.format(f.__name__), totaltime) statsd.incr(prefix + '.count') statsd.incr(prefix + '.{}.count'.format(f.__name__)) try: resp = f(*args, **kwargs) except NewsletterException: record_timing() raise record_timing() return resp return wrapped return decorator
... starttime = time() def record_timing(): totaltime = int((time() - starttime) * 1000) statsd.timing(prefix + '.timing', totaltime) statsd.timing(prefix + '.{}.timing'.format(f.__name__), totaltime) statsd.incr(prefix + '.count') statsd.incr(prefix + '.{}.count'.format(f.__name__)) try: ... resp = f(*args, **kwargs) except NewsletterException: record_timing() raise ... record_timing() return resp ...
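A usage sketch for the refactored decorator; the 'news.backends' prefix and the function body are invented, and django-statsd is assumed to be configured as in the module above. A success emits the prefix-level .timing/.count pair plus the per-function get_user pair, and a NewsletterException is re-raised only after record_timing() has run.

```python
timer = get_timer_decorator('news.backends')  # hypothetical statsd prefix


@timer
def get_user(token):
    # Placeholder body; a real backend request would live here.
    return {'token': token}


get_user('abc123')  # records news.backends.timing, news.backends.get_user.count, etc.
```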
59afb96f2211983ee2a2786c60791074b13c3e7f
ni/__main__.py
ni/__main__.py
"""Implement a server to check if a contribution is covered by a CLA(s).""" from aiohttp import web from . import abc from . import ContribHost from . import ServerHost from . import CLAHost class Handler: """Handle requests from the contribution host.""" def __init__(self, server: ServerHost, cla_records: CLAHost): self.server = server self.cla_records = cla_records async def respond(request: web.Request) -> web.StreamResponse: # XXX untested """Handle a webhook trigger from the contribution host.""" try: contribution = ContribHost.process(request) usernames = await contribution.usernames() # XXX not implemented cla_status = await self.cla_records.check(usernames) # XXX not implemented # With a background queue, one could add the update as a work item # and return an HTTP 202 response. return (await contribution.update(cla_status)) # XXX not implemented except abc.ResponseExit as exc: return exc.response except Exception as exc: self.server.log(exc) return web.Response( status=http.HTTPStatus.INTERNAL_SERVER_ERROR.value) if __name__ == '__main__': server = ServerHost() cla_records = CLAHost() handler = Handler(server, cla_records) app = web.Application() app.router.add_route(*ContribHost.route, handler.respond) web.run_app(app, port=server.port())
"""Implement a server to check if a contribution is covered by a CLA(s).""" from aiohttp import web from . import abc from . import ContribHost from . import ServerHost from . import CLAHost class Handler: """Handle requests from the contribution host.""" def __init__(self, server: ServerHost, cla_records: CLAHost): self.server = server self.cla_records = cla_records async def respond(request: web.Request) -> web.StreamResponse: # XXX untested """Handle a webhook trigger from the contribution host.""" try: contribution = ContribHost.process(request) usernames = await contribution.usernames() # XXX not implemented cla_status = await self.cla_records.check(usernames) # XXX not implemented # With a work queue, one could make the updating of the # contribution a work item and return an HTTP 202 response. return (await contribution.update(cla_status)) # XXX not implemented except abc.ResponseExit as exc: return exc.response except Exception as exc: self.server.log(exc) return web.Response( status=http.HTTPStatus.INTERNAL_SERVER_ERROR.value) if __name__ == '__main__': server = ServerHost() cla_records = CLAHost() handler = Handler(server, cla_records) app = web.Application() app.router.add_route(*ContribHost.route, handler.respond) web.run_app(app, port=server.port())
Tweak comment about 202 response
Tweak comment about 202 response
Python
apache-2.0
python/the-knights-who-say-ni,python/the-knights-who-say-ni
"""Implement a server to check if a contribution is covered by a CLA(s).""" from aiohttp import web from . import abc from . import ContribHost from . import ServerHost from . import CLAHost class Handler: """Handle requests from the contribution host.""" def __init__(self, server: ServerHost, cla_records: CLAHost): self.server = server self.cla_records = cla_records async def respond(request: web.Request) -> web.StreamResponse: # XXX untested """Handle a webhook trigger from the contribution host.""" try: contribution = ContribHost.process(request) usernames = await contribution.usernames() # XXX not implemented cla_status = await self.cla_records.check(usernames) # XXX not implemented - # With a background queue, one could add the update as a work item + # With a work queue, one could make the updating of the - # and return an HTTP 202 response. + # contribution a work item and return an HTTP 202 response. return (await contribution.update(cla_status)) # XXX not implemented except abc.ResponseExit as exc: return exc.response except Exception as exc: self.server.log(exc) return web.Response( status=http.HTTPStatus.INTERNAL_SERVER_ERROR.value) if __name__ == '__main__': server = ServerHost() cla_records = CLAHost() handler = Handler(server, cla_records) app = web.Application() app.router.add_route(*ContribHost.route, handler.respond) web.run_app(app, port=server.port())
Tweak comment about 202 response
## Code Before: """Implement a server to check if a contribution is covered by a CLA(s).""" from aiohttp import web from . import abc from . import ContribHost from . import ServerHost from . import CLAHost class Handler: """Handle requests from the contribution host.""" def __init__(self, server: ServerHost, cla_records: CLAHost): self.server = server self.cla_records = cla_records async def respond(request: web.Request) -> web.StreamResponse: # XXX untested """Handle a webhook trigger from the contribution host.""" try: contribution = ContribHost.process(request) usernames = await contribution.usernames() # XXX not implemented cla_status = await self.cla_records.check(usernames) # XXX not implemented # With a background queue, one could add the update as a work item # and return an HTTP 202 response. return (await contribution.update(cla_status)) # XXX not implemented except abc.ResponseExit as exc: return exc.response except Exception as exc: self.server.log(exc) return web.Response( status=http.HTTPStatus.INTERNAL_SERVER_ERROR.value) if __name__ == '__main__': server = ServerHost() cla_records = CLAHost() handler = Handler(server, cla_records) app = web.Application() app.router.add_route(*ContribHost.route, handler.respond) web.run_app(app, port=server.port()) ## Instruction: Tweak comment about 202 response ## Code After: """Implement a server to check if a contribution is covered by a CLA(s).""" from aiohttp import web from . import abc from . import ContribHost from . import ServerHost from . import CLAHost class Handler: """Handle requests from the contribution host.""" def __init__(self, server: ServerHost, cla_records: CLAHost): self.server = server self.cla_records = cla_records async def respond(request: web.Request) -> web.StreamResponse: # XXX untested """Handle a webhook trigger from the contribution host.""" try: contribution = ContribHost.process(request) usernames = await contribution.usernames() # XXX not implemented cla_status = await self.cla_records.check(usernames) # XXX not implemented # With a work queue, one could make the updating of the # contribution a work item and return an HTTP 202 response. return (await contribution.update(cla_status)) # XXX not implemented except abc.ResponseExit as exc: return exc.response except Exception as exc: self.server.log(exc) return web.Response( status=http.HTTPStatus.INTERNAL_SERVER_ERROR.value) if __name__ == '__main__': server = ServerHost() cla_records = CLAHost() handler = Handler(server, cla_records) app = web.Application() app.router.add_route(*ContribHost.route, handler.respond) web.run_app(app, port=server.port())
... cla_status = await self.cla_records.check(usernames) # XXX not implemented # With a work queue, one could make the updating of the # contribution a work item and return an HTTP 202 response. return (await contribution.update(cla_status)) # XXX not implemented ...
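Two asides on this record: http is referenced but never imported in either version of the module, and the revised comment sketches a work-queue flow. A hedged illustration of that 202 path, with queue as an invented stand-in for whatever background mechanism would apply the update:

```python
import http

from aiohttp import web


async def respond_accepted(queue, contribution, cla_status):
    # Hand the update off as a work item and acknowledge immediately.
    await queue.put((contribution, cla_status))  # e.g. an asyncio.Queue
    return web.Response(status=http.HTTPStatus.ACCEPTED.value)  # 202
```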
1c516e64518597404e3928d445fb3239748a4861
performanceplatform/collector/logging_setup.py
performanceplatform/collector/logging_setup.py
from logstash_formatter import LogstashFormatter import logging import os import pdb import sys import traceback def get_log_file_handler(path): handler = logging.FileHandler(path) handler.setFormatter(logging.Formatter( "%(asctime)s [%(levelname)s] -> %(message)s")) return handler def get_json_log_handler(path, app_name): handler = logging.FileHandler(path) formatter = LogstashFormatter() formatter.defaults['@tags'] = ['collector', app_name] handler.setFormatter(formatter) return handler def uncaught_exception_handler(*exc_info): text = "".join(traceback.format_exception(*exc_info)) logging.error("Unhandled exception: %s", text) def set_up_logging(app_name, log_level, logfile_path): sys.excepthook = uncaught_exception_handler logger = logging.getLogger() logger.setLevel(log_level) logger.addHandler(get_log_file_handler( os.path.join(logfile_path, 'collector.log'))) logger.addHandler(get_json_log_handler( os.path.join(logfile_path, 'collector.log.json'), app_name)) logger.info("{0} logging started".format(app_name))
from logstash_formatter import LogstashFormatter import logging import os import pdb import sys import traceback def get_log_file_handler(path): handler = logging.FileHandler(path) handler.setFormatter(logging.Formatter( "%(asctime)s [%(levelname)s] -> %(message)s")) return handler def get_json_log_handler(path, app_name, json_fields): handler = logging.FileHandler(path) formatter = LogstashFormatter() formatter.defaults['@tags'] = ['collector', app_name] formatter.defaults.update(json_fields) handler.setFormatter(formatter) return handler def uncaught_exception_handler(*exc_info): text = "".join(traceback.format_exception(*exc_info)) logging.error("Unhandled exception: %s", text) def set_up_logging(app_name, log_level, logfile_path, json_fields=None): sys.excepthook = uncaught_exception_handler logger = logging.getLogger() logger.setLevel(log_level) logger.addHandler(get_log_file_handler( os.path.join(logfile_path, 'collector.log'))) logger.addHandler(get_json_log_handler( os.path.join(logfile_path, 'collector.log.json'), app_name, json_fields=json_fields if json_fields else {})) logger.info("{0} logging started".format(app_name))
Add `json_fields` parameter to set_up_logging
Add `json_fields` parameter to set_up_logging This will allow the main function to add extra fields to JSON log messages, for example to pass through command-line arguments. See https://www.pivotaltracker.com/story/show/70748012
Python
mit
alphagov/performanceplatform-collector,alphagov/performanceplatform-collector,alphagov/performanceplatform-collector
from logstash_formatter import LogstashFormatter import logging import os import pdb import sys import traceback def get_log_file_handler(path): handler = logging.FileHandler(path) handler.setFormatter(logging.Formatter( "%(asctime)s [%(levelname)s] -> %(message)s")) return handler - def get_json_log_handler(path, app_name): + def get_json_log_handler(path, app_name, json_fields): handler = logging.FileHandler(path) formatter = LogstashFormatter() formatter.defaults['@tags'] = ['collector', app_name] + formatter.defaults.update(json_fields) handler.setFormatter(formatter) return handler def uncaught_exception_handler(*exc_info): text = "".join(traceback.format_exception(*exc_info)) logging.error("Unhandled exception: %s", text) - def set_up_logging(app_name, log_level, logfile_path): + def set_up_logging(app_name, log_level, logfile_path, json_fields=None): sys.excepthook = uncaught_exception_handler logger = logging.getLogger() logger.setLevel(log_level) logger.addHandler(get_log_file_handler( os.path.join(logfile_path, 'collector.log'))) logger.addHandler(get_json_log_handler( - os.path.join(logfile_path, 'collector.log.json'), app_name)) + os.path.join(logfile_path, 'collector.log.json'), + app_name, + json_fields=json_fields if json_fields else {})) logger.info("{0} logging started".format(app_name))
Add `json_fields` parameter to set_up_logging
## Code Before: from logstash_formatter import LogstashFormatter import logging import os import pdb import sys import traceback def get_log_file_handler(path): handler = logging.FileHandler(path) handler.setFormatter(logging.Formatter( "%(asctime)s [%(levelname)s] -> %(message)s")) return handler def get_json_log_handler(path, app_name): handler = logging.FileHandler(path) formatter = LogstashFormatter() formatter.defaults['@tags'] = ['collector', app_name] handler.setFormatter(formatter) return handler def uncaught_exception_handler(*exc_info): text = "".join(traceback.format_exception(*exc_info)) logging.error("Unhandled exception: %s", text) def set_up_logging(app_name, log_level, logfile_path): sys.excepthook = uncaught_exception_handler logger = logging.getLogger() logger.setLevel(log_level) logger.addHandler(get_log_file_handler( os.path.join(logfile_path, 'collector.log'))) logger.addHandler(get_json_log_handler( os.path.join(logfile_path, 'collector.log.json'), app_name)) logger.info("{0} logging started".format(app_name)) ## Instruction: Add `json_fields` parameter to set_up_logging ## Code After: from logstash_formatter import LogstashFormatter import logging import os import pdb import sys import traceback def get_log_file_handler(path): handler = logging.FileHandler(path) handler.setFormatter(logging.Formatter( "%(asctime)s [%(levelname)s] -> %(message)s")) return handler def get_json_log_handler(path, app_name, json_fields): handler = logging.FileHandler(path) formatter = LogstashFormatter() formatter.defaults['@tags'] = ['collector', app_name] formatter.defaults.update(json_fields) handler.setFormatter(formatter) return handler def uncaught_exception_handler(*exc_info): text = "".join(traceback.format_exception(*exc_info)) logging.error("Unhandled exception: %s", text) def set_up_logging(app_name, log_level, logfile_path, json_fields=None): sys.excepthook = uncaught_exception_handler logger = logging.getLogger() logger.setLevel(log_level) logger.addHandler(get_log_file_handler( os.path.join(logfile_path, 'collector.log'))) logger.addHandler(get_json_log_handler( os.path.join(logfile_path, 'collector.log.json'), app_name, json_fields=json_fields if json_fields else {})) logger.info("{0} logging started".format(app_name))
# ... existing code ... def get_json_log_handler(path, app_name, json_fields): handler = logging.FileHandler(path) # ... modified code ... formatter.defaults['@tags'] = ['collector', app_name] formatter.defaults.update(json_fields) handler.setFormatter(formatter) ... def set_up_logging(app_name, log_level, logfile_path, json_fields=None): sys.excepthook = uncaught_exception_handler ... logger.addHandler(get_json_log_handler( os.path.join(logfile_path, 'collector.log.json'), app_name, json_fields=json_fields if json_fields else {})) logger.info("{0} logging started".format(app_name)) # ... rest of the code ...
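A call sketch for the extended signature; the app name, path and fields are invented. Anything passed as json_fields is merged into the LogstashFormatter defaults, so it should surface as top-level keys on every JSON log line.

```python
import logging

from performanceplatform.collector.logging_setup import set_up_logging

set_up_logging(
    'ga-collector',                # hypothetical app name, also used in @tags
    logging.INFO,
    '/var/log/collectors',         # must already exist and be writable
    json_fields={'data_set': 'govuk_visitors', 'query': 'weekly'},
)
```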
376b327379caeb0845007c3a0e7c33e1f15869f0
flatisfy/constants.py
flatisfy/constants.py
from __future__ import absolute_import, print_function, unicode_literals # Some backends give more infos than others. Here is the precedence we want to # use. First is most important one, last is the one that will always be # considered as less trustable if two backends have similar info about a # housing. BACKENDS_BY_PRECEDENCE = [ "foncia", "seloger", "pap", "leboncoin", "explorimmo", "logicimmo", "entreparticuliers" ]
from __future__ import absolute_import, print_function, unicode_literals # Some backends give more infos than others. Here is the precedence we want to # use. First is most important one, last is the one that will always be # considered as less trustable if two backends have similar info about a # housing. BACKENDS_BY_PRECEDENCE = [ "foncia", "seloger", "pap", "leboncoin", "explorimmo", "logicimmo" ]
Drop support for entreparticuliers Weboob module
Drop support for entreparticuliers Weboob module
Python
mit
Phyks/Flatisfy,Phyks/Flatisfy,Phyks/Flatisfy,Phyks/Flatisfy
from __future__ import absolute_import, print_function, unicode_literals # Some backends give more infos than others. Here is the precedence we want to # use. First is most important one, last is the one that will always be # considered as less trustable if two backends have similar info about a # housing. BACKENDS_BY_PRECEDENCE = [ "foncia", "seloger", "pap", "leboncoin", "explorimmo", - "logicimmo", + "logicimmo" - "entreparticuliers" ]
Drop support for entreparticuliers Weboob module
## Code Before: from __future__ import absolute_import, print_function, unicode_literals # Some backends give more infos than others. Here is the precedence we want to # use. First is most important one, last is the one that will always be # considered as less trustable if two backends have similar info about a # housing. BACKENDS_BY_PRECEDENCE = [ "foncia", "seloger", "pap", "leboncoin", "explorimmo", "logicimmo", "entreparticuliers" ] ## Instruction: Drop support for entreparticuliers Weboob module ## Code After: from __future__ import absolute_import, print_function, unicode_literals # Some backends give more infos than others. Here is the precedence we want to # use. First is most important one, last is the one that will always be # considered as less trustable if two backends have similar info about a # housing. BACKENDS_BY_PRECEDENCE = [ "foncia", "seloger", "pap", "leboncoin", "explorimmo", "logicimmo" ]
// ... existing code ... "explorimmo", "logicimmo" ] // ... rest of the code ...
8c0af29e7b6ec3a5e76fdb1efc56068bf276ad39
helenae/flask_app.py
helenae/flask_app.py
from flask import Flask from flask_sqlalchemy import SQLAlchemy from db import tables as dbTables app = Flask(__name__, template_folder='./web/templates/') app.config['SECRET_KEY'] = 'some_secret_key' app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://user:password@localhost/csan' db_connection = SQLAlchemy(app) import web.admin import web.views
from flask import Flask, request, session from flask_sqlalchemy import SQLAlchemy from flask.ext.babelex import Babel from db import tables as dbTables app = Flask(__name__, template_folder='./web/templates/', static_folder='./web/static/', static_url_path='') app.config['SECRET_KEY'] = 'some_secret_key' app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://user:password@localhost/csan' db_connection = SQLAlchemy(app) # Initialize babel babel = Babel(app) @babel.localeselector def get_locale(): override = request.args.get('lang') if override: session['lang'] = override return session.get('lang', 'ru') import web.admin import web.views
Add babel plugin for Flask
Add babel plugin for Flask
Python
mit
Relrin/Helenae,Relrin/Helenae,Relrin/Helenae
- from flask import Flask + from flask import Flask, request, session from flask_sqlalchemy import SQLAlchemy + from flask.ext.babelex import Babel from db import tables as dbTables - app = Flask(__name__, template_folder='./web/templates/') + app = Flask(__name__, template_folder='./web/templates/', static_folder='./web/static/', static_url_path='') app.config['SECRET_KEY'] = 'some_secret_key' app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://user:password@localhost/csan' db_connection = SQLAlchemy(app) + # Initialize babel + babel = Babel(app) + @babel.localeselector + def get_locale(): + override = request.args.get('lang') + if override: + session['lang'] = override + return session.get('lang', 'ru') + import web.admin import web.views
Add babel plugin for Flask
## Code Before: from flask import Flask from flask_sqlalchemy import SQLAlchemy from db import tables as dbTables app = Flask(__name__, template_folder='./web/templates/') app.config['SECRET_KEY'] = 'some_secret_key' app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://user:password@localhost/csan' db_connection = SQLAlchemy(app) import web.admin import web.views ## Instruction: Add babel plugin for Flask ## Code After: from flask import Flask, request, session from flask_sqlalchemy import SQLAlchemy from flask.ext.babelex import Babel from db import tables as dbTables app = Flask(__name__, template_folder='./web/templates/', static_folder='./web/static/', static_url_path='') app.config['SECRET_KEY'] = 'some_secret_key' app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://user:password@localhost/csan' db_connection = SQLAlchemy(app) # Initialize babel babel = Babel(app) @babel.localeselector def get_locale(): override = request.args.get('lang') if override: session['lang'] = override return session.get('lang', 'ru') import web.admin import web.views
# ... existing code ... from flask import Flask, request, session from flask_sqlalchemy import SQLAlchemy from flask.ext.babelex import Babel from db import tables as dbTables # ... modified code ... app = Flask(__name__, template_folder='./web/templates/', static_folder='./web/static/', static_url_path='') app.config['SECRET_KEY'] = 'some_secret_key' ... # Initialize babel babel = Babel(app) @babel.localeselector def get_locale(): override = request.args.get('lang') if override: session['lang'] = override return session.get('lang', 'ru') import web.admin # ... rest of the code ...
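The locale selector can be poked with Flask's test client; this sketch assumes the app above is importable and that some route at / renders a translated template so the selector actually runs. The first hit stores the override in the session, after which plain requests keep resolving to it instead of the 'ru' default.

```python
from helenae.flask_app import app  # module path taken from this record

with app.test_client() as client:
    client.get('/?lang=en')  # get_locale() stores 'en' in session['lang']
    client.get('/')          # still resolved as 'en' for this session
```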
2029256cda7e3bc752d30361357932053cb98744
shell.py
shell.py
from person import Person

def go(db):
    global status
    while status == 1:
        inputText = input("command>")
        for i in commands:
            if inputText == i:
                commands[i](db)

def helpMe(db):
    print("help:")
    for i in commandsHelp:
        print(i, ":", commandsHelp[i])

def add(db):
    print("add")
    firstName = input("firstName:")
    secondName = input("secondName:")
    birthdayDate = input("birthdayDate:")
    namedayDate = input("namedayDate:")
    mail = input("mail:")
    telNumber = input("telNumber:")
    facebook = input("facebook:")
    group = input("group:")
    newPerson = Person(firstName, secondName, birthdayDate,namedayDate, mail, telNumber, facebook, group)
    db.add(newPerson)

def showDb(db):
    for index in db.db:
        print(index)

def quit(db):
    global status
    status = 0

status = 1
commands = {"h": helpMe, "a": add, "q": quit, "l": showDb}
commandsHelp = {"h": "help for command help", "a": "help for command add", "q": "help for command quit"}
from person import Person

def go(db):
    global status
    while status == 1:
        inputText = input("command> ")
        for i in commands:
            if inputText == i:
                commands[i](db)

def helpMe(db):
    print("help:")
    for i in commandsHelp:
        print("\t", i, ":", commandsHelp[i])

def add(db):
    print("add")
    firstName = input("\tfirstName: ")
    secondName = input("\tsecondName: ")
    birthdayDate = input("\tbirthdayDate: ")
    namedayDate = input("\tnamedayDate: ")
    mail = input("\tmail: ")
    telNumber = input("\ttelNumber: ")
    facebook = input("\tfacebook: ")
    group = input("\tgroup: ")
    newPerson = Person(firstName, secondName, birthdayDate,namedayDate, mail, telNumber, facebook, group)
    db.add(newPerson)

def showDb(db):
    for index in db.db:
        print(index)

def quit(db):
    global status
    status = 0

status = 1
commands = {"h": helpMe, "a": add, "q": quit, "l": showDb}
commandsHelp = {"h": "help for command help", "a": "help for command add", "q": "help for command quit"}
Add whitespaces to print it better.
Add whitespaces to print it better.

Signed-off-by: Matej Dujava <[email protected]>
Python
mit
matejd11/birthdayNotify
from person import Person

def go(db):
    global status
    while status == 1:
-         inputText = input("command>")
+         inputText = input("command> ")
        for i in commands:
            if inputText == i:
                commands[i](db)

def helpMe(db):
    print("help:")
    for i in commandsHelp:
-         print(i, ":", commandsHelp[i])
+         print("\t", i, ":", commandsHelp[i])

def add(db):
    print("add")
-     firstName = input("firstName:")
+     firstName = input("\tfirstName: ")
-     secondName = input("secondName:")
+     secondName = input("\tsecondName: ")
-     birthdayDate = input("birthdayDate:")
+     birthdayDate = input("\tbirthdayDate: ")
-     namedayDate = input("namedayDate:")
+     namedayDate = input("\tnamedayDate: ")
-     mail = input("mail:")
+     mail = input("\tmail: ")
-     telNumber = input("telNumber:")
+     telNumber = input("\ttelNumber: ")
-     facebook = input("facebook:")
+     facebook = input("\tfacebook: ")
-     group = input("group:")
+     group = input("\tgroup: ")
    newPerson = Person(firstName, secondName, birthdayDate,namedayDate, mail, telNumber, facebook, group)
    db.add(newPerson)

def showDb(db):
    for index in db.db:
        print(index)

def quit(db):
    global status
    status = 0

status = 1
commands = {"h": helpMe, "a": add, "q": quit, "l": showDb}
commandsHelp = {"h": "help for command help", "a": "help for command add", "q": "help for command quit"}
Add whitespaces to print it better.
## Code Before:
from person import Person

def go(db):
    global status
    while status == 1:
        inputText = input("command>")
        for i in commands:
            if inputText == i:
                commands[i](db)

def helpMe(db):
    print("help:")
    for i in commandsHelp:
        print(i, ":", commandsHelp[i])

def add(db):
    print("add")
    firstName = input("firstName:")
    secondName = input("secondName:")
    birthdayDate = input("birthdayDate:")
    namedayDate = input("namedayDate:")
    mail = input("mail:")
    telNumber = input("telNumber:")
    facebook = input("facebook:")
    group = input("group:")
    newPerson = Person(firstName, secondName, birthdayDate,namedayDate, mail, telNumber, facebook, group)
    db.add(newPerson)

def showDb(db):
    for index in db.db:
        print(index)

def quit(db):
    global status
    status = 0

status = 1
commands = {"h": helpMe, "a": add, "q": quit, "l": showDb}
commandsHelp = {"h": "help for command help", "a": "help for command add", "q": "help for command quit"}

## Instruction:
Add whitespaces to print it better.

## Code After:
from person import Person

def go(db):
    global status
    while status == 1:
        inputText = input("command> ")
        for i in commands:
            if inputText == i:
                commands[i](db)

def helpMe(db):
    print("help:")
    for i in commandsHelp:
        print("\t", i, ":", commandsHelp[i])

def add(db):
    print("add")
    firstName = input("\tfirstName: ")
    secondName = input("\tsecondName: ")
    birthdayDate = input("\tbirthdayDate: ")
    namedayDate = input("\tnamedayDate: ")
    mail = input("\tmail: ")
    telNumber = input("\ttelNumber: ")
    facebook = input("\tfacebook: ")
    group = input("\tgroup: ")
    newPerson = Person(firstName, secondName, birthdayDate,namedayDate, mail, telNumber, facebook, group)
    db.add(newPerson)

def showDb(db):
    for index in db.db:
        print(index)

def quit(db):
    global status
    status = 0

status = 1
commands = {"h": helpMe, "a": add, "q": quit, "l": showDb}
commandsHelp = {"h": "help for command help", "a": "help for command add", "q": "help for command quit"}
// ... existing code ...
    while status == 1:
        inputText = input("command> ")
        for i in commands:
// ... modified code ...
    for i in commandsHelp:
        print("\t", i, ":", commandsHelp[i])
...
    firstName = input("\tfirstName: ")
    secondName = input("\tsecondName: ")
    birthdayDate = input("\tbirthdayDate: ")
    namedayDate = input("\tnamedayDate: ")
    mail = input("\tmail: ")
    telNumber = input("\ttelNumber: ")
    facebook = input("\tfacebook: ")
    group = input("\tgroup: ")
// ... rest of the code ...
3b5f1749a8065bb9241d6a8ed77c047a05b3f6e2
bcbio/distributed/sge.py
bcbio/distributed/sge.py
import re
import subprocess

_jobid_pat = re.compile('Your job (?P<jobid>\d+) \("')

def submit_job(scheduler_args, command):
    """Submit a job to the scheduler, returning the supplied job ID.
    """
    cl = ["qsub", "-cwd", "-b", "y", "-j", "y"] + scheduler_args + command
    status = subprocess.check_output(cl)
    match = _jobid_pat.search(status)
    return match.groups("jobid")[0]

def stop_job(jobid):
    cl = ["qdel", jobid]
    subprocess.check_call(cl)

def are_running(jobids):
    """Check if submitted job IDs are running.
    """
    run_info = subprocess.check_output(["qstat"])
    running = []
    for parts in (l.split() for l in run_info.split("\n") if l.strip()):
        if len(parts) >= 5:
            pid, _, _, _, status = parts[:5]
            if status.lower() in ["r"]:
                running.append(pid)
    want_running = set(running).intersection(set(jobids))
    return len(want_running) == len(jobids)
import re
import time
import subprocess

_jobid_pat = re.compile('Your job (?P<jobid>\d+) \("')

def submit_job(scheduler_args, command):
    """Submit a job to the scheduler, returning the supplied job ID.
    """
    cl = ["qsub", "-cwd", "-b", "y", "-j", "y"] + scheduler_args + command
    status = subprocess.check_output(cl)
    match = _jobid_pat.search(status)
    return match.groups("jobid")[0]

def stop_job(jobid):
    cl = ["qdel", jobid]
    subprocess.check_call(cl)

def are_running(jobids):
    """Check if submitted job IDs are running.
    """
    # handle SGE errors, retrying to get the current status
    max_retries = 10
    tried = 0
    while 1:
        try:
            run_info = subprocess.check_output(["qstat"])
            break
        except:
            tried += 1
            if tried > max_retries:
                raise
            time.sleep(5)
    running = []
    for parts in (l.split() for l in run_info.split("\n") if l.strip()):
        if len(parts) >= 5:
            pid, _, _, _, status = parts[:5]
            if status.lower() in ["r"]:
                running.append(pid)
    want_running = set(running).intersection(set(jobids))
    return len(want_running) == len(jobids)
Handle temporary errors returned from SGE qstat
Handle temporary errors returned from SGE qstat
Python
mit
biocyberman/bcbio-nextgen,Cyberbio-Lab/bcbio-nextgen,mjafin/bcbio-nextgen,chapmanb/bcbio-nextgen,lpantano/bcbio-nextgen,fw1121/bcbio-nextgen,brainstorm/bcbio-nextgen,fw1121/bcbio-nextgen,elkingtonmcb/bcbio-nextgen,verdurin/bcbio-nextgen,mjafin/bcbio-nextgen,vladsaveliev/bcbio-nextgen,SciLifeLab/bcbio-nextgen,SciLifeLab/bcbio-nextgen,chapmanb/bcbio-nextgen,lbeltrame/bcbio-nextgen,a113n/bcbio-nextgen,biocyberman/bcbio-nextgen,elkingtonmcb/bcbio-nextgen,hjanime/bcbio-nextgen,Cyberbio-Lab/bcbio-nextgen,hjanime/bcbio-nextgen,verdurin/bcbio-nextgen,vladsaveliev/bcbio-nextgen,a113n/bcbio-nextgen,lpantano/bcbio-nextgen,mjafin/bcbio-nextgen,gifford-lab/bcbio-nextgen,gifford-lab/bcbio-nextgen,hjanime/bcbio-nextgen,brainstorm/bcbio-nextgen,gifford-lab/bcbio-nextgen,guillermo-carrasco/bcbio-nextgen,chapmanb/bcbio-nextgen,vladsaveliev/bcbio-nextgen,lpantano/bcbio-nextgen,a113n/bcbio-nextgen,brainstorm/bcbio-nextgen,elkingtonmcb/bcbio-nextgen,guillermo-carrasco/bcbio-nextgen,biocyberman/bcbio-nextgen,Cyberbio-Lab/bcbio-nextgen,SciLifeLab/bcbio-nextgen,fw1121/bcbio-nextgen,verdurin/bcbio-nextgen,lbeltrame/bcbio-nextgen,guillermo-carrasco/bcbio-nextgen,lbeltrame/bcbio-nextgen
import re
+ import time
import subprocess

_jobid_pat = re.compile('Your job (?P<jobid>\d+) \("')

def submit_job(scheduler_args, command):
    """Submit a job to the scheduler, returning the supplied job ID.
    """
    cl = ["qsub", "-cwd", "-b", "y", "-j", "y"] + scheduler_args + command
    status = subprocess.check_output(cl)
    match = _jobid_pat.search(status)
    return match.groups("jobid")[0]

def stop_job(jobid):
    cl = ["qdel", jobid]
    subprocess.check_call(cl)

def are_running(jobids):
    """Check if submitted job IDs are running.
    """
+     # handle SGE errors, retrying to get the current status
+     max_retries = 10
+     tried = 0
+     while 1:
+         try:
-     run_info = subprocess.check_output(["qstat"])
+             run_info = subprocess.check_output(["qstat"])
+             break
+         except:
+             tried += 1
+             if tried > max_retries:
+                 raise
+             time.sleep(5)
    running = []
    for parts in (l.split() for l in run_info.split("\n") if l.strip()):
        if len(parts) >= 5:
            pid, _, _, _, status = parts[:5]
            if status.lower() in ["r"]:
                running.append(pid)
    want_running = set(running).intersection(set(jobids))
    return len(want_running) == len(jobids)
Handle temporary errors returned from SGE qstat
## Code Before:
import re
import subprocess

_jobid_pat = re.compile('Your job (?P<jobid>\d+) \("')

def submit_job(scheduler_args, command):
    """Submit a job to the scheduler, returning the supplied job ID.
    """
    cl = ["qsub", "-cwd", "-b", "y", "-j", "y"] + scheduler_args + command
    status = subprocess.check_output(cl)
    match = _jobid_pat.search(status)
    return match.groups("jobid")[0]

def stop_job(jobid):
    cl = ["qdel", jobid]
    subprocess.check_call(cl)

def are_running(jobids):
    """Check if submitted job IDs are running.
    """
    run_info = subprocess.check_output(["qstat"])
    running = []
    for parts in (l.split() for l in run_info.split("\n") if l.strip()):
        if len(parts) >= 5:
            pid, _, _, _, status = parts[:5]
            if status.lower() in ["r"]:
                running.append(pid)
    want_running = set(running).intersection(set(jobids))
    return len(want_running) == len(jobids)

## Instruction:
Handle temporary errors returned from SGE qstat

## Code After:
import re
import time
import subprocess

_jobid_pat = re.compile('Your job (?P<jobid>\d+) \("')

def submit_job(scheduler_args, command):
    """Submit a job to the scheduler, returning the supplied job ID.
    """
    cl = ["qsub", "-cwd", "-b", "y", "-j", "y"] + scheduler_args + command
    status = subprocess.check_output(cl)
    match = _jobid_pat.search(status)
    return match.groups("jobid")[0]

def stop_job(jobid):
    cl = ["qdel", jobid]
    subprocess.check_call(cl)

def are_running(jobids):
    """Check if submitted job IDs are running.
    """
    # handle SGE errors, retrying to get the current status
    max_retries = 10
    tried = 0
    while 1:
        try:
            run_info = subprocess.check_output(["qstat"])
            break
        except:
            tried += 1
            if tried > max_retries:
                raise
            time.sleep(5)
    running = []
    for parts in (l.split() for l in run_info.split("\n") if l.strip()):
        if len(parts) >= 5:
            pid, _, _, _, status = parts[:5]
            if status.lower() in ["r"]:
                running.append(pid)
    want_running = set(running).intersection(set(jobids))
    return len(want_running) == len(jobids)
# ... existing code ...
import re
import time
import subprocess
# ... modified code ...
    """
    # handle SGE errors, retrying to get the current status
    max_retries = 10
    tried = 0
    while 1:
        try:
            run_info = subprocess.check_output(["qstat"])
            break
        except:
            tried += 1
            if tried > max_retries:
                raise
            time.sleep(5)
    running = []
# ... rest of the code ...
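One thing to note about this record: the retry loop catches every exception with a bare `except:`. A narrower variant would retry only on the failure it is guarding against. A minimal sketch, not part of bcbio itself; the helper name and defaults are illustrative:

import time
import subprocess

def check_output_with_retry(cmd, max_retries=10, delay=5):
    # Retry only on a failed command, mirroring the loop in are_running()
    for attempt in range(max_retries + 1):
        try:
            return subprocess.check_output(cmd)
        except subprocess.CalledProcessError:
            if attempt == max_retries:
                raise
            time.sleep(delay)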
9c34c9cfca30104d5bd17b38df5fa50cb24ee9ae
tests/write_abort_test.py
tests/write_abort_test.py
import os.path
import pycurl
import sys
import unittest

class WriteAbortTest(unittest.TestCase):
    def setUp(self):
        self.curl = pycurl.Curl()

    def tearDown(self):
        self.curl.close()

    def test_write_abort(self):
        def write_cb(_):
            # this should cause pycurl.WRITEFUNCTION (without any range errors)
            return -1

        # download the script itself through the file:// protocol into write_cb
        self.curl.setopt(pycurl.URL, 'file://' + os.path.abspath(sys.argv[0]))
        self.curl.setopt(pycurl.WRITEFUNCTION, write_cb)
        try:
            self.curl.perform()
        except pycurl.error:
            err, msg = sys.exc_info()[1]
            # we expect pycurl.E_WRITE_ERROR as the response
            assert pycurl.E_WRITE_ERROR == err

        # no additional errors should be reported
        assert not hasattr(sys, 'last_value')
import os.path
import pycurl
import sys
import unittest

class WriteAbortTest(unittest.TestCase):
    def setUp(self):
        self.curl = pycurl.Curl()

    def tearDown(self):
        self.curl.close()

    def test_write_abort(self):
        def write_cb(_):
            # this should cause pycurl.WRITEFUNCTION (without any range errors)
            return -1

        try:
            # set when running full test suite if any earlier tests
            # failed in Python code called from C
            del sys.last_value
        except AttributeError:
            pass

        # download the script itself through the file:// protocol into write_cb
        self.curl.setopt(pycurl.URL, 'file://' + os.path.abspath(sys.argv[0]))
        self.curl.setopt(pycurl.WRITEFUNCTION, write_cb)
        try:
            self.curl.perform()
        except pycurl.error:
            err, msg = sys.exc_info()[1]
            # we expect pycurl.E_WRITE_ERROR as the response
            assert pycurl.E_WRITE_ERROR == err

        # no additional errors should be reported
        assert not hasattr(sys, 'last_value')
Handle the possibility of other tests failing in Python code called from C
Handle the possibility of other tests failing in Python code called from C
Python
lgpl-2.1
pycurl/pycurl,pycurl/pycurl,pycurl/pycurl
import os.path
import pycurl
import sys
import unittest

class WriteAbortTest(unittest.TestCase):
    def setUp(self):
        self.curl = pycurl.Curl()

    def tearDown(self):
        self.curl.close()

    def test_write_abort(self):
        def write_cb(_):
            # this should cause pycurl.WRITEFUNCTION (without any range errors)
            return -1

+         try:
+             # set when running full test suite if any earlier tests
+             # failed in Python code called from C
+             del sys.last_value
+         except AttributeError:
+             pass
+
        # download the script itself through the file:// protocol into write_cb
        self.curl.setopt(pycurl.URL, 'file://' + os.path.abspath(sys.argv[0]))
        self.curl.setopt(pycurl.WRITEFUNCTION, write_cb)
        try:
            self.curl.perform()
        except pycurl.error:
            err, msg = sys.exc_info()[1]
            # we expect pycurl.E_WRITE_ERROR as the response
            assert pycurl.E_WRITE_ERROR == err

        # no additional errors should be reported
        assert not hasattr(sys, 'last_value')
Handle the possibility of other tests failing in Python code called from C
## Code Before:
import os.path
import pycurl
import sys
import unittest

class WriteAbortTest(unittest.TestCase):
    def setUp(self):
        self.curl = pycurl.Curl()

    def tearDown(self):
        self.curl.close()

    def test_write_abort(self):
        def write_cb(_):
            # this should cause pycurl.WRITEFUNCTION (without any range errors)
            return -1

        # download the script itself through the file:// protocol into write_cb
        self.curl.setopt(pycurl.URL, 'file://' + os.path.abspath(sys.argv[0]))
        self.curl.setopt(pycurl.WRITEFUNCTION, write_cb)
        try:
            self.curl.perform()
        except pycurl.error:
            err, msg = sys.exc_info()[1]
            # we expect pycurl.E_WRITE_ERROR as the response
            assert pycurl.E_WRITE_ERROR == err

        # no additional errors should be reported
        assert not hasattr(sys, 'last_value')

## Instruction:
Handle the possibility of other tests failing in Python code called from C

## Code After:
import os.path
import pycurl
import sys
import unittest

class WriteAbortTest(unittest.TestCase):
    def setUp(self):
        self.curl = pycurl.Curl()

    def tearDown(self):
        self.curl.close()

    def test_write_abort(self):
        def write_cb(_):
            # this should cause pycurl.WRITEFUNCTION (without any range errors)
            return -1

        try:
            # set when running full test suite if any earlier tests
            # failed in Python code called from C
            del sys.last_value
        except AttributeError:
            pass

        # download the script itself through the file:// protocol into write_cb
        self.curl.setopt(pycurl.URL, 'file://' + os.path.abspath(sys.argv[0]))
        self.curl.setopt(pycurl.WRITEFUNCTION, write_cb)
        try:
            self.curl.perform()
        except pycurl.error:
            err, msg = sys.exc_info()[1]
            # we expect pycurl.E_WRITE_ERROR as the response
            assert pycurl.E_WRITE_ERROR == err

        # no additional errors should be reported
        assert not hasattr(sys, 'last_value')
// ... existing code ...
        try:
            # set when running full test suite if any earlier tests
            # failed in Python code called from C
            del sys.last_value
        except AttributeError:
            pass

        # download the script itself through the file:// protocol into write_cb
// ... rest of the code ...
34e0934d2f62a98728601a18126fa21eb50cf5a5
doubles/testing.py
doubles/testing.py
class User(object):
    """An importable dummy class used for testing purposes."""

    class_attribute = 'foo'

    @staticmethod
    def static_method():
        pass

    @classmethod
    def class_method(cls):
        return 'class method'

    def __init__(self, name, age):
        self.name = name
        self.age = age

    def get_name(self):
        return self.name

    def instance_method(self):
        pass

    def method_with_varargs(self, *args):
        pass

    def method_with_default_args(self, foo, bar='baz'):
        pass

    def method_with_varkwargs(self, **kwargs):
        pass

    def method_with_positional_arguments(self, foo):
        pass
class User(object):
    """An importable dummy class used for testing purposes."""

    class_attribute = 'foo'

    @staticmethod
    def static_method():
        return 'static_method return value'

    @classmethod
    def class_method(cls):
        return 'class_method return value'

    def __init__(self, name, age):
        self.name = name
        self.age = age

    def get_name(self):
        return self.name

    def instance_method(self):
        return 'instance_method return value'

    def method_with_varargs(self, *args):
        return 'method_with_varargs return value'

    def method_with_default_args(self, foo, bar='baz'):
        return 'method_with_default_args return value'

    def method_with_varkwargs(self, **kwargs):
        return 'method_with_varkwargs return value'

    def method_with_positional_arguments(self, foo):
        return 'method_with_positional_arguments return value'
Return easily identifiable values from User methods.
Return easily identifiable values from User methods.
Python
mit
uber/doubles
class User(object):
    """An importable dummy class used for testing purposes."""

    class_attribute = 'foo'

    @staticmethod
    def static_method():
-         pass
+         return 'static_method return value'

    @classmethod
    def class_method(cls):
-         return 'class method'
+         return 'class_method return value'

    def __init__(self, name, age):
        self.name = name
        self.age = age

    def get_name(self):
        return self.name

    def instance_method(self):
-         pass
+         return 'instance_method return value'

    def method_with_varargs(self, *args):
-         pass
+         return 'method_with_varargs return value'

    def method_with_default_args(self, foo, bar='baz'):
-         pass
+         return 'method_with_default_args return value'

    def method_with_varkwargs(self, **kwargs):
-         pass
+         return 'method_with_varkwargs return value'

    def method_with_positional_arguments(self, foo):
-         pass
+         return 'method_with_positional_arguments return value'
Return easily identifiable values from User methods.
## Code Before:
class User(object):
    """An importable dummy class used for testing purposes."""

    class_attribute = 'foo'

    @staticmethod
    def static_method():
        pass

    @classmethod
    def class_method(cls):
        return 'class method'

    def __init__(self, name, age):
        self.name = name
        self.age = age

    def get_name(self):
        return self.name

    def instance_method(self):
        pass

    def method_with_varargs(self, *args):
        pass

    def method_with_default_args(self, foo, bar='baz'):
        pass

    def method_with_varkwargs(self, **kwargs):
        pass

    def method_with_positional_arguments(self, foo):
        pass

## Instruction:
Return easily identifiable values from User methods.

## Code After:
class User(object):
    """An importable dummy class used for testing purposes."""

    class_attribute = 'foo'

    @staticmethod
    def static_method():
        return 'static_method return value'

    @classmethod
    def class_method(cls):
        return 'class_method return value'

    def __init__(self, name, age):
        self.name = name
        self.age = age

    def get_name(self):
        return self.name

    def instance_method(self):
        return 'instance_method return value'

    def method_with_varargs(self, *args):
        return 'method_with_varargs return value'

    def method_with_default_args(self, foo, bar='baz'):
        return 'method_with_default_args return value'

    def method_with_varkwargs(self, **kwargs):
        return 'method_with_varkwargs return value'

    def method_with_positional_arguments(self, foo):
        return 'method_with_positional_arguments return value'
# ... existing code ...
    def static_method():
        return 'static_method return value'
# ... modified code ...
    def class_method(cls):
        return 'class_method return value'
...
    def instance_method(self):
        return 'instance_method return value'
...
    def method_with_varargs(self, *args):
        return 'method_with_varargs return value'
...
    def method_with_default_args(self, foo, bar='baz'):
        return 'method_with_default_args return value'
...
    def method_with_varkwargs(self, **kwargs):
        return 'method_with_varkwargs return value'
...
    def method_with_positional_arguments(self, foo):
        return 'method_with_positional_arguments return value'
# ... rest of the code ...
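The point of this change is that a test double's default return (typically `None`) was indistinguishable from the old `pass` bodies; distinct strings let a test tell whether the real method ran. A quick illustrative check, with made-up constructor arguments:

user = User('alice', 30)
assert user.instance_method() == 'instance_method return value'
assert User.static_method() == 'static_method return value'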
3ca46f1407d8984ca5cbd1eb0581765386533d71
observatory/rcos/tests/test_rcos.py
observatory/rcos/tests/test_rcos.py
import pytest
from django.core.urlresolvers import reverse

@pytest.mark.django_db
def test_homepage(client):
    for url in (
        "/donor",
        "/students",
        "/courses",
        "/talks",
        "/programming-competition",
        "/achievements",
        "/urp-application",
        "/links-and-contacts",
        "/talk-sign-up",
        "/irc",
        "/faq",
        "/calendar",
        "/howtojoin",
        "/past-projects",
    ):
        #Load Site
        response = client.get(url)
        #Check for normal processing
        assert response.status_code in [200, 301]
import pytest
from django.core.urlresolvers import reverse

@pytest.mark.django_db
def test_homepage(client):
    for url in (
        "/",
        "/donor",
        "/students",
        "/courses",
        "/talks",
        "/programming-competition",
        "/achievements",
        "/urp-application",
        "/links-and-contacts",
        "/talk-sign-up",
        "/irc",
        "/faq",
        "/calendar",
        "/howtojoin",
        "/past-projects",
    ):
        #Load Site
        response = client.get(url)
        #Check for normal processing
        assert response.status_code in [200, 301]
Add / to rcos tests
rcos: Add / to rcos tests
Python
isc
rcos/Observatory,rcos/Observatory,rcos/Observatory,rcos/Observatory,rcos/Observatory,rcos/Observatory
import pytest
from django.core.urlresolvers import reverse

@pytest.mark.django_db
def test_homepage(client):
    for url in (
+         "/",
        "/donor",
        "/students",
        "/courses",
        "/talks",
        "/programming-competition",
        "/achievements",
        "/urp-application",
        "/links-and-contacts",
        "/talk-sign-up",
        "/irc",
        "/faq",
        "/calendar",
        "/howtojoin",
        "/past-projects",
    ):
        #Load Site
        response = client.get(url)
        #Check for normal processing
        assert response.status_code in [200, 301]
Add / to rcos tests
## Code Before:
import pytest
from django.core.urlresolvers import reverse

@pytest.mark.django_db
def test_homepage(client):
    for url in (
        "/donor",
        "/students",
        "/courses",
        "/talks",
        "/programming-competition",
        "/achievements",
        "/urp-application",
        "/links-and-contacts",
        "/talk-sign-up",
        "/irc",
        "/faq",
        "/calendar",
        "/howtojoin",
        "/past-projects",
    ):
        #Load Site
        response = client.get(url)
        #Check for normal processing
        assert response.status_code in [200, 301]

## Instruction:
Add / to rcos tests

## Code After:
import pytest
from django.core.urlresolvers import reverse

@pytest.mark.django_db
def test_homepage(client):
    for url in (
        "/",
        "/donor",
        "/students",
        "/courses",
        "/talks",
        "/programming-competition",
        "/achievements",
        "/urp-application",
        "/links-and-contacts",
        "/talk-sign-up",
        "/irc",
        "/faq",
        "/calendar",
        "/howtojoin",
        "/past-projects",
    ):
        #Load Site
        response = client.get(url)
        #Check for normal processing
        assert response.status_code in [200, 301]
// ... existing code ...
    for url in (
        "/",
        "/donor",
// ... rest of the code ...
56c25218cb3c987201839917930fc1ae791b5601
reg/__init__.py
reg/__init__.py
from .dispatch import dispatch, Dispatch
from .context import (dispatch_method, DispatchMethod,
                      methodify, clean_dispatch_methods)
from .arginfo import arginfo
from .error import RegistrationError
from .predicate import (Predicate, KeyIndex, ClassIndex,
                        match_key, match_instance, match_class)
from .cache import DictCachingKeyLookup, LruCachingKeyLookup
from .dispatch import dispatch, Dispatch, LookupEntry
from .context import (dispatch_method, DispatchMethod,
                      methodify, clean_dispatch_methods)
from .arginfo import arginfo
from .error import RegistrationError
from .predicate import (Predicate, KeyIndex, ClassIndex,
                        match_key, match_instance, match_class)
from .cache import DictCachingKeyLookup, LruCachingKeyLookup
Add LookupEntry to the API.
Add LookupEntry to the API.
Python
bsd-3-clause
morepath/reg,taschini/reg
- from .dispatch import dispatch, Dispatch
+ from .dispatch import dispatch, Dispatch, LookupEntry
from .context import (dispatch_method, DispatchMethod,
                      methodify, clean_dispatch_methods)
from .arginfo import arginfo
from .error import RegistrationError
from .predicate import (Predicate, KeyIndex, ClassIndex,
                        match_key, match_instance, match_class)
from .cache import DictCachingKeyLookup, LruCachingKeyLookup
Add LookupEntry to the API.
## Code Before:
from .dispatch import dispatch, Dispatch
from .context import (dispatch_method, DispatchMethod,
                      methodify, clean_dispatch_methods)
from .arginfo import arginfo
from .error import RegistrationError
from .predicate import (Predicate, KeyIndex, ClassIndex,
                        match_key, match_instance, match_class)
from .cache import DictCachingKeyLookup, LruCachingKeyLookup

## Instruction:
Add LookupEntry to the API.

## Code After:
from .dispatch import dispatch, Dispatch, LookupEntry
from .context import (dispatch_method, DispatchMethod,
                      methodify, clean_dispatch_methods)
from .arginfo import arginfo
from .error import RegistrationError
from .predicate import (Predicate, KeyIndex, ClassIndex,
                        match_key, match_instance, match_class)
from .cache import DictCachingKeyLookup, LruCachingKeyLookup
# ... existing code ...
from .dispatch import dispatch, Dispatch, LookupEntry
from .context import (dispatch_method, DispatchMethod,
# ... rest of the code ...
cb39c1edf395f7da1c241010fc833fe512fa74ac
bcbio/distributed/clargs.py
bcbio/distributed/clargs.py
def to_parallel(args, module="bcbio.distributed"):
    """Convert input arguments into a parallel dictionary for passing to processing.
    """
    ptype, cores = _get_cores_and_type(args.numcores, getattr(args, "paralleltype", None),
                                       args.scheduler)
    parallel = {"type": ptype, "cores": cores,
                "scheduler": args.scheduler, "queue": args.queue,
                "tag": args.tag, "module": module,
                "resources": args.resources, "timeout": args.timeout,
                "retries": args.retries,
                "run_local": args.queue == "localrun",
                "local_controller": args.local_controller}
    return parallel

def _get_cores_and_type(numcores, paralleltype, scheduler):
    """Return core and parallelization approach from command line providing sane defaults.
    """
    if scheduler is not None:
        paralleltype = "ipython"
    if paralleltype is None:
        paralleltype = "local"
    if not numcores or int(numcores) < 1:
        numcores = 1
    return paralleltype, int(numcores)
def to_parallel(args, module="bcbio.distributed"):
    """Convert input arguments into a parallel dictionary for passing to processing.
    """
    ptype, cores = _get_cores_and_type(args.numcores,
                                       getattr(args, "paralleltype", None),
                                       args.scheduler)
    local_controller = getattr(args, "local_controller", False)
    parallel = {"type": ptype, "cores": cores,
                "scheduler": args.scheduler, "queue": args.queue,
                "tag": args.tag, "module": module,
                "resources": args.resources, "timeout": args.timeout,
                "retries": args.retries,
                "run_local": args.queue == "localrun",
                "local_controller": local_controller}
    return parallel

def _get_cores_and_type(numcores, paralleltype, scheduler):
    """Return core and parallelization approach from command line providing sane defaults.
    """
    if scheduler is not None:
        paralleltype = "ipython"
    if paralleltype is None:
        paralleltype = "local"
    if not numcores or int(numcores) < 1:
        numcores = 1
    return paralleltype, int(numcores)
Fix for bcbio-nextgen-vm not passing the local_controller option.
Fix for bcbio-nextgen-vm not passing the local_controller option.
Python
mit
brainstorm/bcbio-nextgen,brainstorm/bcbio-nextgen,vladsaveliev/bcbio-nextgen,vladsaveliev/bcbio-nextgen,lbeltrame/bcbio-nextgen,brainstorm/bcbio-nextgen,lbeltrame/bcbio-nextgen,a113n/bcbio-nextgen,biocyberman/bcbio-nextgen,vladsaveliev/bcbio-nextgen,lbeltrame/bcbio-nextgen,chapmanb/bcbio-nextgen,a113n/bcbio-nextgen,chapmanb/bcbio-nextgen,chapmanb/bcbio-nextgen,a113n/bcbio-nextgen,biocyberman/bcbio-nextgen,biocyberman/bcbio-nextgen
def to_parallel(args, module="bcbio.distributed"):
    """Convert input arguments into a parallel dictionary for passing to processing.
    """
-     ptype, cores = _get_cores_and_type(args.numcores, getattr(args, "paralleltype", None),
+     ptype, cores = _get_cores_and_type(args.numcores,
+                                        getattr(args, "paralleltype", None),
                                       args.scheduler)
+     local_controller = getattr(args, "local_controller", False)
    parallel = {"type": ptype, "cores": cores,
                "scheduler": args.scheduler, "queue": args.queue,
                "tag": args.tag, "module": module,
                "resources": args.resources, "timeout": args.timeout,
                "retries": args.retries,
                "run_local": args.queue == "localrun",
-                 "local_controller": args.local_controller}
+                 "local_controller": local_controller}
    return parallel

def _get_cores_and_type(numcores, paralleltype, scheduler):
    """Return core and parallelization approach from command line providing sane defaults.
    """
    if scheduler is not None:
        paralleltype = "ipython"
    if paralleltype is None:
        paralleltype = "local"
    if not numcores or int(numcores) < 1:
        numcores = 1
    return paralleltype, int(numcores)
Fix for bcbio-nextgen-vm not passing the local_controller option.
## Code Before:
def to_parallel(args, module="bcbio.distributed"):
    """Convert input arguments into a parallel dictionary for passing to processing.
    """
    ptype, cores = _get_cores_and_type(args.numcores, getattr(args, "paralleltype", None),
                                       args.scheduler)
    parallel = {"type": ptype, "cores": cores,
                "scheduler": args.scheduler, "queue": args.queue,
                "tag": args.tag, "module": module,
                "resources": args.resources, "timeout": args.timeout,
                "retries": args.retries,
                "run_local": args.queue == "localrun",
                "local_controller": args.local_controller}
    return parallel

def _get_cores_and_type(numcores, paralleltype, scheduler):
    """Return core and parallelization approach from command line providing sane defaults.
    """
    if scheduler is not None:
        paralleltype = "ipython"
    if paralleltype is None:
        paralleltype = "local"
    if not numcores or int(numcores) < 1:
        numcores = 1
    return paralleltype, int(numcores)

## Instruction:
Fix for bcbio-nextgen-vm not passing the local_controller option.

## Code After:
def to_parallel(args, module="bcbio.distributed"):
    """Convert input arguments into a parallel dictionary for passing to processing.
    """
    ptype, cores = _get_cores_and_type(args.numcores,
                                       getattr(args, "paralleltype", None),
                                       args.scheduler)
    local_controller = getattr(args, "local_controller", False)
    parallel = {"type": ptype, "cores": cores,
                "scheduler": args.scheduler, "queue": args.queue,
                "tag": args.tag, "module": module,
                "resources": args.resources, "timeout": args.timeout,
                "retries": args.retries,
                "run_local": args.queue == "localrun",
                "local_controller": local_controller}
    return parallel

def _get_cores_and_type(numcores, paralleltype, scheduler):
    """Return core and parallelization approach from command line providing sane defaults.
    """
    if scheduler is not None:
        paralleltype = "ipython"
    if paralleltype is None:
        paralleltype = "local"
    if not numcores or int(numcores) < 1:
        numcores = 1
    return paralleltype, int(numcores)
// ... existing code ...
    """
    ptype, cores = _get_cores_and_type(args.numcores,
                                       getattr(args, "paralleltype", None),
                                       args.scheduler)
    local_controller = getattr(args, "local_controller", False)
    parallel = {"type": ptype, "cores": cores,
// ... modified code ...
                "run_local": args.queue == "localrun",
                "local_controller": local_controller}
    return parallel
// ... rest of the code ...
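The fix works because an `argparse.Namespace` raises `AttributeError` for attributes that were never defined, while `getattr` with a default does not. A standalone illustration:

import argparse

ns = argparse.Namespace(numcores=4)      # parsed without a local_controller flag
# ns.local_controller                    # would raise AttributeError
getattr(ns, "local_controller", False)   # -> False instead of an error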
d60112e569e13333cfd6316d30683282ceff8bee
changes/jobs/cleanup_builds.py
changes/jobs/cleanup_builds.py
from datetime import datetime, timedelta

from changes.config import db, queue
from changes.constants import Status
from changes.models.build import Build

def cleanup_builds():
    """
    Look for any jobs which haven't checked in (but are listed in a pending
    state) and mark them as finished in an unknown state.
    """
    now = datetime.utcnow()
    cutoff = now - timedelta(minutes=5)
    build_list = Build.query.filter(
        Build.status != Status.finished,
        Build.date_modified < cutoff,
    )

    db.session.query(Build).filter(
        Build.id.in_(b.id for b in build_list),
    ).update({
        Build.date_modified: now,
    })

    for build in build_list:
        queue.delay('sync_build', kwargs={
            'build_id': build.id.hex,
        })
from datetime import datetime, timedelta

from sqlalchemy.sql import func

from changes.config import db, queue
from changes.constants import Status
from changes.models.build import Build

def cleanup_builds():
    """
    Look for any jobs which haven't checked in (but are listed in a pending
    state) and mark them as finished in an unknown state.
    """
    now = datetime.utcnow()
    cutoff = now - timedelta(minutes=5)
    build_list = Build.query.filter(
        Build.status != Status.finished,
        Build.date_modified < cutoff,
    )

    db.session.query(Build).filter(
        Build.id.in_(b.id for b in build_list),
    ).update({
        Build.date_modified: func.now(),
    })

    for build in build_list:
        queue.delay('sync_build', kwargs={
            'build_id': build.id.hex,
        })
Use func.now for timestamp update
Use func.now for timestamp update
Python
apache-2.0
dropbox/changes,bowlofstew/changes,wfxiang08/changes,bowlofstew/changes,dropbox/changes,wfxiang08/changes,bowlofstew/changes,bowlofstew/changes,dropbox/changes,wfxiang08/changes,wfxiang08/changes,dropbox/changes
from datetime import datetime, timedelta

+ from sqlalchemy.sql import func

from changes.config import db, queue
from changes.constants import Status
from changes.models.build import Build

def cleanup_builds():
    """
    Look for any jobs which haven't checked in (but are listed in a pending
    state) and mark them as finished in an unknown state.
    """
    now = datetime.utcnow()
    cutoff = now - timedelta(minutes=5)
    build_list = Build.query.filter(
        Build.status != Status.finished,
        Build.date_modified < cutoff,
    )

    db.session.query(Build).filter(
        Build.id.in_(b.id for b in build_list),
    ).update({
-         Build.date_modified: now,
+         Build.date_modified: func.now(),
    })

    for build in build_list:
        queue.delay('sync_build', kwargs={
            'build_id': build.id.hex,
        })
Use func.now for timestamp update
## Code Before:
from datetime import datetime, timedelta

from changes.config import db, queue
from changes.constants import Status
from changes.models.build import Build

def cleanup_builds():
    """
    Look for any jobs which haven't checked in (but are listed in a pending
    state) and mark them as finished in an unknown state.
    """
    now = datetime.utcnow()
    cutoff = now - timedelta(minutes=5)
    build_list = Build.query.filter(
        Build.status != Status.finished,
        Build.date_modified < cutoff,
    )

    db.session.query(Build).filter(
        Build.id.in_(b.id for b in build_list),
    ).update({
        Build.date_modified: now,
    })

    for build in build_list:
        queue.delay('sync_build', kwargs={
            'build_id': build.id.hex,
        })

## Instruction:
Use func.now for timestamp update

## Code After:
from datetime import datetime, timedelta

from sqlalchemy.sql import func

from changes.config import db, queue
from changes.constants import Status
from changes.models.build import Build

def cleanup_builds():
    """
    Look for any jobs which haven't checked in (but are listed in a pending
    state) and mark them as finished in an unknown state.
    """
    now = datetime.utcnow()
    cutoff = now - timedelta(minutes=5)
    build_list = Build.query.filter(
        Build.status != Status.finished,
        Build.date_modified < cutoff,
    )

    db.session.query(Build).filter(
        Build.id.in_(b.id for b in build_list),
    ).update({
        Build.date_modified: func.now(),
    })

    for build in build_list:
        queue.delay('sync_build', kwargs={
            'build_id': build.id.hex,
        })
# ... existing code ...
from datetime import datetime, timedelta

from sqlalchemy.sql import func
# ... modified code ...
    ).update({
        Build.date_modified: func.now(),
    })
# ... rest of the code ...
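The difference this commit relies on: `datetime.utcnow()` is evaluated once in Python and bound into the statement as a literal, while `func.now()` is rendered into the SQL itself and evaluated by the database at execution time. Roughly:

from sqlalchemy.sql import func
# {Build.date_modified: now}         -> SET date_modified = '2014-...'  (client clock, fixed value)
# {Build.date_modified: func.now()}  -> SET date_modified = now()       (database clock, at execution)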
5f601f742f7a63a1a504e9af3fca61df9deb4707
util.py
util.py
def topological_sort(start_nodes, dependencies):
    """
    Return a topologically sorted list of :param:`start_nodes` and
    :param:`dependencies`.

    Nodes are checked on identity, not equality.

    Raises a ValueError if no topological sort is possible.

    :param start_nodes: list of nodes of graph with no incoming edges
    :param dependencies: list of dependency edges; ([dependencies], target),
                         dependencies may be of any sequence or be atomic
    :returns: topologically sorted nodes
    :rtype: list of nodes
    """
    seen = set(start_nodes)
    for deps, target in dependencies:
        if deps is None:
            seen.add(target)

    order = list(seen)
    old_dependencies = []
    while True:
        dependencies = [(deps, target) for deps, target in dependencies
                        if target not in seen]
        if not dependencies:
            return order
        if old_dependencies == dependencies:
            raise ValueError("no topological sort possible")

        for deps, target in dependencies:
            # test for sequences
            try:
                deps = iter(deps)
            # atomic object
            except TypeError:
                deps = [deps]

            if all(dep in seen for dep in deps):
                order.append(target)
                seen.add(target)

        # copy dependencies to check progress
        old_dependencies = list(dependencies)
def topological_sort(start_nodes, dependencies):
    """
    Return a topologically sorted list of :param:`start_nodes` and
    :param:`dependencies`.

    Nodes are checked on identity, not equality.

    Raises a ValueError if no topological sort is possible.

    :param start_nodes: sequence of nodes of graph with no incoming edges
    :param dependencies: sequence of dependency edges; ([dependencies], target),
                         dependencies may be of any sequence or be atomic, if
                         there is no dependency it must be ``None``
    :returns: topologically sorted nodes
    :rtype: list of nodes
    """
    seen = set(start_nodes)
    for deps, target in dependencies:
        if deps is None:
            seen.add(target)

    order = list(seen)
    old_dependencies = []
    while True:
        dependencies = [(deps, target) for deps, target in dependencies
                        if target not in seen]
        if not dependencies:
            return order
        if old_dependencies == dependencies:
            raise ValueError("no topological sort possible")

        for deps, target in dependencies:
            # test for sequences
            try:
                deps = iter(deps)
            # atomic object
            except TypeError:
                deps = [deps]

            if all(dep in seen for dep in deps):
                order.append(target)
                seen.add(target)

        # copy dependencies to check progress
        old_dependencies = list(dependencies)
Fix documentation of top sort.
Fix documentation of top sort.

Change wording from list to sequence and include note about how to specify no dependencies.

Signed-off-by: Michael Markert <[email protected]>
Python
mit
fhirschmann/penchy,fhirschmann/penchy
def topological_sort(start_nodes, dependencies):
    """
    Return a topologically sorted list of :param:`start_nodes` and
    :param:`dependencies`.

    Nodes are checked on identity, not equality.

    Raises a ValueError if no topological sort is possible.

-     :param start_nodes: list of nodes of graph with no incoming edges
+     :param start_nodes: sequence of nodes of graph with no incoming edges
-     :param dependencies: list of dependency edges; ([dependencies], target),
+     :param dependencies: sequence of dependency edges; ([dependencies], target),
-                          dependencies may be of any sequence or be atomic
+                          dependencies may be of any sequence or be atomic, if
+                          there is no dependency it must be ``None``
    :returns: topologically sorted nodes
    :rtype: list of nodes
    """
    seen = set(start_nodes)
    for deps, target in dependencies:
        if deps is None:
            seen.add(target)

    order = list(seen)
    old_dependencies = []
    while True:
        dependencies = [(deps, target) for deps, target in dependencies
                        if target not in seen]
        if not dependencies:
            return order
        if old_dependencies == dependencies:
            raise ValueError("no topological sort possible")

        for deps, target in dependencies:
            # test for sequences
            try:
                deps = iter(deps)
            # atomic object
            except TypeError:
                deps = [deps]

            if all(dep in seen for dep in deps):
                order.append(target)
                seen.add(target)

        # copy dependencies to check progress
        old_dependencies = list(dependencies)
Fix documentation of top sort.
## Code Before:
def topological_sort(start_nodes, dependencies):
    """
    Return a topologically sorted list of :param:`start_nodes` and
    :param:`dependencies`.

    Nodes are checked on identity, not equality.

    Raises a ValueError if no topological sort is possible.

    :param start_nodes: list of nodes of graph with no incoming edges
    :param dependencies: list of dependency edges; ([dependencies], target),
                         dependencies may be of any sequence or be atomic
    :returns: topologically sorted nodes
    :rtype: list of nodes
    """
    seen = set(start_nodes)
    for deps, target in dependencies:
        if deps is None:
            seen.add(target)

    order = list(seen)
    old_dependencies = []
    while True:
        dependencies = [(deps, target) for deps, target in dependencies
                        if target not in seen]
        if not dependencies:
            return order
        if old_dependencies == dependencies:
            raise ValueError("no topological sort possible")

        for deps, target in dependencies:
            # test for sequences
            try:
                deps = iter(deps)
            # atomic object
            except TypeError:
                deps = [deps]

            if all(dep in seen for dep in deps):
                order.append(target)
                seen.add(target)

        # copy dependencies to check progress
        old_dependencies = list(dependencies)

## Instruction:
Fix documentation of top sort.

## Code After:
def topological_sort(start_nodes, dependencies):
    """
    Return a topologically sorted list of :param:`start_nodes` and
    :param:`dependencies`.

    Nodes are checked on identity, not equality.

    Raises a ValueError if no topological sort is possible.

    :param start_nodes: sequence of nodes of graph with no incoming edges
    :param dependencies: sequence of dependency edges; ([dependencies], target),
                         dependencies may be of any sequence or be atomic, if
                         there is no dependency it must be ``None``
    :returns: topologically sorted nodes
    :rtype: list of nodes
    """
    seen = set(start_nodes)
    for deps, target in dependencies:
        if deps is None:
            seen.add(target)

    order = list(seen)
    old_dependencies = []
    while True:
        dependencies = [(deps, target) for deps, target in dependencies
                        if target not in seen]
        if not dependencies:
            return order
        if old_dependencies == dependencies:
            raise ValueError("no topological sort possible")

        for deps, target in dependencies:
            # test for sequences
            try:
                deps = iter(deps)
            # atomic object
            except TypeError:
                deps = [deps]

            if all(dep in seen for dep in deps):
                order.append(target)
                seen.add(target)

        # copy dependencies to check progress
        old_dependencies = list(dependencies)
...
    :param start_nodes: sequence of nodes of graph with no incoming edges
    :param dependencies: sequence of dependency edges; ([dependencies], target),
                         dependencies may be of any sequence or be atomic, if
                         there is no dependency it must be ``None``
    :returns: topologically sorted nodes
...
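A small usage sketch of the function as documented after this change; the input values are made up, and since `order` starts from a set, the relative order of the two independent nodes may vary:

order = topological_sort(
    start_nodes=[1],
    dependencies=[([1], 2), (2, 3), (None, 4)],
)
# one possible result: [1, 4, 2, 3]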
9cb485e97873eff66ba283f30765bb9c66a3c864
djangae/core/management/__init__.py
djangae/core/management/__init__.py
import argparse

import djangae.sandbox as sandbox

def execute_from_command_line(argv=None):
    """Wraps Django's `execute_from_command_line` to initialize a djangae
    sandbox before running a management command.

    Note: The '--sandbox' arg must come first. All other args are forwarded to
          Django as normal.
    """
    parser = argparse.ArgumentParser(prog='manage.py')
    parser.add_argument(
        '--sandbox', default=sandbox.LOCAL, choices=sandbox.SANDBOXES.keys())
    parser.add_argument('args', nargs=argparse.REMAINDER)
    namespace = parser.parse_args(argv[1:])

    django_argv = ['manage.py'] + namespace.args

    with sandbox.activate(namespace.sandbox, add_sdk_to_path=True):
        import django.core.management as django_management  # Now on the path
        django_management.execute_from_command_line(django_argv)
import sys
import argparse

import djangae.sandbox as sandbox

def execute_from_command_line(argv=None):
    """Wraps Django's `execute_from_command_line` to initialize a djangae
    sandbox before running a management command.

    Note: The '--sandbox' arg must come first. All other args are forwarded to
          Django as normal.
    """
    argv = argv or sys.argv
    parser = argparse.ArgumentParser(prog='manage.py')
    parser.add_argument(
        '--sandbox', default=sandbox.LOCAL, choices=sandbox.SANDBOXES.keys())
    parser.add_argument('args', nargs=argparse.REMAINDER)
    namespace = parser.parse_args(argv[1:])

    django_argv = ['manage.py'] + namespace.args

    with sandbox.activate(namespace.sandbox, add_sdk_to_path=True):
        import django.core.management as django_management  # Now on the path
        django_management.execute_from_command_line(django_argv)
Support no-args for djangae.core.management.execute_from_commandline - matches django implementation.
Support no-args for djangae.core.management.execute_from_commandline - matches django implementation.
Python
bsd-3-clause
kirberich/djangae,nealedj/djangae,asendecka/djangae,stucox/djangae,martinogden/djangae,chargrizzle/djangae,stucox/djangae,armirusco/djangae,leekchan/djangae,armirusco/djangae,trik/djangae,jscissr/djangae,grzes/djangae,SiPiggles/djangae,trik/djangae,kirberich/djangae,armirusco/djangae,trik/djangae,asendecka/djangae,stucox/djangae,pablorecio/djangae,wangjun/djangae,pablorecio/djangae,potatolondon/djangae,martinogden/djangae,jscissr/djangae,kirberich/djangae,potatolondon/djangae,martinogden/djangae,leekchan/djangae,grzes/djangae,SiPiggles/djangae,chargrizzle/djangae,jscissr/djangae,nealedj/djangae,chargrizzle/djangae,asendecka/djangae,grzes/djangae,pablorecio/djangae,b-cannon/my_djae,wangjun/djangae,SiPiggles/djangae,leekchan/djangae,nealedj/djangae,wangjun/djangae
+ import sys
import argparse

import djangae.sandbox as sandbox

def execute_from_command_line(argv=None):
    """Wraps Django's `execute_from_command_line` to initialize a djangae
    sandbox before running a management command.

    Note: The '--sandbox' arg must come first. All other args are forwarded to
          Django as normal.
    """
+     argv = argv or sys.argv
    parser = argparse.ArgumentParser(prog='manage.py')
    parser.add_argument(
        '--sandbox', default=sandbox.LOCAL, choices=sandbox.SANDBOXES.keys())
    parser.add_argument('args', nargs=argparse.REMAINDER)
    namespace = parser.parse_args(argv[1:])

    django_argv = ['manage.py'] + namespace.args

    with sandbox.activate(namespace.sandbox, add_sdk_to_path=True):
        import django.core.management as django_management  # Now on the path
        django_management.execute_from_command_line(django_argv)
Support no-args for djangae.core.management.execute_from_commandline - matches django implementation.
## Code Before:
import argparse

import djangae.sandbox as sandbox

def execute_from_command_line(argv=None):
    """Wraps Django's `execute_from_command_line` to initialize a djangae
    sandbox before running a management command.

    Note: The '--sandbox' arg must come first. All other args are forwarded to
          Django as normal.
    """
    parser = argparse.ArgumentParser(prog='manage.py')
    parser.add_argument(
        '--sandbox', default=sandbox.LOCAL, choices=sandbox.SANDBOXES.keys())
    parser.add_argument('args', nargs=argparse.REMAINDER)
    namespace = parser.parse_args(argv[1:])

    django_argv = ['manage.py'] + namespace.args

    with sandbox.activate(namespace.sandbox, add_sdk_to_path=True):
        import django.core.management as django_management  # Now on the path
        django_management.execute_from_command_line(django_argv)

## Instruction:
Support no-args for djangae.core.management.execute_from_commandline - matches django implementation.

## Code After:
import sys
import argparse

import djangae.sandbox as sandbox

def execute_from_command_line(argv=None):
    """Wraps Django's `execute_from_command_line` to initialize a djangae
    sandbox before running a management command.

    Note: The '--sandbox' arg must come first. All other args are forwarded to
          Django as normal.
    """
    argv = argv or sys.argv
    parser = argparse.ArgumentParser(prog='manage.py')
    parser.add_argument(
        '--sandbox', default=sandbox.LOCAL, choices=sandbox.SANDBOXES.keys())
    parser.add_argument('args', nargs=argparse.REMAINDER)
    namespace = parser.parse_args(argv[1:])

    django_argv = ['manage.py'] + namespace.args

    with sandbox.activate(namespace.sandbox, add_sdk_to_path=True):
        import django.core.management as django_management  # Now on the path
        django_management.execute_from_command_line(django_argv)
// ... existing code ...
import sys
import argparse
// ... modified code ...
    """
    argv = argv or sys.argv
    parser = argparse.ArgumentParser(prog='manage.py')
// ... rest of the code ...
c98ac4ca313606c966dc45dbe7861898177f2f04
api/tests/test_delete_bucket_list.py
api/tests/test_delete_bucket_list.py
import json

from api.test import BaseTestCase
from api.models import BucketList

class TestDeleteBucketList(BaseTestCase):
    def test_delete_bucket_list(self):
        bucket_list_one = {
            "description": "Movies i have to watch by the end of the week",
            "status": "Pending",
            "title": "Entertainment",
            "user_id": 1
        }
        self.client.post('/api/v1/bucketlists', headers={
            'Authorization': 'JWT ' + self.token
        }, data=json.dumps(bucket_list_one), content_type='application/json')
        count = len(BucketList.query.all())
        self.client.delete('/api/v1/bucketlists/1', headers={
            'Authorization': 'JWT ' + self.token
        },)
        new_count = len(BucketList.query.all())
        self.assertEqual(new_count - count, -1)
import json

from api.test import BaseTestCase
from api.models import BucketList

class TestDeleteBucketList(BaseTestCase):
    def test_delete_bucket_list(self):
        bucket_list_one = {
            "description": "Movies i have to watch by the end of the week",
            "status": "Pending",
            "title": "Entertainment",
            "user_id": 1
        }
        self.client.post('/api/v1/bucketlists', headers={
            'Authorization': 'JWT ' + self.token
        }, data=json.dumps(bucket_list_one), content_type='application/json')
        count = len(BucketList.query.all())
        self.client.delete('/api/v1/bucketlists/1', headers={
            'Authorization': 'JWT ' + self.token
        },)
        new_count = len(BucketList.query.all())
        self.assertEqual(new_count - count, -1)

        response = self.client.get(
            '/api/v1/bucketlists/1',
            headers=dict(
                Authorization='Bearer ' + self.token
            )
        )

        self.assertIn("Bucket list not found", str(response.data))
        self.assertEqual(response.status_code, 404)
Modify test to test that bucketlist no longer exists in system
Modify test to test that bucketlist no longer exists in system
Python
mit
EdwinKato/bucket-list,EdwinKato/bucket-list,EdwinKato/bucket-list,EdwinKato/bucket-list,EdwinKato/bucket-list
import json

from api.test import BaseTestCase
from api.models import BucketList

class TestDeleteBucketList(BaseTestCase):
    def test_delete_bucket_list(self):
        bucket_list_one = {
            "description": "Movies i have to watch by the end of the week",
            "status": "Pending",
            "title": "Entertainment",
            "user_id": 1
        }
        self.client.post('/api/v1/bucketlists', headers={
            'Authorization': 'JWT ' + self.token
        }, data=json.dumps(bucket_list_one), content_type='application/json')
        count = len(BucketList.query.all())
        self.client.delete('/api/v1/bucketlists/1', headers={
            'Authorization': 'JWT ' + self.token
        },)
        new_count = len(BucketList.query.all())
        self.assertEqual(new_count - count, -1)
+
+         response = self.client.get(
+             '/api/v1/bucketlists/1',
+             headers=dict(
+                 Authorization='Bearer ' + self.token
+             )
+         )
+
+         self.assertIn("Bucket list not found", str(response.data))
+         self.assertEqual(response.status_code, 404)
Modify test to test that bucketlist no longer exists in system
## Code Before:
import json

from api.test import BaseTestCase
from api.models import BucketList

class TestDeleteBucketList(BaseTestCase):
    def test_delete_bucket_list(self):
        bucket_list_one = {
            "description": "Movies i have to watch by the end of the week",
            "status": "Pending",
            "title": "Entertainment",
            "user_id": 1
        }
        self.client.post('/api/v1/bucketlists', headers={
            'Authorization': 'JWT ' + self.token
        }, data=json.dumps(bucket_list_one), content_type='application/json')
        count = len(BucketList.query.all())
        self.client.delete('/api/v1/bucketlists/1', headers={
            'Authorization': 'JWT ' + self.token
        },)
        new_count = len(BucketList.query.all())
        self.assertEqual(new_count - count, -1)

## Instruction:
Modify test to test that bucketlist no longer exists in system

## Code After:
import json

from api.test import BaseTestCase
from api.models import BucketList

class TestDeleteBucketList(BaseTestCase):
    def test_delete_bucket_list(self):
        bucket_list_one = {
            "description": "Movies i have to watch by the end of the week",
            "status": "Pending",
            "title": "Entertainment",
            "user_id": 1
        }
        self.client.post('/api/v1/bucketlists', headers={
            'Authorization': 'JWT ' + self.token
        }, data=json.dumps(bucket_list_one), content_type='application/json')
        count = len(BucketList.query.all())
        self.client.delete('/api/v1/bucketlists/1', headers={
            'Authorization': 'JWT ' + self.token
        },)
        new_count = len(BucketList.query.all())
        self.assertEqual(new_count - count, -1)

        response = self.client.get(
            '/api/v1/bucketlists/1',
            headers=dict(
                Authorization='Bearer ' + self.token
            )
        )

        self.assertIn("Bucket list not found", str(response.data))
        self.assertEqual(response.status_code, 404)
// ... existing code ...
        self.assertEqual(new_count - count, -1)

        response = self.client.get(
            '/api/v1/bucketlists/1',
            headers=dict(
                Authorization='Bearer ' + self.token
            )
        )

        self.assertIn("Bucket list not found", str(response.data))
        self.assertEqual(response.status_code, 404)
// ... rest of the code ...
66aa43a5e8963c440261128e5b317679d01917e6
server/routes.py
server/routes.py
from __init__ import app, db
from subprocess import call
from models import User
from flask import request
from flask import abort
from flask import jsonify

@app.route('/register', methods=['POST'])
def register():
    if not request.json or not 'guid' in request.json:
        abort(400) # Malformed Packet

    guid = request.json['guid']

    user = User(guid)
    db.session.add(user)
    db.session.commit()

    registerObject = {
        'id': user.guid
    }

    return jsonify(registerObject), 201

@app.route('/phone', methods=['POST'])
def phone():
    if not request.json or (not ('call-time' in request.json)) or (not ('id' in request.json)):
        abort(400) # Malformed Packet

    user = User.query.filter_by(id=request.json["id"]).first()

    if not user: #Check database for id to make sure it exists
        abort(401)

    # Todo Steve will do this

    return "", 201

@app.route('/')
def landing_page():
    return 'Nothing seems to be here'

@app.route('/update-server', methods=['GET', 'POST'])
def update():
    call(["git pull"], shell=True)
    return 'Success!'
from __init__ import app, db
from subprocess import call
from models import User
from flask import request
from flask import abort
from flask import jsonify

@app.route('/register', methods=['POST'])
def register():
    if not request.json or not 'guid' in request.json:
        abort(400) # Malformed Packet

    guid = request.json['guid']

    user = User(guid)
    db.session.add(user)
    db.session.commit()

    registerObject = {
        'id': user.guid
    }

    return jsonify(registerObject), 201

@app.route('/phone', methods=['POST'])
def phone():
    if not request.json or (not ('call-time' in request.json)) or (not ('id' in request.json)):
        abort(400) # Malformed Packet

    user = User.query.filter_by(id=request.json["id"]).first()

    if not user: #Check database for id to make sure it exists
        abort(401)

    # Todo Steve will do this

    return "", 200

@app.route('/msg_to', methods=['POST'])
def msg_to():
    if not request.json or (not ('phone_number' in request.json)) or (not ('id' in request.json)):
        abort(400) # Malformed Packet

    user = User.query.filter_by(id=request.json["id"]).first()

    if not user: #Check database for id to make sure it exists
        abort(401)

    # Waiting on Steve's commit

    return "", 200

@app.route('/')
def landing_page():
    return 'Nothing seems to be here'

@app.route('/update-server', methods=['GET', 'POST'])
def update():
    call(["git pull"], shell=True)
    return 'Success!'
Return header fix and msg_to route
Return header fix and msg_to route
Python
mit
stevex86/RandomActsOfKindness,stevex86/RandomActsOfKindness
from __init__ import app, db
from subprocess import call
from models import User
from flask import request
from flask import abort
from flask import jsonify

@app.route('/register', methods=['POST'])
def register():
    if not request.json or not 'guid' in request.json:
        abort(400) # Malformed Packet

    guid = request.json['guid']

    user = User(guid)
    db.session.add(user)
    db.session.commit()

    registerObject = {
        'id': user.guid
    }

    return jsonify(registerObject), 201

@app.route('/phone', methods=['POST'])
def phone():
    if not request.json or (not ('call-time' in request.json)) or (not ('id' in request.json)):
        abort(400) # Malformed Packet

    user = User.query.filter_by(id=request.json["id"]).first()

    if not user: #Check database for id to make sure it exists
        abort(401)

    # Todo Steve will do this

-     return "", 201
+     return "", 200
+
+ @app.route('/msg_to', methods=['POST'])
+ def msg_to():
+     if not request.json or (not ('phone_number' in request.json)) or (not ('id' in request.json)):
+         abort(400) # Malformed Packet
+
+     user = User.query.filter_by(id=request.json["id"]).first()
+
+     if not user: #Check database for id to make sure it exists
+         abort(401)
+
+     # Waiting on Steve's commit
+
+     return "", 200

@app.route('/')
def landing_page():
    return 'Nothing seems to be here'

@app.route('/update-server', methods=['GET', 'POST'])
def update():
    call(["git pull"], shell=True)
    return 'Success!'
Return header fix and msg_to route
## Code Before:
from __init__ import app, db
from subprocess import call
from models import User
from flask import request
from flask import abort
from flask import jsonify

@app.route('/register', methods=['POST'])
def register():
    if not request.json or not 'guid' in request.json:
        abort(400) # Malformed Packet

    guid = request.json['guid']

    user = User(guid)
    db.session.add(user)
    db.session.commit()

    registerObject = {
        'id': user.guid
    }

    return jsonify(registerObject), 201

@app.route('/phone', methods=['POST'])
def phone():
    if not request.json or (not ('call-time' in request.json)) or (not ('id' in request.json)):
        abort(400) # Malformed Packet

    user = User.query.filter_by(id=request.json["id"]).first()

    if not user: #Check database for id to make sure it exists
        abort(401)

    # Todo Steve will do this

    return "", 201

@app.route('/')
def landing_page():
    return 'Nothing seems to be here'

@app.route('/update-server', methods=['GET', 'POST'])
def update():
    call(["git pull"], shell=True)
    return 'Success!'

## Instruction:
Return header fix and msg_to route

## Code After:
from __init__ import app, db
from subprocess import call
from models import User
from flask import request
from flask import abort
from flask import jsonify

@app.route('/register', methods=['POST'])
def register():
    if not request.json or not 'guid' in request.json:
        abort(400) # Malformed Packet

    guid = request.json['guid']

    user = User(guid)
    db.session.add(user)
    db.session.commit()

    registerObject = {
        'id': user.guid
    }

    return jsonify(registerObject), 201

@app.route('/phone', methods=['POST'])
def phone():
    if not request.json or (not ('call-time' in request.json)) or (not ('id' in request.json)):
        abort(400) # Malformed Packet

    user = User.query.filter_by(id=request.json["id"]).first()

    if not user: #Check database for id to make sure it exists
        abort(401)

    # Todo Steve will do this

    return "", 200

@app.route('/msg_to', methods=['POST'])
def msg_to():
    if not request.json or (not ('phone_number' in request.json)) or (not ('id' in request.json)):
        abort(400) # Malformed Packet

    user = User.query.filter_by(id=request.json["id"]).first()

    if not user: #Check database for id to make sure it exists
        abort(401)

    # Waiting on Steve's commit

    return "", 200

@app.route('/')
def landing_page():
    return 'Nothing seems to be here'

@app.route('/update-server', methods=['GET', 'POST'])
def update():
    call(["git pull"], shell=True)
    return 'Success!'
// ... existing code ...
    return "", 200

@app.route('/msg_to', methods=['POST'])
def msg_to():
    if not request.json or (not ('phone_number' in request.json)) or (not ('id' in request.json)):
        abort(400) # Malformed Packet

    user = User.query.filter_by(id=request.json["id"]).first()

    if not user: #Check database for id to make sure it exists
        abort(401)

    # Waiting on Steve's commit

    return "", 200
// ... rest of the code ...
e881465050ef9edbf2b47071b1fa2fc27ac26c1a
tests/Settings/TestExtruderStack.py
tests/Settings/TestExtruderStack.py
import pytest #This module contains automated tests.
import unittest.mock #For the mocking and monkeypatching functionality.

import cura.Settings.ExtruderStack #The module we're testing.
from cura.Settings.Exceptions import InvalidOperationError #To check whether the correct exceptions are raised.

##  An empty extruder stack to test with.
@pytest.fixture()
def extruder_stack() -> cura.Settings.ExtruderStack.ExtruderStack:
    return cura.Settings.ExtruderStack.ExtruderStack

##  Tests whether adding a container is properly forbidden.
def test_addContainer(extruder_stack):
    with pytest.raises(InvalidOperationError):
        extruder_stack.addContainer(unittest.mock.MagicMock())

##  Tests whether inserting a container is properly forbidden.
def test_insertContainer(extruder_stack):
    with pytest.raises(InvalidOperationError):
        extruder_stack.insertContainer(0, unittest.mock.MagicMock())

##  Tests whether removing a container is properly forbidden.
def test_removeContainer(extruder_stack):
    with pytest.raises(InvalidOperationError):
        extruder_stack.removeContainer(unittest.mock.MagicMock())
import pytest #This module contains automated tests.
import unittest.mock #For the mocking and monkeypatching functionality.

import cura.Settings.ExtruderStack #The module we're testing.
from cura.Settings.Exceptions import InvalidOperationError #To check whether the correct exceptions are raised.

##  An empty extruder stack to test with.
@pytest.fixture()
def extruder_stack() -> cura.Settings.ExtruderStack.ExtruderStack:
    return cura.Settings.ExtruderStack.ExtruderStack

#############################START OF TEST CASES################################

##  Tests whether adding a container is properly forbidden.
def test_addContainer(extruder_stack):
    with pytest.raises(InvalidOperationError):
        extruder_stack.addContainer(unittest.mock.MagicMock())

##  Tests whether inserting a container is properly forbidden.
def test_insertContainer(extruder_stack):
    with pytest.raises(InvalidOperationError):
        extruder_stack.insertContainer(0, unittest.mock.MagicMock())

##  Tests whether removing a container is properly forbidden.
def test_removeContainer(extruder_stack):
    with pytest.raises(InvalidOperationError):
        extruder_stack.removeContainer(unittest.mock.MagicMock())
Add delimiter between global stuff and test cases
Add delimiter between global stuff and test cases

Helps provide some oversight since this module is about to explode in size.

Contributes to issue CURA-3497.
Python
agpl-3.0
hmflash/Cura,ynotstartups/Wanhao,Curahelper/Cura,ynotstartups/Wanhao,hmflash/Cura,fieldOfView/Cura,fieldOfView/Cura,Curahelper/Cura
import pytest #This module contains automated tests.
import unittest.mock #For the mocking and monkeypatching functionality.

import cura.Settings.ExtruderStack #The module we're testing.
from cura.Settings.Exceptions import InvalidOperationError #To check whether the correct exceptions are raised.

##  An empty extruder stack to test with.
@pytest.fixture()
def extruder_stack() -> cura.Settings.ExtruderStack.ExtruderStack:
    return cura.Settings.ExtruderStack.ExtruderStack
+
+ #############################START OF TEST CASES################################

##  Tests whether adding a container is properly forbidden.
def test_addContainer(extruder_stack):
    with pytest.raises(InvalidOperationError):
        extruder_stack.addContainer(unittest.mock.MagicMock())

##  Tests whether inserting a container is properly forbidden.
def test_insertContainer(extruder_stack):
    with pytest.raises(InvalidOperationError):
        extruder_stack.insertContainer(0, unittest.mock.MagicMock())

##  Tests whether removing a container is properly forbidden.
def test_removeContainer(extruder_stack):
    with pytest.raises(InvalidOperationError):
        extruder_stack.removeContainer(unittest.mock.MagicMock())
Add delimiter between global stuff and test cases
## Code Before:
import pytest #This module contains automated tests.
import unittest.mock #For the mocking and monkeypatching functionality.

import cura.Settings.ExtruderStack #The module we're testing.
from cura.Settings.Exceptions import InvalidOperationError #To check whether the correct exceptions are raised.

##  An empty extruder stack to test with.
@pytest.fixture()
def extruder_stack() -> cura.Settings.ExtruderStack.ExtruderStack:
    return cura.Settings.ExtruderStack.ExtruderStack

##  Tests whether adding a container is properly forbidden.
def test_addContainer(extruder_stack):
    with pytest.raises(InvalidOperationError):
        extruder_stack.addContainer(unittest.mock.MagicMock())

##  Tests whether inserting a container is properly forbidden.
def test_insertContainer(extruder_stack):
    with pytest.raises(InvalidOperationError):
        extruder_stack.insertContainer(0, unittest.mock.MagicMock())

##  Tests whether removing a container is properly forbidden.
def test_removeContainer(extruder_stack):
    with pytest.raises(InvalidOperationError):
        extruder_stack.removeContainer(unittest.mock.MagicMock())

## Instruction:
Add delimiter between global stuff and test cases

## Code After:
import pytest #This module contains automated tests.
import unittest.mock #For the mocking and monkeypatching functionality.

import cura.Settings.ExtruderStack #The module we're testing.
from cura.Settings.Exceptions import InvalidOperationError #To check whether the correct exceptions are raised.

##  An empty extruder stack to test with.
@pytest.fixture()
def extruder_stack() -> cura.Settings.ExtruderStack.ExtruderStack:
    return cura.Settings.ExtruderStack.ExtruderStack

#############################START OF TEST CASES################################

##  Tests whether adding a container is properly forbidden.
def test_addContainer(extruder_stack):
    with pytest.raises(InvalidOperationError):
        extruder_stack.addContainer(unittest.mock.MagicMock())

##  Tests whether inserting a container is properly forbidden.
def test_insertContainer(extruder_stack):
    with pytest.raises(InvalidOperationError):
        extruder_stack.insertContainer(0, unittest.mock.MagicMock())

##  Tests whether removing a container is properly forbidden.
def test_removeContainer(extruder_stack):
    with pytest.raises(InvalidOperationError):
        extruder_stack.removeContainer(unittest.mock.MagicMock())
...
    return cura.Settings.ExtruderStack.ExtruderStack

#############################START OF TEST CASES################################

...
8d5b0682c3262fa210c3ed5e50c91259f1f2550c
myhome/blog/models.py
myhome/blog/models.py
from django.db import models


class BlogPostTag(models.Model):
    name = models.CharField(max_length=255)

    def __str__(self):
        return self.name


class BlogPost(models.Model):
    datetime = models.DateTimeField()
    title = models.CharField(max_length=255)
    content = models.TextField()
    live = models.BooleanField(default=False)
    blog_post_tags = models.ManyToManyField(BlogPostTag, blank=True)

    class Meta:
        ordering = ['-datetime']

    def __str__(self):
        return '%s (%s)' % (self.title, self.datetime)

    def __repr__(self):
        return '<BlogPost id=%d, datetime=%s, title=%s>' % (self.id, self.datetime, self.title)

    def prev_post(self):
        prev_datetime = BlogPost.objects.filter(live=True, datetime__lt=self.datetime).aggregate(models.Max('datetime'))['datetime__max']
        try:
            return BlogPost.objects.filter(datetime=prev_datetime)[0]
        except IndexError:
            return None

    def next_post(self):
        next_datetime = BlogPost.objects.filter(live=True, datetime__gt=self.datetime).aggregate(models.Min('datetime'))['datetime__min']
        try:
            return BlogPost.objects.filter(datetime=next_datetime)[0]
        except IndexError:
            return None
from django.db import models


class BlogPostTag(models.Model):
    name = models.CharField(max_length=255)

    class Meta:
        ordering = ['name']

    def __str__(self):
        return self.name


class BlogPost(models.Model):
    datetime = models.DateTimeField()
    title = models.CharField(max_length=255)
    content = models.TextField()
    live = models.BooleanField(default=False)
    blog_post_tags = models.ManyToManyField(BlogPostTag, blank=True)

    class Meta:
        ordering = ['-datetime']

    def __str__(self):
        return '%s (%s)' % (self.title, self.datetime)

    def __repr__(self):
        return '<BlogPost id=%d, datetime=%s, title=%s>' % (self.id, self.datetime, self.title)

    def prev_post(self):
        prev_datetime = BlogPost.objects.filter(live=True, datetime__lt=self.datetime).aggregate(models.Max('datetime'))['datetime__max']
        try:
            return BlogPost.objects.filter(datetime=prev_datetime)[0]
        except IndexError:
            return None

    def next_post(self):
        next_datetime = BlogPost.objects.filter(live=True, datetime__gt=self.datetime).aggregate(models.Min('datetime'))['datetime__min']
        try:
            return BlogPost.objects.filter(datetime=next_datetime)[0]
        except IndexError:
            return None
Set default ordering for blog post tags
Set default ordering for blog post tags
Python
mit
plumdog/myhome,plumdog/myhome,plumdog/myhome,plumdog/myhome
from django.db import models


class BlogPostTag(models.Model):
    name = models.CharField(max_length=255)
+
+     class Meta:
+         ordering = ['name']

    def __str__(self):
        return self.name


class BlogPost(models.Model):
    datetime = models.DateTimeField()
    title = models.CharField(max_length=255)
    content = models.TextField()
    live = models.BooleanField(default=False)
    blog_post_tags = models.ManyToManyField(BlogPostTag, blank=True)

    class Meta:
        ordering = ['-datetime']

    def __str__(self):
        return '%s (%s)' % (self.title, self.datetime)

    def __repr__(self):
        return '<BlogPost id=%d, datetime=%s, title=%s>' % (self.id, self.datetime, self.title)

    def prev_post(self):
        prev_datetime = BlogPost.objects.filter(live=True, datetime__lt=self.datetime).aggregate(models.Max('datetime'))['datetime__max']
        try:
            return BlogPost.objects.filter(datetime=prev_datetime)[0]
        except IndexError:
            return None

    def next_post(self):
        next_datetime = BlogPost.objects.filter(live=True, datetime__gt=self.datetime).aggregate(models.Min('datetime'))['datetime__min']
        try:
            return BlogPost.objects.filter(datetime=next_datetime)[0]
        except IndexError:
            return None
Set default ordering for blog post tags
## Code Before:
from django.db import models


class BlogPostTag(models.Model):
    name = models.CharField(max_length=255)

    def __str__(self):
        return self.name


class BlogPost(models.Model):
    datetime = models.DateTimeField()
    title = models.CharField(max_length=255)
    content = models.TextField()
    live = models.BooleanField(default=False)
    blog_post_tags = models.ManyToManyField(BlogPostTag, blank=True)

    class Meta:
        ordering = ['-datetime']

    def __str__(self):
        return '%s (%s)' % (self.title, self.datetime)

    def __repr__(self):
        return '<BlogPost id=%d, datetime=%s, title=%s>' % (self.id, self.datetime, self.title)

    def prev_post(self):
        prev_datetime = BlogPost.objects.filter(live=True, datetime__lt=self.datetime).aggregate(models.Max('datetime'))['datetime__max']
        try:
            return BlogPost.objects.filter(datetime=prev_datetime)[0]
        except IndexError:
            return None

    def next_post(self):
        next_datetime = BlogPost.objects.filter(live=True, datetime__gt=self.datetime).aggregate(models.Min('datetime'))['datetime__min']
        try:
            return BlogPost.objects.filter(datetime=next_datetime)[0]
        except IndexError:
            return None

## Instruction:
Set default ordering for blog post tags

## Code After:
from django.db import models


class BlogPostTag(models.Model):
    name = models.CharField(max_length=255)

    class Meta:
        ordering = ['name']

    def __str__(self):
        return self.name


class BlogPost(models.Model):
    datetime = models.DateTimeField()
    title = models.CharField(max_length=255)
    content = models.TextField()
    live = models.BooleanField(default=False)
    blog_post_tags = models.ManyToManyField(BlogPostTag, blank=True)

    class Meta:
        ordering = ['-datetime']

    def __str__(self):
        return '%s (%s)' % (self.title, self.datetime)

    def __repr__(self):
        return '<BlogPost id=%d, datetime=%s, title=%s>' % (self.id, self.datetime, self.title)

    def prev_post(self):
        prev_datetime = BlogPost.objects.filter(live=True, datetime__lt=self.datetime).aggregate(models.Max('datetime'))['datetime__max']
        try:
            return BlogPost.objects.filter(datetime=prev_datetime)[0]
        except IndexError:
            return None

    def next_post(self):
        next_datetime = BlogPost.objects.filter(live=True, datetime__gt=self.datetime).aggregate(models.Min('datetime'))['datetime__min']
        try:
            return BlogPost.objects.filter(datetime=next_datetime)[0]
        except IndexError:
            return None
// ... existing code ...
    name = models.CharField(max_length=255)

    class Meta:
        ordering = ['name']

// ... rest of the code ...
0b1f38b8354a0ad6a021f247a7bc1336ae5d50fb
arcade/__init__.py
arcade/__init__.py
import arcade.key
import arcade.color

from .version import *
from .window_commands import *
from .draw_commands import *
from .sprite import *
from .physics_engines import *
from .physics_engine_2d import *
from .application import *
from .sound import *
from .shape_objects import *
import arcade.key
import arcade.color

from arcade.version import *
from arcade.window_commands import *
from arcade.draw_commands import *
from arcade.sprite import *
from arcade.physics_engines import *
from arcade.physics_engine_2d import *
from arcade.application import *
from arcade.sound import *
from arcade.shape_objects import *
Change some of the relative imports, which fail in doctests, to absolute imports.
Change some of the relative imports, which fail in doctests, to absolute imports.
Python
mit
mikemhenry/arcade,mikemhenry/arcade
import arcade.key
import arcade.color

- from .version import *
+ from arcade.version import *
- from .window_commands import *
+ from arcade.window_commands import *
- from .draw_commands import *
+ from arcade.draw_commands import *
- from .sprite import *
+ from arcade.sprite import *
- from .physics_engines import *
+ from arcade.physics_engines import *
- from .physics_engine_2d import *
+ from arcade.physics_engine_2d import *
- from .application import *
+ from arcade.application import *
- from .sound import *
+ from arcade.sound import *
- from .shape_objects import *
+ from arcade.shape_objects import *
Change some of the relative imports, which fail in doctests, to absolute imports.
## Code Before:
import arcade.key
import arcade.color

from .version import *
from .window_commands import *
from .draw_commands import *
from .sprite import *
from .physics_engines import *
from .physics_engine_2d import *
from .application import *
from .sound import *
from .shape_objects import *

## Instruction:
Change some of the relative imports, which fail in doctests, to absolute imports.

## Code After:
import arcade.key
import arcade.color

from arcade.version import *
from arcade.window_commands import *
from arcade.draw_commands import *
from arcade.sprite import *
from arcade.physics_engines import *
from arcade.physics_engine_2d import *
from arcade.application import *
from arcade.sound import *
from arcade.shape_objects import *
...
from arcade.version import *
from arcade.window_commands import *
from arcade.draw_commands import *
from arcade.sprite import *
from arcade.physics_engines import *
from arcade.physics_engine_2d import *
from arcade.application import *
from arcade.sound import *
from arcade.shape_objects import *
...
4409823a5611d0f426ca09541d7e9dc982bc8c9f
asyncqlio/utils.py
asyncqlio/utils.py
import collections.abc


class IterToAiter(collections.abc.Iterator, collections.abc.AsyncIterator):
    """
    Transforms an `__iter__` method into an `__aiter__` method.
    """

    def __init__(self, iterator: collections.abc.Iterator):
        self._it = iterator

    # magic methods
    def __iter__(self):
        return self

    def __next__(self):
        return self._it.__next__()

    def __aiter__(self):
        return self

    async def __anext__(self):
        return self.__next__()


def iter_to_aiter(type_):
    """
    Transforms a normal iterable type into an async iterable type.
    """
    def __aiter__(self):
        return IterToAiter(iter(self))

    type_.__aiter__ = __aiter__
    return type_
import collections.abc


class IterToAiter(collections.abc.Iterator, collections.abc.AsyncIterator):
    """
    Transforms an `__iter__` method into an `__aiter__` method.
    """

    def __init__(self, iterator: collections.abc.Iterator):
        self._it = iterator

    # magic methods
    def __iter__(self):
        return self

    def __next__(self):
        return self._it.__next__()

    def __aiter__(self):
        return self

    async def __anext__(self):
        try:
            return self.__next__()
        except StopIteration:
            raise StopAsyncIteration


def iter_to_aiter(type_):
    """
    Transforms a normal iterable type into an async iterable type.
    """
    def __aiter__(self):
        return IterToAiter(iter(self))

    type_.__aiter__ = __aiter__
    return type_
Raise StopAsyncIteration instead of StopAsyncIteration in aiter wrapper.
Raise StopAsyncIteration instead of StopAsyncIteration in aiter wrapper.
Python
mit
SunDwarf/asyncqlio
import collections.abc


class IterToAiter(collections.abc.Iterator, collections.abc.AsyncIterator):
    """
    Transforms an `__iter__` method into an `__aiter__` method.
    """

    def __init__(self, iterator: collections.abc.Iterator):
        self._it = iterator

    # magic methods
    def __iter__(self):
        return self

    def __next__(self):
        return self._it.__next__()

    def __aiter__(self):
        return self

    async def __anext__(self):
+         try:
-         return self.__next__()
+             return self.__next__()
+         except StopIteration:
+             raise StopAsyncIteration


def iter_to_aiter(type_):
    """
    Transforms a normal iterable type into an async iterable type.
    """
    def __aiter__(self):
        return IterToAiter(iter(self))

    type_.__aiter__ = __aiter__
    return type_
Raise StopAsyncIteration instead of StopAsyncIteration in aiter wrapper.
## Code Before:
import collections.abc


class IterToAiter(collections.abc.Iterator, collections.abc.AsyncIterator):
    """
    Transforms an `__iter__` method into an `__aiter__` method.
    """

    def __init__(self, iterator: collections.abc.Iterator):
        self._it = iterator

    # magic methods
    def __iter__(self):
        return self

    def __next__(self):
        return self._it.__next__()

    def __aiter__(self):
        return self

    async def __anext__(self):
        return self.__next__()


def iter_to_aiter(type_):
    """
    Transforms a normal iterable type into an async iterable type.
    """
    def __aiter__(self):
        return IterToAiter(iter(self))

    type_.__aiter__ = __aiter__
    return type_

## Instruction:
Raise StopAsyncIteration instead of StopAsyncIteration in aiter wrapper.

## Code After:
import collections.abc


class IterToAiter(collections.abc.Iterator, collections.abc.AsyncIterator):
    """
    Transforms an `__iter__` method into an `__aiter__` method.
    """

    def __init__(self, iterator: collections.abc.Iterator):
        self._it = iterator

    # magic methods
    def __iter__(self):
        return self

    def __next__(self):
        return self._it.__next__()

    def __aiter__(self):
        return self

    async def __anext__(self):
        try:
            return self.__next__()
        except StopIteration:
            raise StopAsyncIteration


def iter_to_aiter(type_):
    """
    Transforms a normal iterable type into an async iterable type.
    """
    def __aiter__(self):
        return IterToAiter(iter(self))

    type_.__aiter__ = __aiter__
    return type_
# ... existing code ...
    async def __anext__(self):
        try:
            return self.__next__()
        except StopIteration:
            raise StopAsyncIteration
# ... rest of the code ...
39ea336297b0479abb29a70f831b2a02a01fcc18
portas/portas/utils.py
portas/portas/utils.py
import contextlib
import functools
import logging
import sys

import webob.exc

LOG = logging.getLogger(__name__)


def http_success_code(code):
    """Attaches response code to a method.

    This decorator associates a response code with a method. Note
    that the function attributes are directly manipulated; the method
    is not wrapped.
    """

    def decorator(func):
        func.wsgi_code = code
        return func
    return decorator


def verify_tenant(func):
    @functools.wraps(func)
    def __inner(self, req, tenant_id, *args, **kwargs):
        if hasattr(req, 'context') and tenant_id != req.context.tenant_id:
            LOG.info('User is not authorized to access this tenant.')
            raise webob.exc.HTTPUnauthorized
        return func(self, req, tenant_id, *args, **kwargs)
    return __inner


def require_admin(func):
    @functools.wraps(func)
    def __inner(self, req, *args, **kwargs):
        if hasattr(req, 'context') and not req.context.is_admin:
            LOG.info('User has no admin priviledges.')
            raise webob.exc.HTTPUnauthorized
        return func(self, req, *args, **kwargs)
    return __inner


@contextlib.contextmanager
def save_and_reraise_exception():
    """Save current exception, run some code and then re-raise.

    In some cases the exception context can be cleared, resulting in None
    being attempted to be reraised after an exception handler is run. This
    can happen when eventlet switches greenthreads or when running an
    exception handler, code raises and catches an exception. In both
    cases the exception context will be cleared.

    To work around this, we save the exception state, run handler code, and
    then re-raise the original exception. If another exception occurs, the
    saved exception is logged and the new exception is reraised.
    """
    type_, value, traceback = sys.exc_info()
    try:
        yield
    except Exception:
        LOG.error('Original exception being dropped',
                  exc_info=(type_, value, traceback))
        raise
    raise type_, value, traceback
import logging

LOG = logging.getLogger(__name__)


# def verify_tenant(func):
#     @functools.wraps(func)
#     def __inner(self, req, tenant_id, *args, **kwargs):
#         if hasattr(req, 'context') and tenant_id != req.context.tenant:
#             LOG.info('User is not authorized to access this tenant.')
#             raise webob.exc.HTTPUnauthorized
#         return func(self, req, tenant_id, *args, **kwargs)
#     return __inner
#
#
# def require_admin(func):
#     @functools.wraps(func)
#     def __inner(self, req, *args, **kwargs):
#         if hasattr(req, 'context') and not req.context.is_admin:
#             LOG.info('User has no admin priviledges.')
#             raise webob.exc.HTTPUnauthorized
#         return func(self, req, *args, **kwargs)
#     return __inner
Remove unnecessary blocks of code
Remove unnecessary blocks of code
Python
apache-2.0
Bloomie/murano-agent,openstack/python-muranoclient,ativelkov/murano-api,sajuptpm/murano,NeCTAR-RC/murano,satish-avninetworks/murano,satish-avninetworks/murano,openstack/murano,openstack/murano-agent,openstack/murano-agent,telefonicaid/murano,satish-avninetworks/murano,DavidPurcell/murano_temp,openstack/murano-agent,sergmelikyan/murano,chenyujie/hybrid-murano,olivierlemasle/murano,olivierlemasle/murano,ativelkov/murano-api,sajuptpm/murano,Bloomie/murano-agent,satish-avninetworks/murano,openstack/murano,telefonicaid/murano-agent,olivierlemasle/murano,chenyujie/hybrid-murano,telefonicaid/murano-agent,Bloomie/murano-agent,openstack/python-muranoclient,telefonicaid/murano-agent,DavidPurcell/murano_temp,openstack/murano-agent,DavidPurcell/murano_temp,NeCTAR-RC/murano,NeCTAR-RC/murano,telefonicaid/murano,olivierlemasle/murano,Bloomie/murano-agent,DavidPurcell/murano_temp,sergmelikyan/murano,NeCTAR-RC/murano
- import contextlib
- import functools
  import logging
- import sys
-
- import webob.exc

LOG = logging.getLogger(__name__)


- def http_success_code(code):
-     """Attaches response code to a method.
-
-     This decorator associates a response code with a method. Note
-     that the function attributes are directly manipulated; the method
-     is not wrapped.
-     """
-
-     def decorator(func):
-         func.wsgi_code = code
-         return func
-     return decorator
+ # def verify_tenant(func):
+ #     @functools.wraps(func)
+ #     def __inner(self, req, tenant_id, *args, **kwargs):
+ #         if hasattr(req, 'context') and tenant_id != req.context.tenant:
+ #             LOG.info('User is not authorized to access this tenant.')
+ #             raise webob.exc.HTTPUnauthorized
+ #         return func(self, req, tenant_id, *args, **kwargs)
+ #     return __inner
+ #
+ #
+ # def require_admin(func):
+ #     @functools.wraps(func)
+ #     def __inner(self, req, *args, **kwargs):
+ #         if hasattr(req, 'context') and not req.context.is_admin:
+ #             LOG.info('User has no admin priviledges.')
+ #             raise webob.exc.HTTPUnauthorized
+ #         return func(self, req, *args, **kwargs)
+ #     return __inner

- def verify_tenant(func):
-     @functools.wraps(func)
-     def __inner(self, req, tenant_id, *args, **kwargs):
-         if hasattr(req, 'context') and tenant_id != req.context.tenant_id:
-             LOG.info('User is not authorized to access this tenant.')
-             raise webob.exc.HTTPUnauthorized
-         return func(self, req, tenant_id, *args, **kwargs)
-     return __inner
-
-
- def require_admin(func):
-     @functools.wraps(func)
-     def __inner(self, req, *args, **kwargs):
-         if hasattr(req, 'context') and not req.context.is_admin:
-             LOG.info('User has no admin priviledges.')
-             raise webob.exc.HTTPUnauthorized
-         return func(self, req, *args, **kwargs)
-     return __inner
-
-
- @contextlib.contextmanager
- def save_and_reraise_exception():
-     """Save current exception, run some code and then re-raise.
-
-     In some cases the exception context can be cleared, resulting in None
-     being attempted to be reraised after an exception handler is run. This
-     can happen when eventlet switches greenthreads or when running an
-     exception handler, code raises and catches an exception. In both
-     cases the exception context will be cleared.
-
-     To work around this, we save the exception state, run handler code, and
-     then re-raise the original exception. If another exception occurs, the
-     saved exception is logged and the new exception is reraised.
-     """
-     type_, value, traceback = sys.exc_info()
-     try:
-         yield
-     except Exception:
-         LOG.error('Original exception being dropped',
-                   exc_info=(type_, value, traceback))
-         raise
-     raise type_, value, traceback
-
Remove unnecessary blocks of code
## Code Before:
import contextlib
import functools
import logging
import sys

import webob.exc

LOG = logging.getLogger(__name__)


def http_success_code(code):
    """Attaches response code to a method.

    This decorator associates a response code with a method. Note
    that the function attributes are directly manipulated; the method
    is not wrapped.
    """

    def decorator(func):
        func.wsgi_code = code
        return func
    return decorator


def verify_tenant(func):
    @functools.wraps(func)
    def __inner(self, req, tenant_id, *args, **kwargs):
        if hasattr(req, 'context') and tenant_id != req.context.tenant_id:
            LOG.info('User is not authorized to access this tenant.')
            raise webob.exc.HTTPUnauthorized
        return func(self, req, tenant_id, *args, **kwargs)
    return __inner


def require_admin(func):
    @functools.wraps(func)
    def __inner(self, req, *args, **kwargs):
        if hasattr(req, 'context') and not req.context.is_admin:
            LOG.info('User has no admin priviledges.')
            raise webob.exc.HTTPUnauthorized
        return func(self, req, *args, **kwargs)
    return __inner


@contextlib.contextmanager
def save_and_reraise_exception():
    """Save current exception, run some code and then re-raise.

    In some cases the exception context can be cleared, resulting in None
    being attempted to be reraised after an exception handler is run. This
    can happen when eventlet switches greenthreads or when running an
    exception handler, code raises and catches an exception. In both
    cases the exception context will be cleared.

    To work around this, we save the exception state, run handler code, and
    then re-raise the original exception. If another exception occurs, the
    saved exception is logged and the new exception is reraised.
    """
    type_, value, traceback = sys.exc_info()
    try:
        yield
    except Exception:
        LOG.error('Original exception being dropped',
                  exc_info=(type_, value, traceback))
        raise
    raise type_, value, traceback

## Instruction:
Remove unnecessary blocks of code

## Code After:
import logging

LOG = logging.getLogger(__name__)


# def verify_tenant(func):
#     @functools.wraps(func)
#     def __inner(self, req, tenant_id, *args, **kwargs):
#         if hasattr(req, 'context') and tenant_id != req.context.tenant:
#             LOG.info('User is not authorized to access this tenant.')
#             raise webob.exc.HTTPUnauthorized
#         return func(self, req, tenant_id, *args, **kwargs)
#     return __inner
#
#
# def require_admin(func):
#     @functools.wraps(func)
#     def __inner(self, req, *args, **kwargs):
#         if hasattr(req, 'context') and not req.context.is_admin:
#             LOG.info('User has no admin priviledges.')
#             raise webob.exc.HTTPUnauthorized
#         return func(self, req, *args, **kwargs)
#     return __inner
...
import logging
...
# def verify_tenant(func):
#     @functools.wraps(func)
#     def __inner(self, req, tenant_id, *args, **kwargs):
#         if hasattr(req, 'context') and tenant_id != req.context.tenant:
#             LOG.info('User is not authorized to access this tenant.')
#             raise webob.exc.HTTPUnauthorized
#         return func(self, req, tenant_id, *args, **kwargs)
#     return __inner
#
#
# def require_admin(func):
#     @functools.wraps(func)
#     def __inner(self, req, *args, **kwargs):
#         if hasattr(req, 'context') and not req.context.is_admin:
#             LOG.info('User has no admin priviledges.')
#             raise webob.exc.HTTPUnauthorized
#         return func(self, req, *args, **kwargs)
#     return __inner
...
c5bcd270e8422ba23bcb29dd4a00ce4fa9e7d437
anki.py
anki.py
import os
import re

import yaml

import lib.genanki.genanki as genanki


class Anki:
    def generate_id():
        """Generate a 32-bit ID useful for Anki."""
        return random.randrange(1 << 30, 1 << 31)
        # return datetime.now().timestamp()

    def import_card_definitions(self, yaml_filepath):
        """Import card definitions from YAML file."""
        path = os.path.dirname(yaml_filepath) + '/'
        with open(yaml_filepath, 'r') as f:
            cards = f.read()
        cards = yaml.load(cards)

        for subject, model in cards.items():
            for template in model['templates']:
                for fmt in ('qfmt', 'afmt'):
                    with open(path + template[fmt], 'r') as f:
                        lines = f.readlines()
                    temp = ''
                    for line in lines:
                        match = re.match('\s*{{import:(.*)}}', line)
                        if match:
                            with open(path + match.group(1), 'r') as f:
                                line = f.read()
                        temp += line
                    template[fmt] = temp
        return cards
import os
import re

import yaml

import lib.genanki.genanki as genanki


class Anki:
    def generate_id():
        """Generate a 32-bit ID useful for Anki."""
        return random.randrange(1 << 30, 1 << 31)
        # return datetime.now().timestamp()

    def import_card_definitions(self, yaml_filepath):
        """Import card definitions from YAML file.

        Adds a Anki-like {{import:file.txt}} file import command which
        works similar to the #include preprocessor command in C-like
        languages, directly replacing the command with text from the
        import file.
        """
        path = os.path.dirname(yaml_filepath) + '/'
        with open(yaml_filepath, 'r') as f:
            cards = f.read()
        cards = yaml.load(cards)

        for subject, model in cards.items():
            for template in model['templates']:
                for fmt in ('qfmt', 'afmt'):
                    with open(path + template[fmt], 'r') as f:
                        lines = f.readlines()
                    temp = ''
                    for line in lines:
                        match = re.match('\s*{{import:(.*)}}', line)
                        if match:
                            with open(path + match.group(1), 'r') as f:
                                line = f.read()
                        temp += line
                    template[fmt] = temp
        return cards
Document new {{import:file.txt}} command for Anki card definitions.
Document new {{import:file.txt}} command for Anki card definitions.
Python
mpl-2.0
holocronweaver/wanikani2anki,holocronweaver/wanikani2anki,holocronweaver/wanikani2anki
import os
import re

import yaml

import lib.genanki.genanki as genanki


class Anki:
    def generate_id():
        """Generate a 32-bit ID useful for Anki."""
        return random.randrange(1 << 30, 1 << 31)
        # return datetime.now().timestamp()

    def import_card_definitions(self, yaml_filepath):
-         """Import card definitions from YAML file."""
+         """Import card definitions from YAML file.
+
+         Adds a Anki-like {{import:file.txt}} file import command which
+         works similar to the #include preprocessor command in C-like
+         languages, directly replacing the command with text from the
+         import file.
+         """
        path = os.path.dirname(yaml_filepath) + '/'
        with open(yaml_filepath, 'r') as f:
            cards = f.read()
        cards = yaml.load(cards)

        for subject, model in cards.items():
            for template in model['templates']:
                for fmt in ('qfmt', 'afmt'):
                    with open(path + template[fmt], 'r') as f:
                        lines = f.readlines()
                    temp = ''
                    for line in lines:
                        match = re.match('\s*{{import:(.*)}}', line)
                        if match:
                            with open(path + match.group(1), 'r') as f:
                                line = f.read()
                        temp += line
                    template[fmt] = temp
        return cards
Document new {{import:file.txt}} command for Anki card definitions.
## Code Before:
import os
import re

import yaml

import lib.genanki.genanki as genanki


class Anki:
    def generate_id():
        """Generate a 32-bit ID useful for Anki."""
        return random.randrange(1 << 30, 1 << 31)
        # return datetime.now().timestamp()

    def import_card_definitions(self, yaml_filepath):
        """Import card definitions from YAML file."""
        path = os.path.dirname(yaml_filepath) + '/'
        with open(yaml_filepath, 'r') as f:
            cards = f.read()
        cards = yaml.load(cards)

        for subject, model in cards.items():
            for template in model['templates']:
                for fmt in ('qfmt', 'afmt'):
                    with open(path + template[fmt], 'r') as f:
                        lines = f.readlines()
                    temp = ''
                    for line in lines:
                        match = re.match('\s*{{import:(.*)}}', line)
                        if match:
                            with open(path + match.group(1), 'r') as f:
                                line = f.read()
                        temp += line
                    template[fmt] = temp
        return cards

## Instruction:
Document new {{import:file.txt}} command for Anki card definitions.

## Code After:
import os
import re

import yaml

import lib.genanki.genanki as genanki


class Anki:
    def generate_id():
        """Generate a 32-bit ID useful for Anki."""
        return random.randrange(1 << 30, 1 << 31)
        # return datetime.now().timestamp()

    def import_card_definitions(self, yaml_filepath):
        """Import card definitions from YAML file.

        Adds a Anki-like {{import:file.txt}} file import command which
        works similar to the #include preprocessor command in C-like
        languages, directly replacing the command with text from the
        import file.
        """
        path = os.path.dirname(yaml_filepath) + '/'
        with open(yaml_filepath, 'r') as f:
            cards = f.read()
        cards = yaml.load(cards)

        for subject, model in cards.items():
            for template in model['templates']:
                for fmt in ('qfmt', 'afmt'):
                    with open(path + template[fmt], 'r') as f:
                        lines = f.readlines()
                    temp = ''
                    for line in lines:
                        match = re.match('\s*{{import:(.*)}}', line)
                        if match:
                            with open(path + match.group(1), 'r') as f:
                                line = f.read()
                        temp += line
                    template[fmt] = temp
        return cards
// ... existing code ...
    def import_card_definitions(self, yaml_filepath):
        """Import card definitions from YAML file.

        Adds a Anki-like {{import:file.txt}} file import command which
        works similar to the #include preprocessor command in C-like
        languages, directly replacing the command with text from the
        import file.
        """
        path = os.path.dirname(yaml_filepath) + '/'
// ... rest of the code ...
9be04ea1030b423b7414dbd386ae2db2f4761f07
third_party/bunch/bunch/python3_compat.py
third_party/bunch/bunch/python3_compat.py
import platform

_IS_PYTHON_3 = (platform.version() >= '3')

identity = lambda x : x

# u('string') replaces the forwards-incompatible u'string'
if _IS_PYTHON_3:
    u = identity
else:
    import codecs
    def u(string):
        return codecs.unicode_escape_decode(string)[0]

# dict.iteritems(), dict.iterkeys() is also incompatible
if _IS_PYTHON_3:
    iteritems = dict.items
    iterkeys = dict.keys
else:
    iteritems = dict.iteritems
    iterkeys = dict.iterkeys
import sys

_IS_PYTHON_3 = (sys.version[0] >= '3')

identity = lambda x : x

# u('string') replaces the forwards-incompatible u'string'
if _IS_PYTHON_3:
    u = identity
else:
    import codecs
    def u(string):
        return codecs.unicode_escape_decode(string)[0]

# dict.iteritems(), dict.iterkeys() is also incompatible
if _IS_PYTHON_3:
    iteritems = dict.items
    iterkeys = dict.keys
else:
    iteritems = dict.iteritems
    iterkeys = dict.iterkeys
Fix Python 3 version detection in bunch
Fix Python 3 version detection in bunch
Python
apache-2.0
mbrukman/cloud-launcher,mbrukman/cloud-launcher,mbrukman/cloud-launcher,mbrukman/cloud-launcher
- import platform
+ import sys

- _IS_PYTHON_3 = (platform.version() >= '3')
+ _IS_PYTHON_3 = (sys.version[0] >= '3')

identity = lambda x : x

# u('string') replaces the forwards-incompatible u'string'
if _IS_PYTHON_3:
    u = identity
else:
    import codecs
    def u(string):
        return codecs.unicode_escape_decode(string)[0]

# dict.iteritems(), dict.iterkeys() is also incompatible
if _IS_PYTHON_3:
    iteritems = dict.items
    iterkeys = dict.keys
else:
    iteritems = dict.iteritems
    iterkeys = dict.iterkeys
Fix Python 3 version detection in bunch
## Code Before:
import platform

_IS_PYTHON_3 = (platform.version() >= '3')

identity = lambda x : x

# u('string') replaces the forwards-incompatible u'string'
if _IS_PYTHON_3:
    u = identity
else:
    import codecs
    def u(string):
        return codecs.unicode_escape_decode(string)[0]

# dict.iteritems(), dict.iterkeys() is also incompatible
if _IS_PYTHON_3:
    iteritems = dict.items
    iterkeys = dict.keys
else:
    iteritems = dict.iteritems
    iterkeys = dict.iterkeys

## Instruction:
Fix Python 3 version detection in bunch

## Code After:
import sys

_IS_PYTHON_3 = (sys.version[0] >= '3')

identity = lambda x : x

# u('string') replaces the forwards-incompatible u'string'
if _IS_PYTHON_3:
    u = identity
else:
    import codecs
    def u(string):
        return codecs.unicode_escape_decode(string)[0]

# dict.iteritems(), dict.iterkeys() is also incompatible
if _IS_PYTHON_3:
    iteritems = dict.items
    iterkeys = dict.keys
else:
    iteritems = dict.iteritems
    iterkeys = dict.iterkeys
# ... existing code ...
import sys

_IS_PYTHON_3 = (sys.version[0] >= '3')
# ... rest of the code ...
d7a91fe283666f01aa06a707c536893cf1473fe3
rtwilio/models.py
rtwilio/models.py
import datetime

from django.db import models


class TwilioResponse(models.Model):
    date = models.DateTimeField()
    message = models.CharField(max_length=64, primary_key=True)
    account = models.CharField(max_length=64)
    sender = models.CharField(max_length=16)
    recipient = models.CharField(max_length=16)
    status = models.CharField(max_length=16)

    def save(self, **kwargs):
        if not self.date:
            self.date = datetime.datetime.now()
        return super(TwilioResponse, self).save(**kwargs)
from django.db import models
from django.utils import timezone


class TwilioResponse(models.Model):
    date = models.DateTimeField()
    message = models.CharField(max_length=64, primary_key=True)
    account = models.CharField(max_length=64)
    sender = models.CharField(max_length=16)
    recipient = models.CharField(max_length=16)
    status = models.CharField(max_length=16)

    def save(self, **kwargs):
        if not self.date:
            self.date = timezone.now()
        return super(TwilioResponse, self).save(**kwargs)
Use timezone aware datetime now.
Use timezone aware datetime now.
Python
bsd-3-clause
caktus/rapidsms-twilio
- import datetime
-
from django.db import models
+ from django.utils import timezone


class TwilioResponse(models.Model):
    date = models.DateTimeField()
    message = models.CharField(max_length=64, primary_key=True)
    account = models.CharField(max_length=64)
    sender = models.CharField(max_length=16)
    recipient = models.CharField(max_length=16)
    status = models.CharField(max_length=16)

    def save(self, **kwargs):
        if not self.date:
-             self.date = datetime.datetime.now()
+             self.date = timezone.now()
        return super(TwilioResponse, self).save(**kwargs)
Use timezone aware datetime now.
## Code Before:
import datetime

from django.db import models


class TwilioResponse(models.Model):
    date = models.DateTimeField()
    message = models.CharField(max_length=64, primary_key=True)
    account = models.CharField(max_length=64)
    sender = models.CharField(max_length=16)
    recipient = models.CharField(max_length=16)
    status = models.CharField(max_length=16)

    def save(self, **kwargs):
        if not self.date:
            self.date = datetime.datetime.now()
        return super(TwilioResponse, self).save(**kwargs)

## Instruction:
Use timezone aware datetime now.

## Code After:
from django.db import models
from django.utils import timezone


class TwilioResponse(models.Model):
    date = models.DateTimeField()
    message = models.CharField(max_length=64, primary_key=True)
    account = models.CharField(max_length=64)
    sender = models.CharField(max_length=16)
    recipient = models.CharField(max_length=16)
    status = models.CharField(max_length=16)

    def save(self, **kwargs):
        if not self.date:
            self.date = timezone.now()
        return super(TwilioResponse, self).save(**kwargs)
// ... existing code ...
from django.db import models
from django.utils import timezone
// ... modified code ...
        if not self.date:
            self.date = timezone.now()
        return super(TwilioResponse, self).save(**kwargs)
// ... rest of the code ...
be27ec6d2567b85b94b40c79570ca5d9c20fd0bf
modeltrans/admin.py
modeltrans/admin.py
from .conf import get_default_language
from .translator import get_i18n_field
from .utils import get_language


class ActiveLanguageMixin(object):
    '''
    Hide all translated fields, except:
     - The field for the default language (settings.LANGUAGE_CODE)
     - The field for the currently active language.
    '''
    def get_exclude(self, request, obj=None):

        i18n_field = get_i18n_field(self.model)
        if i18n_field is None:
            return super(ActiveLanguageMixin, self).get_exclude(request)

        language = get_language()
        if language == get_default_language():
            language = False

        excludes = []
        for field in i18n_field.get_translated_fields():
            if field.language is None or field.language == language:
                continue
            excludes.append(field.name)

        return excludes
from .conf import get_default_language
from .translator import get_i18n_field
from .utils import get_language


class ActiveLanguageMixin(object):
    '''
    Add this mixin to your admin class to hide the untranslated field and all
    translated fields, except:

     - The field for the default language (settings.LANGUAGE_CODE)
     - The field for the currently active language.
    '''
    def get_exclude(self, request, obj=None):
        # use default implementation for models without i18n-field
        i18n_field = get_i18n_field(self.model)
        if i18n_field is None:
            return super(ActiveLanguageMixin, self).get_exclude(request)

        language = get_language()
        if language == get_default_language():
            language = False

        excludes = []
        for field in i18n_field.get_translated_fields():
            if field.language is None or field.language == language:
                continue
            excludes.append(field.name)
            # also add the name of the original field, as it is added
            excludes.append(field.original_field.name)

        # de-duplicate
        return list(set(excludes))
Improve ActiveLanguageMixin to hide original field
Improve ActiveLanguageMixin to hide original field
Python
bsd-3-clause
zostera/django-modeltrans,zostera/django-modeltrans
from .conf import get_default_language
from .translator import get_i18n_field
from .utils import get_language


class ActiveLanguageMixin(object):
    '''
+     Add this mixin to your admin class to hide the untranslated field and all
-     Hide all translated fields, except:
+     translated fields, except:
+
     - The field for the default language (settings.LANGUAGE_CODE)
     - The field for the currently active language.
    '''
    def get_exclude(self, request, obj=None):
-
+         # use default implementation for models without i18n-field
        i18n_field = get_i18n_field(self.model)
        if i18n_field is None:
            return super(ActiveLanguageMixin, self).get_exclude(request)

        language = get_language()
        if language == get_default_language():
            language = False

        excludes = []
        for field in i18n_field.get_translated_fields():
            if field.language is None or field.language == language:
                continue
            excludes.append(field.name)
-         return excludes
+             # also add the name of the original field, as it is added
+             excludes.append(field.original_field.name)
+
+         # de-duplicate
+         return list(set(excludes))
+
Improve ActiveLanguageMixin to hide original field
## Code Before:
from .conf import get_default_language
from .translator import get_i18n_field
from .utils import get_language


class ActiveLanguageMixin(object):
    '''
    Hide all translated fields, except:
     - The field for the default language (settings.LANGUAGE_CODE)
     - The field for the currently active language.
    '''
    def get_exclude(self, request, obj=None):

        i18n_field = get_i18n_field(self.model)
        if i18n_field is None:
            return super(ActiveLanguageMixin, self).get_exclude(request)

        language = get_language()
        if language == get_default_language():
            language = False

        excludes = []
        for field in i18n_field.get_translated_fields():
            if field.language is None or field.language == language:
                continue
            excludes.append(field.name)

        return excludes

## Instruction:
Improve ActiveLanguageMixin to hide original field

## Code After:
from .conf import get_default_language
from .translator import get_i18n_field
from .utils import get_language


class ActiveLanguageMixin(object):
    '''
    Add this mixin to your admin class to hide the untranslated field and all
    translated fields, except:

     - The field for the default language (settings.LANGUAGE_CODE)
     - The field for the currently active language.
    '''
    def get_exclude(self, request, obj=None):
        # use default implementation for models without i18n-field
        i18n_field = get_i18n_field(self.model)
        if i18n_field is None:
            return super(ActiveLanguageMixin, self).get_exclude(request)

        language = get_language()
        if language == get_default_language():
            language = False

        excludes = []
        for field in i18n_field.get_translated_fields():
            if field.language is None or field.language == language:
                continue
            excludes.append(field.name)
            # also add the name of the original field, as it is added
            excludes.append(field.original_field.name)

        # de-duplicate
        return list(set(excludes))
# ... existing code ...
    '''
    Add this mixin to your admin class to hide the untranslated field and all
    translated fields, except:

     - The field for the default language (settings.LANGUAGE_CODE)
# ... modified code ...
    def get_exclude(self, request, obj=None):
        # use default implementation for models without i18n-field
        i18n_field = get_i18n_field(self.model)
...
            # also add the name of the original field, as it is added
            excludes.append(field.original_field.name)

        # de-duplicate
        return list(set(excludes))
# ... rest of the code ...
4210d2ecb1b74c2a94c704c20eec9faaf75c5a9a
main.py
main.py
import sys,requests,configparser

github_username = ''
github_token = ''
free_user = ''
free_pass = ''

def loadConfig():
    config = configparser.ConfigParser()
    config.read('config.cfg')

    global github_username,github_token,free_user,free_pass
    try:
        github_username = config['Github']['username']
        github_token = config['Github']['token']
        free_user = config['Free']['user']
        free_pass = config['Free']['pass']
    except:
        print('Missing information in config.cfg, see README.md')
        sys.exit(1)

def githubAPI(action):
    r = requests.get('https://api.github.com' + action, auth=(github_username, github_token))
    return r.json()

def sendSMS(msg):
    return requests.post('https://smsapi.free-mobile.fr/sendmsg', params={'user' : free_user, 'pass' : free_pass, 'msg' : msg}).status_code

if __name__ == '__main__':
    loadConfig()

    notifs = githubAPI('/notifications')

    unread = []
    for notif in notifs:
        if notif['unread']:
            unread.append(notif)

    msg = '[Github] Unread notifications :\n'
    for n in unread:
        msg += " [" + n['subject']['type'] + "] " + n['subject']['title'] + " in " + n['repository']['full_name'] + "\n"

    sendSMS(msg)
import sys,requests,configparser

github_username = ''
github_token = ''
free_user = ''
free_pass = ''

def loadConfig():
    config = configparser.ConfigParser()
    config.read('config.cfg')

    global github_username,github_token,free_user,free_pass
    try:
        github_username = config['Github']['username']
        github_token = config['Github']['token']
        free_user = config['Free']['user']
        free_pass = config['Free']['pass']
    except:
        print('Missing information in config.cfg, see README.md')
        sys.exit(1)

def githubAPI(action):
    r = requests.get('https://api.github.com' + action, auth=(github_username, github_token))
    return r.json()

def sendSMS(msg):
    return requests.post('https://smsapi.free-mobile.fr/sendmsg', params={'user' : free_user, 'pass' : free_pass, 'msg' : msg}).status_code

if __name__ == '__main__':
    loadConfig()

    notifs = githubAPI('/notifications')

    unread = []
    for notif in notifs:
        if notif['unread']:
            unread.append(notif)

    if len(unread) > 0:
        msg = '[Github] Unread notifications :\n'
        for n in unread:
            msg += " [" + n['subject']['type'] + "] " + n['subject']['title'] + " in " + n['repository']['full_name'] + "\n"

        sendSMS(msg)
Send SMS, only if there are notifications
Send SMS, only if there are notifications
Python
mit
Walz/github-sms-notifications
import sys,requests,configparser

github_username = ''
github_token = ''
free_user = ''
free_pass = ''

def loadConfig():
    config = configparser.ConfigParser()
    config.read('config.cfg')
-
+
    global github_username,github_token,free_user,free_pass
    try:
        github_username = config['Github']['username']
        github_token = config['Github']['token']
        free_user = config['Free']['user']
        free_pass = config['Free']['pass']
    except:
        print('Missing information in config.cfg, see README.md')
        sys.exit(1)

def githubAPI(action):
    r = requests.get('https://api.github.com' + action, auth=(github_username, github_token))
    return r.json()

def sendSMS(msg):
    return requests.post('https://smsapi.free-mobile.fr/sendmsg', params={'user' : free_user, 'pass' : free_pass, 'msg' : msg}).status_code

if __name__ == '__main__':
    loadConfig()

    notifs = githubAPI('/notifications')

    unread = []
    for notif in notifs:
        if notif['unread']:
            unread.append(notif)

+     if len(unread) > 0:
-     msg = '[Github] Unread notifications :\n'
+         msg = '[Github] Unread notifications :\n'
-     for n in unread:
+         for n in unread:
-         msg += " [" + n['subject']['type'] + "] " + n['subject']['title'] + " in " + n['repository']['full_name'] + "\n"
+             msg += " [" + n['subject']['type'] + "] " + n['subject']['title'] + " in " + n['repository']['full_name'] + "\n"

-     sendSMS(msg)
+         sendSMS(msg)
Send SMS, only if there are notifications
## Code Before:
import sys,requests,configparser

github_username = ''
github_token = ''
free_user = ''
free_pass = ''

def loadConfig():
    config = configparser.ConfigParser()
    config.read('config.cfg')

    global github_username,github_token,free_user,free_pass
    try:
        github_username = config['Github']['username']
        github_token = config['Github']['token']
        free_user = config['Free']['user']
        free_pass = config['Free']['pass']
    except:
        print('Missing information in config.cfg, see README.md')
        sys.exit(1)

def githubAPI(action):
    r = requests.get('https://api.github.com' + action, auth=(github_username, github_token))
    return r.json()

def sendSMS(msg):
    return requests.post('https://smsapi.free-mobile.fr/sendmsg', params={'user' : free_user, 'pass' : free_pass, 'msg' : msg}).status_code

if __name__ == '__main__':
    loadConfig()

    notifs = githubAPI('/notifications')

    unread = []
    for notif in notifs:
        if notif['unread']:
            unread.append(notif)

    msg = '[Github] Unread notifications :\n'
    for n in unread:
        msg += " [" + n['subject']['type'] + "] " + n['subject']['title'] + " in " + n['repository']['full_name'] + "\n"

    sendSMS(msg)

## Instruction:
Send SMS, only if there are notifications

## Code After:
import sys,requests,configparser

github_username = ''
github_token = ''
free_user = ''
free_pass = ''

def loadConfig():
    config = configparser.ConfigParser()
    config.read('config.cfg')

    global github_username,github_token,free_user,free_pass
    try:
        github_username = config['Github']['username']
        github_token = config['Github']['token']
        free_user = config['Free']['user']
        free_pass = config['Free']['pass']
    except:
        print('Missing information in config.cfg, see README.md')
        sys.exit(1)

def githubAPI(action):
    r = requests.get('https://api.github.com' + action, auth=(github_username, github_token))
    return r.json()

def sendSMS(msg):
    return requests.post('https://smsapi.free-mobile.fr/sendmsg', params={'user' : free_user, 'pass' : free_pass, 'msg' : msg}).status_code

if __name__ == '__main__':
    loadConfig()

    notifs = githubAPI('/notifications')

    unread = []
    for notif in notifs:
        if notif['unread']:
            unread.append(notif)

    if len(unread) > 0:
        msg = '[Github] Unread notifications :\n'
        for n in unread:
            msg += " [" + n['subject']['type'] + "] " + n['subject']['title'] + " in " + n['repository']['full_name'] + "\n"

        sendSMS(msg)
// ... existing code ...
    config.read('config.cfg')

    global github_username,github_token,free_user,free_pass
// ... modified code ...
    if len(unread) > 0:
        msg = '[Github] Unread notifications :\n'
        for n in unread:
            msg += " [" + n['subject']['type'] + "] " + n['subject']['title'] + " in " + n['repository']['full_name'] + "\n"

        sendSMS(msg)
// ... rest of the code ...
f80bd7dbb1b66f3fec52200ecfbc50d779caca05
src/tmlib/workflow/jterator/args.py
src/tmlib/workflow/jterator/args.py
from tmlib.workflow.args import Argument
from tmlib.workflow.args import BatchArguments
from tmlib.workflow.args import SubmissionArguments
from tmlib.workflow.args import ExtraArguments
from tmlib.workflow.registry import batch_args
from tmlib.workflow.registry import submission_args
from tmlib.workflow.registry import extra_args


@batch_args('jterator')
class JteratorBatchArguments(BatchArguments):

    plot = Argument(
        type=bool, default=False, flag='p', disabled=True,
        help='whether plotting should be activated'
    )


@submission_args('jterator')
class JteratorSubmissionArguments(SubmissionArguments):

    pass


def get_names_of_existing_pipelines(experiment):
    '''Gets names of all existing jterator pipelines for a given experiment.

    Parameters
    ----------
    experiment: tmlib.models.Experiment
        processed experiment

    Returns
    -------
    List[str]
        names of jterator pipelines
    '''
    import os
    from tmlib.workflow.jterator.project import list_projects
    return [
        os.path.basename(project)
        for project
        in list_projects(os.path.join(experiment.workflow_location, 'jterator'))
    ]


@extra_args('jterator')
class JteratorExtraArguments(ExtraArguments):

    pipeline = Argument(
        type=str, help='name of the pipeline that should be processed',
        required=True, flag='p',
        get_choices=get_names_of_existing_pipelines
    )
from tmlib.workflow.args import Argument
from tmlib.workflow.args import BatchArguments
from tmlib.workflow.args import SubmissionArguments
from tmlib.workflow.args import ExtraArguments
from tmlib.workflow.registry import batch_args
from tmlib.workflow.registry import submission_args
from tmlib.workflow.registry import extra_args


@batch_args('jterator')
class JteratorBatchArguments(BatchArguments):

    plot = Argument(
        type=bool, default=False, flag='p', disabled=True,
        help='whether plotting should be activated'
    )


@submission_args('jterator')
class JteratorSubmissionArguments(SubmissionArguments):

    pass


def get_names_of_existing_pipelines(experiment):
    '''Gets names of all existing jterator pipelines for a given experiment.

    Parameters
    ----------
    experiment: tmlib.models.Experiment
        processed experiment

    Returns
    -------
    List[str]
        names of jterator pipelines
    '''
    import os
    from tmlib.workflow.jterator.project import list_projects
    directory = os.path.join(experiment.workflow_location, 'jterator')
    if not os.path.exists(directory):
        return []
    else:
        return [
            os.path.basename(project)
            for project in list_projects(directory)
        ]


@extra_args('jterator')
class JteratorExtraArguments(ExtraArguments):

    pipeline = Argument(
        type=str, help='name of the pipeline that should be processed',
        required=True, flag='p',
        get_choices=get_names_of_existing_pipelines
    )
Fix bug in function that lists existing jterator projects
Fix bug in function that lists existing jterator projects
Python
agpl-3.0
TissueMAPS/TmLibrary,TissueMAPS/TmLibrary,TissueMAPS/TmLibrary,TissueMAPS/TmLibrary,TissueMAPS/TmLibrary
from tmlib.workflow.args import Argument from tmlib.workflow.args import BatchArguments from tmlib.workflow.args import SubmissionArguments from tmlib.workflow.args import ExtraArguments from tmlib.workflow.registry import batch_args from tmlib.workflow.registry import submission_args from tmlib.workflow.registry import extra_args @batch_args('jterator') class JteratorBatchArguments(BatchArguments): plot = Argument( type=bool, default=False, flag='p', disabled=True, help='whether plotting should be activated' ) @submission_args('jterator') class JteratorSubmissionArguments(SubmissionArguments): pass def get_names_of_existing_pipelines(experiment): '''Gets names of all existing jterator pipelines for a given experiment. Parameters ---------- experiment: tmlib.models.Experiment processed experiment Returns ------- List[str] names of jterator pipelines ''' import os from tmlib.workflow.jterator.project import list_projects + directory = os.path.join(experiment.workflow_location, 'jterator') + if not os.path.exists(directory): + return [] + else: - return [ + return [ - os.path.basename(project) + os.path.basename(project) + for project in list_projects(directory) + ] - for project - in list_projects(os.path.join(experiment.workflow_location, 'jterator')) - ] @extra_args('jterator') class JteratorExtraArguments(ExtraArguments): pipeline = Argument( type=str, help='name of the pipeline that should be processed', required=True, flag='p', get_choices=get_names_of_existing_pipelines )
Fix bug in function that lists existing jterator projects
## Code Before: from tmlib.workflow.args import Argument from tmlib.workflow.args import BatchArguments from tmlib.workflow.args import SubmissionArguments from tmlib.workflow.args import ExtraArguments from tmlib.workflow.registry import batch_args from tmlib.workflow.registry import submission_args from tmlib.workflow.registry import extra_args @batch_args('jterator') class JteratorBatchArguments(BatchArguments): plot = Argument( type=bool, default=False, flag='p', disabled=True, help='whether plotting should be activated' ) @submission_args('jterator') class JteratorSubmissionArguments(SubmissionArguments): pass def get_names_of_existing_pipelines(experiment): '''Gets names of all existing jterator pipelines for a given experiment. Parameters ---------- experiment: tmlib.models.Experiment processed experiment Returns ------- List[str] names of jterator pipelines ''' import os from tmlib.workflow.jterator.project import list_projects return [ os.path.basename(project) for project in list_projects(os.path.join(experiment.workflow_location, 'jterator')) ] @extra_args('jterator') class JteratorExtraArguments(ExtraArguments): pipeline = Argument( type=str, help='name of the pipeline that should be processed', required=True, flag='p', get_choices=get_names_of_existing_pipelines ) ## Instruction: Fix bug in function that lists existing jterator projects ## Code After: from tmlib.workflow.args import Argument from tmlib.workflow.args import BatchArguments from tmlib.workflow.args import SubmissionArguments from tmlib.workflow.args import ExtraArguments from tmlib.workflow.registry import batch_args from tmlib.workflow.registry import submission_args from tmlib.workflow.registry import extra_args @batch_args('jterator') class JteratorBatchArguments(BatchArguments): plot = Argument( type=bool, default=False, flag='p', disabled=True, help='whether plotting should be activated' ) @submission_args('jterator') class JteratorSubmissionArguments(SubmissionArguments): pass def get_names_of_existing_pipelines(experiment): '''Gets names of all existing jterator pipelines for a given experiment. Parameters ---------- experiment: tmlib.models.Experiment processed experiment Returns ------- List[str] names of jterator pipelines ''' import os from tmlib.workflow.jterator.project import list_projects directory = os.path.join(experiment.workflow_location, 'jterator') if not os.path.exists(directory): return [] else: return [ os.path.basename(project) for project in list_projects(directory) ] @extra_args('jterator') class JteratorExtraArguments(ExtraArguments): pipeline = Argument( type=str, help='name of the pipeline that should be processed', required=True, flag='p', get_choices=get_names_of_existing_pipelines )
# ... existing code ... from tmlib.workflow.jterator.project import list_projects directory = os.path.join(experiment.workflow_location, 'jterator') if not os.path.exists(directory): return [] else: return [ os.path.basename(project) for project in list_projects(directory) ] # ... rest of the code ...
be8ac3ac13fee7db684c931cdc15be98ca6a283c
ample/util/tests/test_mrbump_util.py
ample/util/tests/test_mrbump_util.py
"""Test functions for util.mrbump_util""" import cPickle import os import unittest from ample.constants import AMPLE_PKL, SHARE_DIR from ample.util import mrbump_util class Test(unittest.TestCase): @classmethod def setUpClass(cls): cls.thisd = os.path.abspath( os.path.dirname( __file__ ) ) cls.ample_share = SHARE_DIR cls.testfiles_dir = os.path.join(cls.ample_share,'testfiles') def test_final_summary(self): pkl = os.path.join(self.testfiles_dir, AMPLE_PKL) if not os.path.isfile(pkl): return with open(pkl) as f: d = cPickle.load(f) summary = mrbump_util.finalSummary(d) self.assertIsNotNone(summary) def test_topfiles(self): topf = mrbump_util.ResultsSummary(results_pkl=os.path.join(self.testfiles_dir, AMPLE_PKL)).topFiles() self.assertEqual(len(topf),3) self.assertIn('info',topf[2]) if __name__ == "__main__": unittest.main()
"""Test functions for util.mrbump_util""" import cPickle import os import unittest from ample.constants import AMPLE_PKL, SHARE_DIR from ample.util import mrbump_util class Test(unittest.TestCase): @classmethod def setUpClass(cls): cls.thisd = os.path.abspath( os.path.dirname( __file__ ) ) cls.ample_share = SHARE_DIR cls.testfiles_dir = os.path.join(cls.ample_share,'testfiles') def test_final_summary(self): pkl = os.path.join(self.testfiles_dir, AMPLE_PKL) if not os.path.isfile(pkl): return with open(pkl) as f: d = cPickle.load(f) summary = mrbump_util.finalSummary(d) self.assertIsNotNone(summary) def test_topfiles(self): topf = mrbump_util.ResultsSummary(results_pkl=os.path.join(self.testfiles_dir, AMPLE_PKL)).topFiles() self.assertEqual(len(topf),3) self.assertEqual(topf[2]['source'],'SHELXE trace of MR result') if __name__ == "__main__": unittest.main()
Update unit test for changes to topf
Update unit test for changes to topf
Python
bsd-3-clause
rigdenlab/ample,rigdenlab/ample,linucks/ample,linucks/ample
"""Test functions for util.mrbump_util""" import cPickle import os import unittest from ample.constants import AMPLE_PKL, SHARE_DIR from ample.util import mrbump_util class Test(unittest.TestCase): @classmethod def setUpClass(cls): cls.thisd = os.path.abspath( os.path.dirname( __file__ ) ) cls.ample_share = SHARE_DIR cls.testfiles_dir = os.path.join(cls.ample_share,'testfiles') def test_final_summary(self): pkl = os.path.join(self.testfiles_dir, AMPLE_PKL) if not os.path.isfile(pkl): return with open(pkl) as f: d = cPickle.load(f) summary = mrbump_util.finalSummary(d) self.assertIsNotNone(summary) def test_topfiles(self): topf = mrbump_util.ResultsSummary(results_pkl=os.path.join(self.testfiles_dir, AMPLE_PKL)).topFiles() self.assertEqual(len(topf),3) - self.assertIn('info',topf[2]) + self.assertEqual(topf[2]['source'],'SHELXE trace of MR result') if __name__ == "__main__": unittest.main()
Update unit test for changes to topf
## Code Before: """Test functions for util.mrbump_util""" import cPickle import os import unittest from ample.constants import AMPLE_PKL, SHARE_DIR from ample.util import mrbump_util class Test(unittest.TestCase): @classmethod def setUpClass(cls): cls.thisd = os.path.abspath( os.path.dirname( __file__ ) ) cls.ample_share = SHARE_DIR cls.testfiles_dir = os.path.join(cls.ample_share,'testfiles') def test_final_summary(self): pkl = os.path.join(self.testfiles_dir, AMPLE_PKL) if not os.path.isfile(pkl): return with open(pkl) as f: d = cPickle.load(f) summary = mrbump_util.finalSummary(d) self.assertIsNotNone(summary) def test_topfiles(self): topf = mrbump_util.ResultsSummary(results_pkl=os.path.join(self.testfiles_dir, AMPLE_PKL)).topFiles() self.assertEqual(len(topf),3) self.assertIn('info',topf[2]) if __name__ == "__main__": unittest.main() ## Instruction: Update unit test for changes to topf ## Code After: """Test functions for util.mrbump_util""" import cPickle import os import unittest from ample.constants import AMPLE_PKL, SHARE_DIR from ample.util import mrbump_util class Test(unittest.TestCase): @classmethod def setUpClass(cls): cls.thisd = os.path.abspath( os.path.dirname( __file__ ) ) cls.ample_share = SHARE_DIR cls.testfiles_dir = os.path.join(cls.ample_share,'testfiles') def test_final_summary(self): pkl = os.path.join(self.testfiles_dir, AMPLE_PKL) if not os.path.isfile(pkl): return with open(pkl) as f: d = cPickle.load(f) summary = mrbump_util.finalSummary(d) self.assertIsNotNone(summary) def test_topfiles(self): topf = mrbump_util.ResultsSummary(results_pkl=os.path.join(self.testfiles_dir, AMPLE_PKL)).topFiles() self.assertEqual(len(topf),3) self.assertEqual(topf[2]['source'],'SHELXE trace of MR result') if __name__ == "__main__": unittest.main()
... self.assertEqual(len(topf),3) self.assertEqual(topf[2]['source'],'SHELXE trace of MR result') ...
052de49807dcb9895608e3882b799642b0b08d18
exercises/circular-buffer/circular_buffer.py
exercises/circular-buffer/circular_buffer.py
class BufferFullException(Exception): pass class BufferEmptyException(Exception): pass class CircularBuffer(object): def __init__(self): pass
class BufferFullException(Exception): pass class BufferEmptyException(Exception): pass class CircularBuffer(object): def __init__(self, capacity): pass
Add parameter capacity to circular-buffer example
Add parameter capacity to circular-buffer example Fixes #550
Python
mit
jmluy/xpython,mweb/python,mweb/python,pheanex/xpython,exercism/xpython,jmluy/xpython,exercism/xpython,smalley/python,behrtam/xpython,behrtam/xpython,exercism/python,N-Parsons/exercism-python,exercism/python,N-Parsons/exercism-python,pheanex/xpython,smalley/python
class BufferFullException(Exception): pass class BufferEmptyException(Exception): pass class CircularBuffer(object): - def __init__(self): + def __init__(self, capacity): pass
Add parameter capacity to circular-buffer example
## Code Before: class BufferFullException(Exception): pass class BufferEmptyException(Exception): pass class CircularBuffer(object): def __init__(self): pass ## Instruction: Add parameter capacity to circular-buffer example ## Code After: class BufferFullException(Exception): pass class BufferEmptyException(Exception): pass class CircularBuffer(object): def __init__(self, capacity): pass
# ... existing code ... class CircularBuffer(object): def __init__(self, capacity): pass # ... rest of the code ...
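The exercise stub above only gains a `capacity` parameter; the behaviour it is meant to support is a fixed-size FIFO. For context, a minimal implementation honouring that parameter could look like the following (a sketch under the usual exercise semantics, not the official reference solution):

class BufferFullException(Exception):
    pass


class BufferEmptyException(Exception):
    pass


class CircularBuffer(object):
    def __init__(self, capacity):
        # capacity bounds how many unread items the buffer may hold
        self.capacity = capacity
        self.buffer = []

    def read(self):
        if not self.buffer:
            raise BufferEmptyException()
        return self.buffer.pop(0)

    def write(self, data):
        if len(self.buffer) >= self.capacity:
            raise BufferFullException()
        self.buffer.append(data)

    def overwrite(self, data):
        # When full, evict the oldest item instead of raising.
        if len(self.buffer) >= self.capacity:
            self.buffer.pop(0)
        self.buffer.append(data)

    def clear(self):
        self.buffer = []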
1237e75486ac0ae5c9665ec10d6701c530d601e8
src/petclaw/__init__.py
src/petclaw/__init__.py
"""Main petclaw package""" import os import logging, logging.config # Default logging configuration file _DEFAULT_LOG_CONFIG_PATH = os.path.join(os.path.dirname(__file__),'log.config') del os # Setup loggers logging.config.fileConfig(_DEFAULT_LOG_CONFIG_PATH) __all__ = [] # Module imports __all__.extend(['Controller','Data','Dimension','Grid','Solution','State','riemann']) from controller import Controller from grid import Dimension from pyclaw.grid import Grid from pyclaw.data import Data from pyclaw.solution import Solution from state import State __all__.extend(['ClawSolver1D','ClawSolver2D','SharpClawSolver1D','SharpClawSolver2D']) from clawpack import ClawSolver1D from clawpack import ClawSolver2D from sharpclaw import SharpClawSolver1D from sharpclaw import SharpClawSolver2D __all__.append('BC') from pyclaw.solver import BC # Sub-packages import limiters from limiters import * __all__.extend(limiters.__all__) import plot __all__.append('plot')
"""Main petclaw package""" import os import logging, logging.config # Default logging configuration file _DEFAULT_LOG_CONFIG_PATH = os.path.join(os.path.dirname(__file__),'log.config') del os # Setup loggers logging.config.fileConfig(_DEFAULT_LOG_CONFIG_PATH) __all__ = [] # Module imports __all__.extend(['Controller','Data','Dimension','Grid','Solution','State','riemann']) from controller import Controller from grid import Dimension from pyclaw.grid import Grid from pyclaw.data import Data from pyclaw.solution import Solution from state import State __all__.extend(['ClawSolver1D','ClawSolver2D','SharpClawSolver1D','SharpClawSolver2D']) from clawpack import ClawSolver1D from clawpack import ClawSolver2D from sharpclaw import SharpClawSolver1D from sharpclaw import SharpClawSolver2D from implicitclawpack import ImplicitClawSolver1D __all__.append('BC') from pyclaw.solver import BC # Sub-packages import limiters from limiters import * __all__.extend(limiters.__all__) import plot __all__.append('plot')
Add ImplicitClawSolver1D to base namespace
Add ImplicitClawSolver1D to base namespace
Python
bsd-3-clause
unterweg/peanoclaw,unterweg/peanoclaw,unterweg/peanoclaw,unterweg/peanoclaw,unterweg/peanoclaw,unterweg/peanoclaw
"""Main petclaw package""" import os import logging, logging.config # Default logging configuration file _DEFAULT_LOG_CONFIG_PATH = os.path.join(os.path.dirname(__file__),'log.config') del os # Setup loggers logging.config.fileConfig(_DEFAULT_LOG_CONFIG_PATH) __all__ = [] # Module imports __all__.extend(['Controller','Data','Dimension','Grid','Solution','State','riemann']) from controller import Controller from grid import Dimension from pyclaw.grid import Grid from pyclaw.data import Data from pyclaw.solution import Solution from state import State __all__.extend(['ClawSolver1D','ClawSolver2D','SharpClawSolver1D','SharpClawSolver2D']) from clawpack import ClawSolver1D from clawpack import ClawSolver2D from sharpclaw import SharpClawSolver1D from sharpclaw import SharpClawSolver2D + from implicitclawpack import ImplicitClawSolver1D __all__.append('BC') from pyclaw.solver import BC # Sub-packages import limiters from limiters import * __all__.extend(limiters.__all__) import plot __all__.append('plot')
Add ImplicitClawSolver1D to base namespace
## Code Before: """Main petclaw package""" import os import logging, logging.config # Default logging configuration file _DEFAULT_LOG_CONFIG_PATH = os.path.join(os.path.dirname(__file__),'log.config') del os # Setup loggers logging.config.fileConfig(_DEFAULT_LOG_CONFIG_PATH) __all__ = [] # Module imports __all__.extend(['Controller','Data','Dimension','Grid','Solution','State','riemann']) from controller import Controller from grid import Dimension from pyclaw.grid import Grid from pyclaw.data import Data from pyclaw.solution import Solution from state import State __all__.extend(['ClawSolver1D','ClawSolver2D','SharpClawSolver1D','SharpClawSolver2D']) from clawpack import ClawSolver1D from clawpack import ClawSolver2D from sharpclaw import SharpClawSolver1D from sharpclaw import SharpClawSolver2D __all__.append('BC') from pyclaw.solver import BC # Sub-packages import limiters from limiters import * __all__.extend(limiters.__all__) import plot __all__.append('plot') ## Instruction: Add ImplicitClawSolver1D to base namespace ## Code After: """Main petclaw package""" import os import logging, logging.config # Default logging configuration file _DEFAULT_LOG_CONFIG_PATH = os.path.join(os.path.dirname(__file__),'log.config') del os # Setup loggers logging.config.fileConfig(_DEFAULT_LOG_CONFIG_PATH) __all__ = [] # Module imports __all__.extend(['Controller','Data','Dimension','Grid','Solution','State','riemann']) from controller import Controller from grid import Dimension from pyclaw.grid import Grid from pyclaw.data import Data from pyclaw.solution import Solution from state import State __all__.extend(['ClawSolver1D','ClawSolver2D','SharpClawSolver1D','SharpClawSolver2D']) from clawpack import ClawSolver1D from clawpack import ClawSolver2D from sharpclaw import SharpClawSolver1D from sharpclaw import SharpClawSolver2D from implicitclawpack import ImplicitClawSolver1D __all__.append('BC') from pyclaw.solver import BC # Sub-packages import limiters from limiters import * __all__.extend(limiters.__all__) import plot __all__.append('plot')
# ... existing code ... from sharpclaw import SharpClawSolver2D from implicitclawpack import ImplicitClawSolver1D # ... rest of the code ...
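The one-line change matters because `petclaw/__init__.py` defines what the bare package exports. After this import, callers can reach the solver from the package root instead of the submodule (hypothetical usage, assuming the package is importable):

import petclaw

solver_cls = petclaw.ImplicitClawSolver1D  # resolved via the new import in __init__.py
# previously this required: from petclaw.implicitclawpack import ImplicitClawSolver1D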
a2b1d10e042d135c3c014622ffeabd7e96a46f9f
tests/test_update_target.py
tests/test_update_target.py
import io import pytest from vws import VWS from vws.exceptions import UnknownTarget class TestUpdateTarget: """ Test for updating a target. """ def test_get_target( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ Details of a target are returned by ``get_target``. """ target_id = client.add_target( name='x', width=1, image=high_quality_image, ) client.update_target(target_id=target_id) result = client.get_target(target_id=target_id) expected_keys = { 'target_id', 'active_flag', 'name', 'width', 'tracking_rating', 'reco_rating', } assert result['target_record'].keys() == expected_keys def test_no_such_target( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ An ``UnknownTarget`` exception is raised when getting a target which does not exist. """ with pytest.raises(UnknownTarget): client.get_target(target_id='a')
import io import pytest from vws import VWS from vws.exceptions import UnknownTarget class TestUpdateTarget: """ Test for updating a target. """ def test_get_target( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ Details of a target are returned by ``get_target``. """ # target_id = client.add_target( # name='x', # width=1, # image=high_quality_image, # ) # # client.update_target(target_id=target_id) # result = client.get_target(target_id=target_id) # expected_keys = { # 'target_id', # 'active_flag', # 'name', # 'width', # 'tracking_rating', # 'reco_rating', # } # assert result['target_record'].keys() == expected_keys # # def test_no_such_target( # self, # client: VWS, # high_quality_image: io.BytesIO, # ) -> None: # """ # An ``UnknownTarget`` exception is raised when getting a target which # does not exist. # """ # with pytest.raises(UnknownTarget): # client.get_target(target_id='a')
Comment out part done code
Comment out part done code
Python
mit
adamtheturtle/vws-python,adamtheturtle/vws-python
import io import pytest from vws import VWS from vws.exceptions import UnknownTarget class TestUpdateTarget: """ Test for updating a target. """ def test_get_target( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ Details of a target are returned by ``get_target``. """ - target_id = client.add_target( + # target_id = client.add_target( - name='x', + # name='x', - width=1, + # width=1, - image=high_quality_image, + # image=high_quality_image, - ) + # ) + # + # client.update_target(target_id=target_id) + # result = client.get_target(target_id=target_id) + # expected_keys = { + # 'target_id', + # 'active_flag', + # 'name', + # 'width', + # 'tracking_rating', + # 'reco_rating', + # } + # assert result['target_record'].keys() == expected_keys + # + # def test_no_such_target( + # self, + # client: VWS, + # high_quality_image: io.BytesIO, + # ) -> None: + # """ + # An ``UnknownTarget`` exception is raised when getting a target which + # does not exist. + # """ + # with pytest.raises(UnknownTarget): + # client.get_target(target_id='a') - client.update_target(target_id=target_id) - result = client.get_target(target_id=target_id) - expected_keys = { - 'target_id', - 'active_flag', - 'name', - 'width', - 'tracking_rating', - 'reco_rating', - } - assert result['target_record'].keys() == expected_keys - - def test_no_such_target( - self, - client: VWS, - high_quality_image: io.BytesIO, - ) -> None: - """ - An ``UnknownTarget`` exception is raised when getting a target which - does not exist. - """ - with pytest.raises(UnknownTarget): - client.get_target(target_id='a') -
Comment out part done code
## Code Before: import io import pytest from vws import VWS from vws.exceptions import UnknownTarget class TestUpdateTarget: """ Test for updating a target. """ def test_get_target( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ Details of a target are returned by ``get_target``. """ target_id = client.add_target( name='x', width=1, image=high_quality_image, ) client.update_target(target_id=target_id) result = client.get_target(target_id=target_id) expected_keys = { 'target_id', 'active_flag', 'name', 'width', 'tracking_rating', 'reco_rating', } assert result['target_record'].keys() == expected_keys def test_no_such_target( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ An ``UnknownTarget`` exception is raised when getting a target which does not exist. """ with pytest.raises(UnknownTarget): client.get_target(target_id='a') ## Instruction: Comment out part done code ## Code After: import io import pytest from vws import VWS from vws.exceptions import UnknownTarget class TestUpdateTarget: """ Test for updating a target. """ def test_get_target( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ Details of a target are returned by ``get_target``. """ # target_id = client.add_target( # name='x', # width=1, # image=high_quality_image, # ) # # client.update_target(target_id=target_id) # result = client.get_target(target_id=target_id) # expected_keys = { # 'target_id', # 'active_flag', # 'name', # 'width', # 'tracking_rating', # 'reco_rating', # } # assert result['target_record'].keys() == expected_keys # # def test_no_such_target( # self, # client: VWS, # high_quality_image: io.BytesIO, # ) -> None: # """ # An ``UnknownTarget`` exception is raised when getting a target which # does not exist. # """ # with pytest.raises(UnknownTarget): # client.get_target(target_id='a')
... """ # target_id = client.add_target( # name='x', # width=1, # image=high_quality_image, # ) # # client.update_target(target_id=target_id) # result = client.get_target(target_id=target_id) # expected_keys = { # 'target_id', # 'active_flag', # 'name', # 'width', # 'tracking_rating', # 'reco_rating', # } # assert result['target_record'].keys() == expected_keys # # def test_no_such_target( # self, # client: VWS, # high_quality_image: io.BytesIO, # ) -> None: # """ # An ``UnknownTarget`` exception is raised when getting a target which # does not exist. # """ # with pytest.raises(UnknownTarget): # client.get_target(target_id='a') ...
95fa71c4439343764cac95a1667e08dc21cb6ebe
plugins.py
plugins.py
from fabric.api import * import os import re __all__ = [] @task def test(plugin_path): """ Symbolically link a host file that contains a redcap plugin into the ./redcap/plugins folder :param plugin_path: path to plugin folder relative to VagrantFile :return: """ if not os.path.exists(plugin_path): abort("The folder %s does not exist. Please provide a relative path to a plugin folder you would like to test in the local vm" % plugin_path) redcap_root = env.live_project_full_path source_path = "/vagrant/" + plugin_path target_folder = "/".join([redcap_root, env.plugins_path]) run("ln -sf %s %s" % (source_path, target_folder))
from fabric.api import * import os import re __all__ = [] @task def test(plugin_path): """ Symbolically link a host file that contains a redcap plugin into the ./redcap/plugins folder :param plugin_path: path to plugin folder relative to VagrantFile :return: """ if not os.path.exists(plugin_path): abort("The folder %s does not exist. Please provide a relative path to a plugin folder you would like to test in the local vm" % plugin_path) redcap_root = env.live_project_full_path source_path = "/vagrant/" + plugin_path target_folder = "/".join([redcap_root, env.plugins_path]) with settings(user=env.deploy_user): run("ln -sf %s %s" % (source_path, target_folder))
Fix plugin test by running scripts as user deploy
Fix plugin test by running scripts as user deploy
Python
bsd-3-clause
ctsit/redcap_deployment,ctsit/redcap_deployment,ctsit/redcap_deployment,ctsit/redcap_deployment
from fabric.api import * import os import re __all__ = [] @task def test(plugin_path): """ Symbolically link a host file that contains a redcap plugin into the ./redcap/plugins folder :param plugin_path: path to plugin folder relative to VagrantFile :return: """ if not os.path.exists(plugin_path): abort("The folder %s does not exist. Please provide a relative path to a plugin folder you would like to test in the local vm" % plugin_path) redcap_root = env.live_project_full_path source_path = "/vagrant/" + plugin_path target_folder = "/".join([redcap_root, env.plugins_path]) + with settings(user=env.deploy_user): - run("ln -sf %s %s" % (source_path, target_folder)) + run("ln -sf %s %s" % (source_path, target_folder))
Fix plugin test by running scripts as user deploy
## Code Before: from fabric.api import * import os import re __all__ = [] @task def test(plugin_path): """ Symbolically link a host file that contains a redcap plugin into the ./redcap/plugins folder :param plugin_path: path to plugin folder relative to VagrantFile :return: """ if not os.path.exists(plugin_path): abort("The folder %s does not exist. Please provide a relative path to a plugin folder you would like to test in the local vm" % plugin_path) redcap_root = env.live_project_full_path source_path = "/vagrant/" + plugin_path target_folder = "/".join([redcap_root, env.plugins_path]) run("ln -sf %s %s" % (source_path, target_folder)) ## Instruction: Fix plugin test by running scripts as user deploy ## Code After: from fabric.api import * import os import re __all__ = [] @task def test(plugin_path): """ Symbolically link a host file that contains a redcap plugin into the ./redcap/plugins folder :param plugin_path: path to plugin folder relative to VagrantFile :return: """ if not os.path.exists(plugin_path): abort("The folder %s does not exist. Please provide a relative path to a plugin folder you would like to test in the local vm" % plugin_path) redcap_root = env.live_project_full_path source_path = "/vagrant/" + plugin_path target_folder = "/".join([redcap_root, env.plugins_path]) with settings(user=env.deploy_user): run("ln -sf %s %s" % (source_path, target_folder))
// ... existing code ... with settings(user=env.deploy_user): run("ln -sf %s %s" % (source_path, target_folder)) // ... rest of the code ...
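Why the wrapper fixes the test: Fabric's `settings` context manager temporarily overrides `env` values for the commands inside the block and restores them on exit, so only the `ln` runs as the deploy user. A minimal sketch of the mechanism (hypothetical values, Fabric 1.x API):

from fabric.api import env, run, settings

env.user = 'vagrant'

def do_privileged_step():
    with settings(user='deploy'):
        run('whoami')   # executes over SSH as 'deploy'
    run('whoami')       # back to 'vagrant' after the block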
d8e9201c627840c72a540a77425ec0c13ac48a22
tests/test_cmd.py
tests/test_cmd.py
import unittest from click.testing import CliRunner from scuevals_api.cmd import cli class CmdsTestCase(unittest.TestCase): @classmethod def setUpClass(cls): cls.runner = CliRunner() def cli_run(self, *cmds): return self.runner.invoke(cli, cmds) cls.cli_run = cli_run def test_initdb(self): result = self.cli_run('initdb') self.assertEqual(0, result.exit_code)
import unittest from click.testing import CliRunner from scuevals_api.cmd import cli class CmdsTestCase(unittest.TestCase): @classmethod def setUpClass(cls): cls.runner = CliRunner() def cli_run(self, *cmds): return self.runner.invoke(cli, cmds) cls.cli_run = cli_run def test_initdb(self): result = self.cli_run('initdb') self.assertEqual(0, result.exit_code, msg=result.output)
Add detailed error for CLI test failure
Add detailed error for CLI test failure
Python
agpl-3.0
SCUEvals/scuevals-api,SCUEvals/scuevals-api
import unittest from click.testing import CliRunner from scuevals_api.cmd import cli class CmdsTestCase(unittest.TestCase): @classmethod def setUpClass(cls): cls.runner = CliRunner() def cli_run(self, *cmds): return self.runner.invoke(cli, cmds) cls.cli_run = cli_run def test_initdb(self): result = self.cli_run('initdb') - self.assertEqual(0, result.exit_code) + self.assertEqual(0, result.exit_code, msg=result.output)
Add detailed error for CLI test failure
## Code Before: import unittest from click.testing import CliRunner from scuevals_api.cmd import cli class CmdsTestCase(unittest.TestCase): @classmethod def setUpClass(cls): cls.runner = CliRunner() def cli_run(self, *cmds): return self.runner.invoke(cli, cmds) cls.cli_run = cli_run def test_initdb(self): result = self.cli_run('initdb') self.assertEqual(0, result.exit_code) ## Instruction: Add detailed error for CLI test failure ## Code After: import unittest from click.testing import CliRunner from scuevals_api.cmd import cli class CmdsTestCase(unittest.TestCase): @classmethod def setUpClass(cls): cls.runner = CliRunner() def cli_run(self, *cmds): return self.runner.invoke(cli, cmds) cls.cli_run = cli_run def test_initdb(self): result = self.cli_run('initdb') self.assertEqual(0, result.exit_code, msg=result.output)
// ... existing code ... result = self.cli_run('initdb') self.assertEqual(0, result.exit_code, msg=result.output) // ... rest of the code ...
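The `msg=result.output` argument matters because `CliRunner.invoke` captures stdout and swallows exceptions, so a bare exit-code assertion says nothing about the cause. Attaching the captured output puts the command's own error text into the test failure. The same pattern in isolation (hypothetical command, click API):

import click
from click.testing import CliRunner

@click.command()
def initdb():
    click.echo('initialized')

result = CliRunner().invoke(initdb)
assert result.exit_code == 0, result.output  # on failure, shows what the CLI printed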
9616afb9e8c7a5a599096b588cd71a714e001e2b
dduplicated/fileManager.py
dduplicated/fileManager.py
import os from threading import Thread def _delete(path): os.remove(path) def _link(src, path): os.symlink(src, path) def manager_files(paths, link): # The first file is preserved to not delete all files in directories. first = True src = "" deleted_files = [] linked_files = [] errors = [] for path in paths: if os.path.isfile(path): if first: first = False src = path else: Thread(target=_delete, args=(path)).start() deleted_files.append(path) if link: Thread(target=_link, args=(src, path)).start() linked_files.append(path) else: errors.append("Not identified by file: \"{}\"".format(path)) return {"preserved": src, "linked_files": linked_files, "deleted_files": deleted_files, "errors": errors} # Try The Voight-Kampff if you not recognize if is a replicant or not, all is suspect def manager(duplicates, create_link=False): if len(duplicates) == 0: return None processed_files = [] for files_by_hash in duplicates.values(): processed_files.append(manager_files(files_by_hash, create_link)) return processed_files def delete(duplicates): return manager(duplicates) def link(duplicates): return manager(duplicates, True)
import os from threading import Thread def _delete(path: str, src: str, link: bool): os.remove(path) if link: os.symlink(src, path) def manager_files(paths, link): # The first file is preserved to not delete all files in directories. first = True src = "" deleted_files = [] linked_files = [] errors = [] for path in paths: if os.path.isfile(path): if first: first = False src = path else: Thread(target=_delete, args=(path, src, link)).start() deleted_files.append(path) if link: linked_files.append(path) else: errors.append("Not identified by file: \"{}\"".format(path)) return {"preserved": src, "linked_files": linked_files, "deleted_files": deleted_files, "errors": errors} # Try The Voight-Kampff if you not recognize if is a replicant or not, all is suspect def manager(duplicates, create_link=False): if len(duplicates) == 0: return None processed_files = [] for files_by_hash in duplicates.values(): processed_files.append(manager_files(files_by_hash, create_link)) return processed_files def delete(duplicates): return manager(duplicates) def link(duplicates): return manager(duplicates, True)
Fix the link action. Remove the `_link` method and move its work into `_delete`; this fixes concurrency problems.
Fix the link action. Remove the `_link` method and move its work into `_delete`; this fixes concurrency problems.

Signed-off-by: messiasthi <[email protected]>
Python
mit
messiasthi/dduplicated-cli
import os from threading import Thread - def _delete(path): + def _delete(path: str, src: str, link: bool): os.remove(path) + if link: - - def _link(src, path): - os.symlink(src, path) + os.symlink(src, path) def manager_files(paths, link): # The first file is preserved to not delete all files in directories. first = True src = "" deleted_files = [] linked_files = [] errors = [] for path in paths: if os.path.isfile(path): if first: first = False src = path else: + - Thread(target=_delete, args=(path)).start() + Thread(target=_delete, args=(path, src, link)).start() deleted_files.append(path) if link: - Thread(target=_link, args=(src, path)).start() linked_files.append(path) + else: errors.append("Not identified by file: \"{}\"".format(path)) return {"preserved": src, "linked_files": linked_files, "deleted_files": deleted_files, "errors": errors} # Try The Voight-Kampff if you not recognize if is a replicant or not, all is suspect def manager(duplicates, create_link=False): if len(duplicates) == 0: return None processed_files = [] for files_by_hash in duplicates.values(): processed_files.append(manager_files(files_by_hash, create_link)) return processed_files def delete(duplicates): return manager(duplicates) def link(duplicates): return manager(duplicates, True)
Fix the link action. Remove the `_link` method and move its work into `_delete`; this fixes concurrency problems.
## Code Before:
import os
from threading import Thread


def _delete(path):
    os.remove(path)


def _link(src, path):
    os.symlink(src, path)


def manager_files(paths, link):
    # The first file is preserved to not delete all files in directories.
    first = True
    src = ""
    deleted_files = []
    linked_files = []
    errors = []
    for path in paths:
        if os.path.isfile(path):
            if first:
                first = False
                src = path
            else:
                Thread(target=_delete, args=(path)).start()
                deleted_files.append(path)
                if link:
                    Thread(target=_link, args=(src, path)).start()
                    linked_files.append(path)
        else:
            errors.append("Not identified by file: \"{}\"".format(path))

    return {"preserved": src, "linked_files": linked_files, "deleted_files": deleted_files, "errors": errors}


# Try The Voight-Kampff if you not recognize if is a replicant or not, all is suspect
def manager(duplicates, create_link=False):
    if len(duplicates) == 0:
        return None
    processed_files = []

    for files_by_hash in duplicates.values():
        processed_files.append(manager_files(files_by_hash, create_link))

    return processed_files


def delete(duplicates):
    return manager(duplicates)


def link(duplicates):
    return manager(duplicates, True)

## Instruction:
Fix the link action. Remove the `_link` method and move its work into `_delete`; this fixes concurrency problems.

## Code After:
import os
from threading import Thread


def _delete(path: str, src: str, link: bool):
    os.remove(path)
    if link:
        os.symlink(src, path)


def manager_files(paths, link):
    # The first file is preserved to not delete all files in directories.
    first = True
    src = ""
    deleted_files = []
    linked_files = []
    errors = []
    for path in paths:
        if os.path.isfile(path):
            if first:
                first = False
                src = path
            else:
                Thread(target=_delete, args=(path, src, link)).start()
                deleted_files.append(path)
                if link:
                    linked_files.append(path)
        else:
            errors.append("Not identified by file: \"{}\"".format(path))

    return {"preserved": src, "linked_files": linked_files, "deleted_files": deleted_files, "errors": errors}


# Try The Voight-Kampff if you not recognize if is a replicant or not, all is suspect
def manager(duplicates, create_link=False):
    if len(duplicates) == 0:
        return None
    processed_files = []

    for files_by_hash in duplicates.values():
        processed_files.append(manager_files(files_by_hash, create_link))

    return processed_files


def delete(duplicates):
    return manager(duplicates)


def link(duplicates):
    return manager(duplicates, True)
... def _delete(path: str, src: str, link: bool): os.remove(path) if link: os.symlink(src, path) ... else: Thread(target=_delete, args=(path, src, link)).start() deleted_files.append(path) ... if link: linked_files.append(path) else: ...
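The concurrency problem this commit fixes: the old code started `os.remove` and `os.symlink` on two separate threads, so nothing guaranteed the removal finished before the link was attempted on the same path. Folding both steps into one worker serializes them per file. The fixed shape, reduced to its essence (illustrative, simplified from the module above; paths are hypothetical):

import os
from threading import Thread

def replace_with_link(path, src, link):
    os.remove(path)            # step 1 always completes first
    if link:
        os.symlink(src, path)  # step 2 only runs after the removal

Thread(target=replace_with_link, args=('/tmp/dup.txt', '/tmp/keep.txt', True)).start()

Incidentally, `args=(path)` in the old code is a parenthesised string rather than a one-element tuple; `Thread` expects a sequence of arguments, which the new `args=(path, src, link)` provides.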
9f82fe03a38d9eaf4ccd22f2ee6d13907bc3b42e
relay_api/api/server.py
relay_api/api/server.py
from flask import Flask, jsonify server = Flask(__name__) def get_relays(relays): return jsonify({"relays": relays}), 200 def get_relay(relays, relay_name): code = 200 try: relay = relays[relay_name] except KeyError: code = 404 return "", code return jsonify({"relay": relay}), code
from flask import Flask, jsonify # import json server = Flask(__name__) def __serialize_relay(relays): if type(relays).__name__ == "relay": return jsonify({"gpio": relays.gpio, "NC": relays.nc, "state": relays.state}) di = {} for r in relays: di[r] = {"gpio": relays[r].gpio, "NC": relays[r].nc, "state": relays[r].state} return jsonify(di) def get_relays(relays_dict): return __serialize_relay(relays_dict), 200 def get_relay(relay): code = 200 if not relay: code = 404 return "", code return __serialize_relay(relay), code
Change to get a dict with the relay instances
Change to get a dict with the relay instances
Python
mit
pahumadad/raspi-relay-api
from flask import Flask, jsonify + # import json server = Flask(__name__) - def get_relays(relays): + def __serialize_relay(relays): - return jsonify({"relays": relays}), 200 + if type(relays).__name__ == "relay": + return jsonify({"gpio": relays.gpio, + "NC": relays.nc, + "state": relays.state}) + di = {} + for r in relays: + di[r] = {"gpio": relays[r].gpio, + "NC": relays[r].nc, + "state": relays[r].state} + return jsonify(di) + def get_relays(relays_dict): + return __serialize_relay(relays_dict), 200 + + - def get_relay(relays, relay_name): + def get_relay(relay): code = 200 + if not relay: - try: - relay = relays[relay_name] - except KeyError: code = 404 return "", code + return __serialize_relay(relay), code - return jsonify({"relay": relay}), code -
Change to get a dict with the relay instances
## Code Before: from flask import Flask, jsonify server = Flask(__name__) def get_relays(relays): return jsonify({"relays": relays}), 200 def get_relay(relays, relay_name): code = 200 try: relay = relays[relay_name] except KeyError: code = 404 return "", code return jsonify({"relay": relay}), code ## Instruction: Change to get a dict with the relay instances ## Code After: from flask import Flask, jsonify # import json server = Flask(__name__) def __serialize_relay(relays): if type(relays).__name__ == "relay": return jsonify({"gpio": relays.gpio, "NC": relays.nc, "state": relays.state}) di = {} for r in relays: di[r] = {"gpio": relays[r].gpio, "NC": relays[r].nc, "state": relays[r].state} return jsonify(di) def get_relays(relays_dict): return __serialize_relay(relays_dict), 200 def get_relay(relay): code = 200 if not relay: code = 404 return "", code return __serialize_relay(relay), code
# ... existing code ... from flask import Flask, jsonify # import json # ... modified code ... def __serialize_relay(relays): if type(relays).__name__ == "relay": return jsonify({"gpio": relays.gpio, "NC": relays.nc, "state": relays.state}) di = {} for r in relays: di[r] = {"gpio": relays[r].gpio, "NC": relays[r].nc, "state": relays[r].state} return jsonify(di) ... def get_relays(relays_dict): return __serialize_relay(relays_dict), 200 def get_relay(relay): code = 200 if not relay: code = 404 ... return "", code return __serialize_relay(relay), code # ... rest of the code ...
186a72b91798b11d13ea7f2538141f620b0787a8
tests/test_metrics.py
tests/test_metrics.py
import json from . import TestCase class MetricsTests(TestCase): def test_find(self): url = '/metrics/find' response = self.app.get(url) self.assertEqual(response.status_code, 400) response = self.app.get(url, query_string={'query': 'test'}) self.assertJSON(response, []) def test_expand(self): url = '/metrics/expand' response = self.app.get(url) self.assertJSON(response, {'errors': {'query': 'this parameter is required.'}}, status_code=400) response = self.app.get(url, query_string={'query': 'test'}) self.assertEqual(response.status_code, 200) self.assertEqual(json.loads(response.data.decode('utf-8')), {'results': []})
from . import TestCase class MetricsTests(TestCase): def test_find(self): url = '/metrics/find' response = self.app.get(url) self.assertEqual(response.status_code, 400) response = self.app.get(url, query_string={'query': 'test'}) self.assertJSON(response, []) def test_expand(self): url = '/metrics/expand' response = self.app.get(url) self.assertJSON(response, {'errors': {'query': 'this parameter is required.'}}, status_code=400) response = self.app.get(url, query_string={'query': 'test'}) self.assertJSON(response, {'results': []}) def test_noop(self): url = '/dashboard/find' response = self.app.get(url) self.assertJSON(response, {'dashboards': []}) url = '/dashboard/load/foo' response = self.app.get(url) self.assertJSON(response, {'error': "Dashboard 'foo' does not exist."}, status_code=404) url = '/events/get_data' response = self.app.get(url) self.assertJSON(response, [])
Add test for noop routes
Add test for noop routes
Python
apache-2.0
vladimir-smirnov-sociomantic/graphite-api,michaelrice/graphite-api,GeorgeJahad/graphite-api,vladimir-smirnov-sociomantic/graphite-api,michaelrice/graphite-api,alphapigger/graphite-api,raintank/graphite-api,hubrick/graphite-api,rackerlabs/graphite-api,Knewton/graphite-api,raintank/graphite-api,Knewton/graphite-api,bogus-py/graphite-api,cybem/graphite-api-iow,DaveBlooman/graphite-api,rackerlabs/graphite-api,brutasse/graphite-api,DaveBlooman/graphite-api,hubrick/graphite-api,raintank/graphite-api,tpeng/graphite-api,winguru/graphite-api,winguru/graphite-api,bogus-py/graphite-api,tpeng/graphite-api,cybem/graphite-api-iow,absalon-james/graphite-api,alphapigger/graphite-api,absalon-james/graphite-api,brutasse/graphite-api,GeorgeJahad/graphite-api
- import json - from . import TestCase class MetricsTests(TestCase): def test_find(self): url = '/metrics/find' response = self.app.get(url) self.assertEqual(response.status_code, 400) response = self.app.get(url, query_string={'query': 'test'}) self.assertJSON(response, []) def test_expand(self): url = '/metrics/expand' response = self.app.get(url) self.assertJSON(response, {'errors': {'query': 'this parameter is required.'}}, status_code=400) response = self.app.get(url, query_string={'query': 'test'}) + self.assertJSON(response, {'results': []}) - self.assertEqual(response.status_code, 200) - self.assertEqual(json.loads(response.data.decode('utf-8')), - {'results': []}) + def test_noop(self): + url = '/dashboard/find' + response = self.app.get(url) + self.assertJSON(response, {'dashboards': []}) + + url = '/dashboard/load/foo' + response = self.app.get(url) + self.assertJSON(response, {'error': "Dashboard 'foo' does not exist."}, + status_code=404) + + url = '/events/get_data' + response = self.app.get(url) + self.assertJSON(response, []) +
Add test for noop routes
## Code Before: import json from . import TestCase class MetricsTests(TestCase): def test_find(self): url = '/metrics/find' response = self.app.get(url) self.assertEqual(response.status_code, 400) response = self.app.get(url, query_string={'query': 'test'}) self.assertJSON(response, []) def test_expand(self): url = '/metrics/expand' response = self.app.get(url) self.assertJSON(response, {'errors': {'query': 'this parameter is required.'}}, status_code=400) response = self.app.get(url, query_string={'query': 'test'}) self.assertEqual(response.status_code, 200) self.assertEqual(json.loads(response.data.decode('utf-8')), {'results': []}) ## Instruction: Add test for noop routes ## Code After: from . import TestCase class MetricsTests(TestCase): def test_find(self): url = '/metrics/find' response = self.app.get(url) self.assertEqual(response.status_code, 400) response = self.app.get(url, query_string={'query': 'test'}) self.assertJSON(response, []) def test_expand(self): url = '/metrics/expand' response = self.app.get(url) self.assertJSON(response, {'errors': {'query': 'this parameter is required.'}}, status_code=400) response = self.app.get(url, query_string={'query': 'test'}) self.assertJSON(response, {'results': []}) def test_noop(self): url = '/dashboard/find' response = self.app.get(url) self.assertJSON(response, {'dashboards': []}) url = '/dashboard/load/foo' response = self.app.get(url) self.assertJSON(response, {'error': "Dashboard 'foo' does not exist."}, status_code=404) url = '/events/get_data' response = self.app.get(url) self.assertJSON(response, [])
... from . import TestCase ... response = self.app.get(url, query_string={'query': 'test'}) self.assertJSON(response, {'results': []}) def test_noop(self): url = '/dashboard/find' response = self.app.get(url) self.assertJSON(response, {'dashboards': []}) url = '/dashboard/load/foo' response = self.app.get(url) self.assertJSON(response, {'error': "Dashboard 'foo' does not exist."}, status_code=404) url = '/events/get_data' response = self.app.get(url) self.assertJSON(response, []) ...
375513808e3fa83ff23de942aeedbd0d9cc4d1c2
tests/test_h5py.py
tests/test_h5py.py
import h5py import bitshuffle.h5 import numpy import tempfile def test_is_h5py_correctly_installed(): """ If this test fails you probably need to install h5py from source manually: $ pip install --no-binary=h5py h5py """ f = h5py.File(tempfile.gettempdir() + '/h5testfile', "w") block_size = 0 dataset = f.create_dataset( "data", (100, 100, 100), compression=bitshuffle.h5.H5FILTER, compression_opts=(block_size, bitshuffle.h5.H5_COMPRESS_LZ4), dtype='float32', ) array = numpy.random.rand(100, 100, 100) array = array.astype('float32') dataset[:] = array f.close()
import h5py import hdf5plugin import numpy import tempfile def test_is_h5py_correctly_installed(): """ If this test fails you probably need to install h5py from source manually: $ pip install --no-binary=h5py h5py """ f = h5py.File(tempfile.gettempdir() + '/h5testfile', "w") block_size = 0 dataset = f.create_dataset( "data", (100, 100, 100), dtype='float32', **hdf5plugin.Bitshuffle(nelems=0, lz4=True) ) array = numpy.random.rand(100, 100, 100) array = array.astype('float32') dataset[:] = array f.close()
Change bitshuffle call to hdf5plugin
Change bitshuffle call to hdf5plugin
Python
bsd-3-clause
UCBerkeleySETI/blimpy,UCBerkeleySETI/blimpy
import h5py - import bitshuffle.h5 + import hdf5plugin import numpy import tempfile def test_is_h5py_correctly_installed(): """ If this test fails you probably need to install h5py from source manually: $ pip install --no-binary=h5py h5py """ f = h5py.File(tempfile.gettempdir() + '/h5testfile', "w") block_size = 0 dataset = f.create_dataset( "data", (100, 100, 100), - compression=bitshuffle.h5.H5FILTER, - compression_opts=(block_size, bitshuffle.h5.H5_COMPRESS_LZ4), dtype='float32', + **hdf5plugin.Bitshuffle(nelems=0, lz4=True) ) array = numpy.random.rand(100, 100, 100) array = array.astype('float32') dataset[:] = array f.close()
Change bitshuffle call to hdf5plugin
## Code Before: import h5py import bitshuffle.h5 import numpy import tempfile def test_is_h5py_correctly_installed(): """ If this test fails you probably need to install h5py from source manually: $ pip install --no-binary=h5py h5py """ f = h5py.File(tempfile.gettempdir() + '/h5testfile', "w") block_size = 0 dataset = f.create_dataset( "data", (100, 100, 100), compression=bitshuffle.h5.H5FILTER, compression_opts=(block_size, bitshuffle.h5.H5_COMPRESS_LZ4), dtype='float32', ) array = numpy.random.rand(100, 100, 100) array = array.astype('float32') dataset[:] = array f.close() ## Instruction: Change bitshuffle call to hdf5plugin ## Code After: import h5py import hdf5plugin import numpy import tempfile def test_is_h5py_correctly_installed(): """ If this test fails you probably need to install h5py from source manually: $ pip install --no-binary=h5py h5py """ f = h5py.File(tempfile.gettempdir() + '/h5testfile', "w") block_size = 0 dataset = f.create_dataset( "data", (100, 100, 100), dtype='float32', **hdf5plugin.Bitshuffle(nelems=0, lz4=True) ) array = numpy.random.rand(100, 100, 100) array = array.astype('float32') dataset[:] = array f.close()
// ... existing code ... import h5py import hdf5plugin import numpy // ... modified code ... (100, 100, 100), dtype='float32', **hdf5plugin.Bitshuffle(nelems=0, lz4=True) ) // ... rest of the code ...
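For context on the `**` splat in the new code: hdf5plugin's filter classes behave as mappings of the low-level `compression`/`compression_opts` keyword arguments, so unpacking one configures the dataset's filter in a single step. Roughly (a sketch of the equivalence, not library internals; the printed values are an assumption):

import hdf5plugin

opts = hdf5plugin.Bitshuffle(nelems=0, lz4=True)
# `**opts` supplies the registered filter id and its options to create_dataset,
# replacing the old explicit compression/compression_opts pair.
print(dict(opts))  # e.g. {'compression': 32008, 'compression_opts': (0, 2)}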
c0df1342b6625cdc2a205f2ba13ee201e8d0b02a
tests/conftest.py
tests/conftest.py
from __future__ import absolute_import import pytest import os import mock import json import app.mapping with open(os.path.join(os.path.dirname(__file__), 'fixtures/mappings/services.json')) as f: _services_mapping_definition = json.load(f) @pytest.fixture(scope="function") def services_mapping(): """Provide a services mapping fixture, and patch it into the global singleton getter.""" mock_services_mapping_getter_patch = mock.patch('app.mapping.get_services_mapping') mock_services_mapping_getter = mock_services_mapping_getter_patch.start() mock_services_mapping_getter.return_value = app.mapping.Mapping(_services_mapping_definition, 'services') yield mock_services_mapping_getter.return_value mock_services_mapping_getter_patch.stop()
from __future__ import absolute_import import pytest import os import mock import json import app.mapping with open(os.path.join(os.path.dirname(__file__), 'fixtures/mappings/services.json')) as f: _services_mapping_definition = json.load(f) @pytest.fixture(scope="function") def services_mapping(): """Provide a services mapping fixture, and patch it into the global singleton getter.""" with mock.patch('app.mapping.get_services_mapping') as mock_services_mapping_getter: mock_services_mapping_getter.return_value = app.mapping.Mapping(_services_mapping_definition, 'services') yield mock_services_mapping_getter.return_value
Use with block to start/stop the patch context manager.
Use with block to start/stop the patch context manager.

- this is less code, and hopefully it is just as clear why we need to 'yield' rather than just 'return'.

https://trello.com/c/OpWI068M/380-after-g9-go-live-removal-of-old-filters-from-search-api-mapping
Python
mit
alphagov/digitalmarketplace-search-api,alphagov/digitalmarketplace-search-api
from __future__ import absolute_import import pytest import os import mock import json import app.mapping with open(os.path.join(os.path.dirname(__file__), 'fixtures/mappings/services.json')) as f: _services_mapping_definition = json.load(f) @pytest.fixture(scope="function") def services_mapping(): """Provide a services mapping fixture, and patch it into the global singleton getter.""" + with mock.patch('app.mapping.get_services_mapping') as mock_services_mapping_getter: - mock_services_mapping_getter_patch = mock.patch('app.mapping.get_services_mapping') - mock_services_mapping_getter = mock_services_mapping_getter_patch.start() - mock_services_mapping_getter.return_value = app.mapping.Mapping(_services_mapping_definition, 'services') + mock_services_mapping_getter.return_value = app.mapping.Mapping(_services_mapping_definition, 'services') + yield mock_services_mapping_getter.return_value - yield mock_services_mapping_getter.return_value - - mock_services_mapping_getter_patch.stop() -
Use with block to start/stop the patch context manager.
## Code Before: from __future__ import absolute_import import pytest import os import mock import json import app.mapping with open(os.path.join(os.path.dirname(__file__), 'fixtures/mappings/services.json')) as f: _services_mapping_definition = json.load(f) @pytest.fixture(scope="function") def services_mapping(): """Provide a services mapping fixture, and patch it into the global singleton getter.""" mock_services_mapping_getter_patch = mock.patch('app.mapping.get_services_mapping') mock_services_mapping_getter = mock_services_mapping_getter_patch.start() mock_services_mapping_getter.return_value = app.mapping.Mapping(_services_mapping_definition, 'services') yield mock_services_mapping_getter.return_value mock_services_mapping_getter_patch.stop() ## Instruction: Use with block to start/stop the patch context manager. ## Code After: from __future__ import absolute_import import pytest import os import mock import json import app.mapping with open(os.path.join(os.path.dirname(__file__), 'fixtures/mappings/services.json')) as f: _services_mapping_definition = json.load(f) @pytest.fixture(scope="function") def services_mapping(): """Provide a services mapping fixture, and patch it into the global singleton getter.""" with mock.patch('app.mapping.get_services_mapping') as mock_services_mapping_getter: mock_services_mapping_getter.return_value = app.mapping.Mapping(_services_mapping_definition, 'services') yield mock_services_mapping_getter.return_value
# ... existing code ... with mock.patch('app.mapping.get_services_mapping') as mock_services_mapping_getter: mock_services_mapping_getter.return_value = app.mapping.Mapping(_services_mapping_definition, 'services') yield mock_services_mapping_getter.return_value # ... rest of the code ...
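The `yield` is load-bearing here: a plain `return` would leave the `with` block immediately, undoing the patch before any test ran. Yielding suspends the fixture inside the block, so the patch stays active for the test and is reverted on teardown. Schematically (a generic sketch, not project code):

import mock
import pytest

@pytest.fixture
def frozen_time():
    with mock.patch('time.time') as fake_time:
        fake_time.return_value = 0.0
        yield fake_time      # the test body executes at this point
    # exiting the with-block afterwards restores the real time.time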
75a27c416effd2958182b1401e49d6613a28857d
sana_builder/webapp/models.py
sana_builder/webapp/models.py
from django.db import models from django.contrib.auth.models import User class Procedure(models.Model): title = models.CharField(max_length=50) author = models.CharField(max_length=50) uuid = models.IntegerField(null=True) version = models.CharField(max_length=50, null=True) owner = models.ForeignKey(User, unique=True) class Page(models.Model): procedure = models.ForeignKey(Procedure)
from django.db import models from django.contrib.auth.models import User class Procedure(models.Model): title = models.CharField(max_length=50) author = models.CharField(max_length=50) uuid = models.IntegerField(null=True, unique=True) version = models.CharField(max_length=50, null=True) owner = models.ForeignKey(User, unique=True) class Page(models.Model): procedure = models.ForeignKey(Procedure)
Make uuid on procedures unique
Make uuid on procedures unique
Python
bsd-3-clause
SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder
from django.db import models from django.contrib.auth.models import User class Procedure(models.Model): title = models.CharField(max_length=50) author = models.CharField(max_length=50) - uuid = models.IntegerField(null=True) + uuid = models.IntegerField(null=True, unique=True) version = models.CharField(max_length=50, null=True) owner = models.ForeignKey(User, unique=True) class Page(models.Model): procedure = models.ForeignKey(Procedure)
Make uuid on procedures unique
## Code Before: from django.db import models from django.contrib.auth.models import User class Procedure(models.Model): title = models.CharField(max_length=50) author = models.CharField(max_length=50) uuid = models.IntegerField(null=True) version = models.CharField(max_length=50, null=True) owner = models.ForeignKey(User, unique=True) class Page(models.Model): procedure = models.ForeignKey(Procedure) ## Instruction: Make uuid on procedures unique ## Code After: from django.db import models from django.contrib.auth.models import User class Procedure(models.Model): title = models.CharField(max_length=50) author = models.CharField(max_length=50) uuid = models.IntegerField(null=True, unique=True) version = models.CharField(max_length=50, null=True) owner = models.ForeignKey(User, unique=True) class Page(models.Model): procedure = models.ForeignKey(Procedure)
# ... existing code ... author = models.CharField(max_length=50) uuid = models.IntegerField(null=True, unique=True) version = models.CharField(max_length=50, null=True) # ... rest of the code ...
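A practical caveat behind this one-word change: adding `unique=True` to an existing column requires a schema migration, and that migration fails if duplicate `uuid` values are already stored, so such rows must be deduplicated first. A hypothetical pre-check with the Django ORM (illustrative only; the import path is an assumption based on the file layout above):

from django.db.models import Count
from webapp.models import Procedure

dupes = (Procedure.objects.values('uuid')
         .annotate(n=Count('id'))
         .filter(uuid__isnull=False, n__gt=1))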
9729a77b9b8cbfe8a6960ded4b5931e3ed64fe10
discover/__init__.py
discover/__init__.py
import logging LOG_FORMAT = '%(asctime)s [%(name)s] %(levelname)s %(message)s' LOG_DATE = '%Y-%m-%d %I:%M:%S %p' logging.basicConfig(format=LOG_FORMAT, datefmt=LOG_DATE, level=logging.WARN) logger = logging.getLogger('yoda-discover') logger.level = logging.INFO
import logging LOG_FORMAT = '[%(name)s] %(levelname)s %(message)s' logging.basicConfig(format=LOG_FORMAT, level=logging.WARN) logger = logging.getLogger('yoda-discover') logger.level = logging.INFO
Remove date from log formatting (handled by syslog)
Remove date from log formatting (handled by syslog)
Python
mit
totem/yoda-discover
import logging - LOG_FORMAT = '%(asctime)s [%(name)s] %(levelname)s %(message)s' + LOG_FORMAT = '[%(name)s] %(levelname)s %(message)s' - LOG_DATE = '%Y-%m-%d %I:%M:%S %p' - - logging.basicConfig(format=LOG_FORMAT, datefmt=LOG_DATE, level=logging.WARN) + logging.basicConfig(format=LOG_FORMAT, level=logging.WARN) logger = logging.getLogger('yoda-discover') logger.level = logging.INFO
Remove date from log formatting (handled by syslog)
## Code Before:
import logging

LOG_FORMAT = '%(asctime)s [%(name)s] %(levelname)s %(message)s'
LOG_DATE = '%Y-%m-%d %I:%M:%S %p'

logging.basicConfig(format=LOG_FORMAT, datefmt=LOG_DATE, level=logging.WARN)
logger = logging.getLogger('yoda-discover')
logger.level = logging.INFO

## Instruction:
Remove date from log formatting (handled by syslog)

## Code After:
import logging

LOG_FORMAT = '[%(name)s] %(levelname)s %(message)s'
logging.basicConfig(format=LOG_FORMAT, level=logging.WARN)
logger = logging.getLogger('yoda-discover')
logger.level = logging.INFO

...
LOG_FORMAT = '[%(name)s] %(levelname)s %(message)s'
logging.basicConfig(format=LOG_FORMAT, level=logging.WARN)
logger = logging.getLogger('yoda-discover')
...
ae55577e4cea64a0052eb0c219641435c9c0210c
samples/model-builder/init_sample.py
samples/model-builder/init_sample.py
from typing import Optional

from google.auth import credentials as auth_credentials
from google.cloud import aiplatform


# [START aiplatform_sdk_init_sample]
def init_sample(
    project: Optional[str] = None,
    location: Optional[str] = None,
    experiment: Optional[str] = None,
    staging_bucket: Optional[str] = None,
    credentials: Optional[auth_credentials.Credentials] = None,
    encryption_spec_key_name: Optional[str] = None,
):
    aiplatform.init(
        project=project,
        location=location,
        experiment=experiment,
        staging_bucket=staging_bucket,
        credentials=credentials,
        encryption_spec_key_name=encryption_spec_key_name,
    )


# [END aiplatform_sdk_init_sample]

from typing import Optional

from google.auth import credentials as auth_credentials


# [START aiplatform_sdk_init_sample]
def init_sample(
    project: Optional[str] = None,
    location: Optional[str] = None,
    experiment: Optional[str] = None,
    staging_bucket: Optional[str] = None,
    credentials: Optional[auth_credentials.Credentials] = None,
    encryption_spec_key_name: Optional[str] = None,
):

    from google.cloud import aiplatform

    aiplatform.init(
        project=project,
        location=location,
        experiment=experiment,
        staging_bucket=staging_bucket,
        credentials=credentials,
        encryption_spec_key_name=encryption_spec_key_name,
    )


# [END aiplatform_sdk_init_sample]
Update init sample to import inside of function.
chore: Update init sample to import inside of function. PiperOrigin-RevId: 485079470
Python
apache-2.0
googleapis/python-aiplatform,googleapis/python-aiplatform
  from typing import Optional

  from google.auth import credentials as auth_credentials
- from google.cloud import aiplatform


  # [START aiplatform_sdk_init_sample]
  def init_sample(
      project: Optional[str] = None,
      location: Optional[str] = None,
      experiment: Optional[str] = None,
      staging_bucket: Optional[str] = None,
      credentials: Optional[auth_credentials.Credentials] = None,
      encryption_spec_key_name: Optional[str] = None,
  ):
+
+     from google.cloud import aiplatform
+
      aiplatform.init(
          project=project,
          location=location,
          experiment=experiment,
          staging_bucket=staging_bucket,
          credentials=credentials,
          encryption_spec_key_name=encryption_spec_key_name,
      )


  # [END aiplatform_sdk_init_sample]
Update init sample to import inside of function.
## Code Before:
from typing import Optional

from google.auth import credentials as auth_credentials
from google.cloud import aiplatform


# [START aiplatform_sdk_init_sample]
def init_sample(
    project: Optional[str] = None,
    location: Optional[str] = None,
    experiment: Optional[str] = None,
    staging_bucket: Optional[str] = None,
    credentials: Optional[auth_credentials.Credentials] = None,
    encryption_spec_key_name: Optional[str] = None,
):
    aiplatform.init(
        project=project,
        location=location,
        experiment=experiment,
        staging_bucket=staging_bucket,
        credentials=credentials,
        encryption_spec_key_name=encryption_spec_key_name,
    )


# [END aiplatform_sdk_init_sample]

## Instruction:
Update init sample to import inside of function.

## Code After:
from typing import Optional

from google.auth import credentials as auth_credentials


# [START aiplatform_sdk_init_sample]
def init_sample(
    project: Optional[str] = None,
    location: Optional[str] = None,
    experiment: Optional[str] = None,
    staging_bucket: Optional[str] = None,
    credentials: Optional[auth_credentials.Credentials] = None,
    encryption_spec_key_name: Optional[str] = None,
):

    from google.cloud import aiplatform

    aiplatform.init(
        project=project,
        location=location,
        experiment=experiment,
        staging_bucket=staging_bucket,
        credentials=credentials,
        encryption_spec_key_name=encryption_spec_key_name,
    )


# [END aiplatform_sdk_init_sample]

# ... existing code ...
from google.auth import credentials as auth_credentials


# ... modified code ...
):

    from google.cloud import aiplatform

    aiplatform.init(
# ... rest of the code ...
ff4b34cda7c0b5bc516d9f9e3689818000301336
tests/test_planner.py
tests/test_planner.py
import cutplanner
import unittest

class TestPlanner(unittest.TestCase):

    def setUp(self):
        sizes = [80, 120]
        needed = [10, 25, 75]
        loss = 0.25
        self.planner = cutplanner.Planner(sizes, needed, loss)

    def test_get_largest(self):
        largest = self.planner.get_largest_stock()
        self.assertEqual(largest, 120)

if __name__ == '__main__':
    unittest.main()

import cutplanner
import unittest

class TestPlanner(unittest.TestCase):

    def setUp(self):
        sizes = [80, 120]
        needed = [10, 25, 75]
        loss = 0.25
        self.planner = cutplanner.Planner(sizes, needed, loss)

    def test_largest_stock(self):
        largest = self.planner.largest_stock
        self.assertEqual(largest, 120)

if __name__ == '__main__':
    unittest.main()
Update test for largest stock
Update test for largest stock
Python
mit
alanc10n/py-cutplanner
  import cutplanner
  import unittest

  class TestPlanner(unittest.TestCase):

      def setUp(self):
          sizes = [80, 120]
          needed = [10, 25, 75]
          loss = 0.25
          self.planner = cutplanner.Planner(sizes, needed, loss)

-     def test_get_largest(self):
+     def test_largest_stock(self):
-         largest = self.planner.get_largest_stock()
+         largest = self.planner.largest_stock
          self.assertEqual(largest, 120)

  if __name__ == '__main__':
      unittest.main()
Update test for largest stock
## Code Before:
import cutplanner
import unittest

class TestPlanner(unittest.TestCase):

    def setUp(self):
        sizes = [80, 120]
        needed = [10, 25, 75]
        loss = 0.25
        self.planner = cutplanner.Planner(sizes, needed, loss)

    def test_get_largest(self):
        largest = self.planner.get_largest_stock()
        self.assertEqual(largest, 120)

if __name__ == '__main__':
    unittest.main()

## Instruction:
Update test for largest stock

## Code After:
import cutplanner
import unittest

class TestPlanner(unittest.TestCase):

    def setUp(self):
        sizes = [80, 120]
        needed = [10, 25, 75]
        loss = 0.25
        self.planner = cutplanner.Planner(sizes, needed, loss)

    def test_largest_stock(self):
        largest = self.planner.largest_stock
        self.assertEqual(largest, 120)

if __name__ == '__main__':
    unittest.main()

// ... existing code ...
    def test_largest_stock(self):
        largest = self.planner.largest_stock
        self.assertEqual(largest, 120)
// ... rest of the code ...
d1a2a4c2ee7fda2bfde369bb6311719e72c75a3d
corehq/blobs/tasks.py
corehq/blobs/tasks.py
from __future__ import absolute_import
from datetime import datetime

from celery.task import periodic_task
from celery.schedules import crontab

from corehq.util.datadog.gauges import datadog_counter
from corehq.blobs.models import BlobExpiration
from corehq.blobs import get_blob_db


@periodic_task(run_every=crontab(minute=0, hour='0,12'))
def delete_expired_blobs():
    blob_expirations = BlobExpiration.objects.filter(expires_on__lt=_utcnow(), deleted=False)

    db = get_blob_db()
    paths = []
    bytes_deleted = 0
    for blob_expiration in blob_expirations:
        paths.append(db.get_path(blob_expiration.identifier, blob_expiration.bucket))
        bytes_deleted += blob_expiration.length

    db.bulk_delete(paths)
    blob_expirations.update(deleted=True)

    datadog_counter(
        'commcare.temp_blobs.bytes_deleted',
        value=bytes_deleted,
    )

    return bytes_deleted


def _utcnow():
    return datetime.utcnow()

from __future__ import absolute_import
from datetime import datetime

from celery.task import periodic_task
from celery.schedules import crontab

from corehq.util.datadog.gauges import datadog_counter
from corehq.blobs.models import BlobExpiration
from corehq.blobs import get_blob_db


@periodic_task(run_every=crontab(minute=0, hour='0,12'))
def delete_expired_blobs():
    blob_expirations = BlobExpiration.objects.filter(expires_on__lt=_utcnow(), deleted=False)

    db = get_blob_db()
    total_bytes_deleted = 0
    while blob_expirations.exists():
        paths = []
        deleted_ids = []
        bytes_deleted = 0
        for blob_expiration in blob_expirations[:1000]:
            paths.append(db.get_path(blob_expiration.identifier, blob_expiration.bucket))
            deleted_ids.append(blob_expiration.id)
            bytes_deleted += blob_expiration.length

        db.bulk_delete(paths)
        BlobExpiration.objects.filter(id__in=deleted_ids).update(deleted=True)

        datadog_counter(
            'commcare.temp_blobs.bytes_deleted',
            value=bytes_deleted,
        )
        total_bytes_deleted += bytes_deleted

    return total_bytes_deleted


def _utcnow():
    return datetime.utcnow()
Delete expired blobs in batches
Delete expired blobs in batches
Python
bsd-3-clause
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
  from __future__ import absolute_import
  from datetime import datetime

  from celery.task import periodic_task
  from celery.schedules import crontab

  from corehq.util.datadog.gauges import datadog_counter
  from corehq.blobs.models import BlobExpiration
  from corehq.blobs import get_blob_db


  @periodic_task(run_every=crontab(minute=0, hour='0,12'))
  def delete_expired_blobs():
      blob_expirations = BlobExpiration.objects.filter(expires_on__lt=_utcnow(), deleted=False)

      db = get_blob_db()
+     total_bytes_deleted = 0
+     while blob_expirations.exists():
-     paths = []
+         paths = []
+         deleted_ids = []
-     bytes_deleted = 0
+         bytes_deleted = 0
-     for blob_expiration in blob_expirations:
+         for blob_expiration in blob_expirations[:1000]:
-         paths.append(db.get_path(blob_expiration.identifier, blob_expiration.bucket))
+             paths.append(db.get_path(blob_expiration.identifier, blob_expiration.bucket))
+             deleted_ids.append(blob_expiration.id)
-         bytes_deleted += blob_expiration.length
+             bytes_deleted += blob_expiration.length

-     db.bulk_delete(paths)
+         db.bulk_delete(paths)
-     blob_expirations.update(deleted=True)
+         BlobExpiration.objects.filter(id__in=deleted_ids).update(deleted=True)

-     datadog_counter(
+         datadog_counter(
-         'commcare.temp_blobs.bytes_deleted',
+             'commcare.temp_blobs.bytes_deleted',
-         value=bytes_deleted,
+             value=bytes_deleted,
-     )
+         )
+         total_bytes_deleted += bytes_deleted
+
-     return bytes_deleted
+     return total_bytes_deleted


  def _utcnow():
      return datetime.utcnow()
Delete expired blobs in batches
## Code Before:
from __future__ import absolute_import
from datetime import datetime

from celery.task import periodic_task
from celery.schedules import crontab

from corehq.util.datadog.gauges import datadog_counter
from corehq.blobs.models import BlobExpiration
from corehq.blobs import get_blob_db


@periodic_task(run_every=crontab(minute=0, hour='0,12'))
def delete_expired_blobs():
    blob_expirations = BlobExpiration.objects.filter(expires_on__lt=_utcnow(), deleted=False)

    db = get_blob_db()
    paths = []
    bytes_deleted = 0
    for blob_expiration in blob_expirations:
        paths.append(db.get_path(blob_expiration.identifier, blob_expiration.bucket))
        bytes_deleted += blob_expiration.length

    db.bulk_delete(paths)
    blob_expirations.update(deleted=True)

    datadog_counter(
        'commcare.temp_blobs.bytes_deleted',
        value=bytes_deleted,
    )

    return bytes_deleted


def _utcnow():
    return datetime.utcnow()

## Instruction:
Delete expired blobs in batches

## Code After:
from __future__ import absolute_import
from datetime import datetime

from celery.task import periodic_task
from celery.schedules import crontab

from corehq.util.datadog.gauges import datadog_counter
from corehq.blobs.models import BlobExpiration
from corehq.blobs import get_blob_db


@periodic_task(run_every=crontab(minute=0, hour='0,12'))
def delete_expired_blobs():
    blob_expirations = BlobExpiration.objects.filter(expires_on__lt=_utcnow(), deleted=False)

    db = get_blob_db()
    total_bytes_deleted = 0
    while blob_expirations.exists():
        paths = []
        deleted_ids = []
        bytes_deleted = 0
        for blob_expiration in blob_expirations[:1000]:
            paths.append(db.get_path(blob_expiration.identifier, blob_expiration.bucket))
            deleted_ids.append(blob_expiration.id)
            bytes_deleted += blob_expiration.length

        db.bulk_delete(paths)
        BlobExpiration.objects.filter(id__in=deleted_ids).update(deleted=True)

        datadog_counter(
            'commcare.temp_blobs.bytes_deleted',
            value=bytes_deleted,
        )
        total_bytes_deleted += bytes_deleted

    return total_bytes_deleted


def _utcnow():
    return datetime.utcnow()

# ... existing code ...
    db = get_blob_db()
    total_bytes_deleted = 0
    while blob_expirations.exists():
        paths = []
        deleted_ids = []
        bytes_deleted = 0
        for blob_expiration in blob_expirations[:1000]:
            paths.append(db.get_path(blob_expiration.identifier, blob_expiration.bucket))
            deleted_ids.append(blob_expiration.id)
            bytes_deleted += blob_expiration.length

        db.bulk_delete(paths)
        BlobExpiration.objects.filter(id__in=deleted_ids).update(deleted=True)

        datadog_counter(
            'commcare.temp_blobs.bytes_deleted',
            value=bytes_deleted,
        )
        total_bytes_deleted += bytes_deleted

    return total_bytes_deleted
# ... rest of the code ...
7ddb1b3d0139ef8b6a7badcb2c6bef6a0e35e88a
hooks/post_gen_project.py
hooks/post_gen_project.py
import os

package_dir = '{{cookiecutter.repo_name}}'

old_kv_file = os.path.join(package_dir, '{{cookiecutter.app_class_name}}.kv')

lower_app_class_name = '{{cookiecutter.app_class_name}}'.lower()
if (lower_app_class_name.endswith('app')):
    lower_app_class_name = lower_app_class_name[:-3]

new_kv_file = os.path.join(package_dir, '{}.kv'.format(lower_app_class_name))

os.rename(old_kv_file, new_kv_file)

def rename_kv_file():
    """Rename the generated kv file to be compatible with the original kivy kv
    file detection of `App.load_kv`.
    """
    import os

    package_dir = '{{cookiecutter.repo_name}}'

    old_kv_file = os.path.join(
        package_dir, '{{cookiecutter.app_class_name}}.kv'
    )

    lower_app_class_name = '{{cookiecutter.app_class_name}}'.lower()
    if (lower_app_class_name.endswith('app')):
        lower_app_class_name = lower_app_class_name[:-3]

    new_kv_file = os.path.join(
        package_dir, '{}.kv'.format(lower_app_class_name)
    )

    os.rename(old_kv_file, new_kv_file)


rename_kv_file()
Use a function to rename the kv file in hooks
Use a function to rename the kv file in hooks
Python
mit
hackebrot/cookiedozer,hackebrot/cookiedozer
-
- import os
+ def rename_kv_file():
+     """Rename the generated kv file to be compatible with the original kivy kv
+     file detection of `App.load_kv`.
+     """
+     import os

- package_dir = '{{cookiecutter.repo_name}}'
+     package_dir = '{{cookiecutter.repo_name}}'

- old_kv_file = os.path.join(package_dir, '{{cookiecutter.app_class_name}}.kv')
+     old_kv_file = os.path.join(
+         package_dir, '{{cookiecutter.app_class_name}}.kv'
+     )

- lower_app_class_name = '{{cookiecutter.app_class_name}}'.lower()
+     lower_app_class_name = '{{cookiecutter.app_class_name}}'.lower()
- if (lower_app_class_name.endswith('app')):
+     if (lower_app_class_name.endswith('app')):
-     lower_app_class_name = lower_app_class_name[:-3]
+         lower_app_class_name = lower_app_class_name[:-3]

- new_kv_file = os.path.join(package_dir, '{}.kv'.format(lower_app_class_name))
+     new_kv_file = os.path.join(
+         package_dir, '{}.kv'.format(lower_app_class_name)
+     )

- os.rename(old_kv_file, new_kv_file)
+     os.rename(old_kv_file, new_kv_file)
+
+ rename_kv_file()
+
Use a function to rename the kv file in hooks
## Code Before:
import os

package_dir = '{{cookiecutter.repo_name}}'

old_kv_file = os.path.join(package_dir, '{{cookiecutter.app_class_name}}.kv')

lower_app_class_name = '{{cookiecutter.app_class_name}}'.lower()
if (lower_app_class_name.endswith('app')):
    lower_app_class_name = lower_app_class_name[:-3]

new_kv_file = os.path.join(package_dir, '{}.kv'.format(lower_app_class_name))

os.rename(old_kv_file, new_kv_file)

## Instruction:
Use a function to rename the kv file in hooks

## Code After:
def rename_kv_file():
    """Rename the generated kv file to be compatible with the original kivy kv
    file detection of `App.load_kv`.
    """
    import os

    package_dir = '{{cookiecutter.repo_name}}'

    old_kv_file = os.path.join(
        package_dir, '{{cookiecutter.app_class_name}}.kv'
    )

    lower_app_class_name = '{{cookiecutter.app_class_name}}'.lower()
    if (lower_app_class_name.endswith('app')):
        lower_app_class_name = lower_app_class_name[:-3]

    new_kv_file = os.path.join(
        package_dir, '{}.kv'.format(lower_app_class_name)
    )

    os.rename(old_kv_file, new_kv_file)


rename_kv_file()

# ... existing code ...

# ... modified code ...
def rename_kv_file():
    """Rename the generated kv file to be compatible with the original kivy kv
    file detection of `App.load_kv`.
    """
    import os

    package_dir = '{{cookiecutter.repo_name}}'

    old_kv_file = os.path.join(
        package_dir, '{{cookiecutter.app_class_name}}.kv'
    )

    lower_app_class_name = '{{cookiecutter.app_class_name}}'.lower()
    if (lower_app_class_name.endswith('app')):
        lower_app_class_name = lower_app_class_name[:-3]

    new_kv_file = os.path.join(
        package_dir, '{}.kv'.format(lower_app_class_name)
    )

    os.rename(old_kv_file, new_kv_file)


rename_kv_file()
# ... rest of the code ...
c9cc5585e030951a09687c6a61a489ec51f83446
cr2/plotter/__init__.py
cr2/plotter/__init__.py
"""Init Module for the Plotter Code""" import pandas as pd from LinePlot import LinePlot
"""Init Module for the Plotter Code""" import pandas as pd from LinePlot import LinePlot import AttrConf def register_forwarding_arg(arg_name): """Allows the user to register args to be forwarded to matplotlib """ if arg_name not in AttrConf.ARGS_TO_FORWARD: AttrConf.ARGS_TO_FORWARD.append(arg_name) def unregister_forwarding_arg(arg_name): """Unregisters arg_name from being passed to plotter matplotlib calls """ try: AttrConf.ARGS_TO_FORWARD.remove(arg_name) except ValueError: pass
Enable user specified arg forwarding to matplotlib
plotter: Enable user specified arg forwarding to matplotlib This change allows the user to register args for forwarding to matplotlib and also unregister the same. Change-Id: If53dab43dd4a2f530b3d1faf35582206ac925740 Signed-off-by: Kapileshwar Singh <[email protected]>
Python
apache-2.0
JaviMerino/trappy,joelagnel/trappy,bjackman/trappy,derkling/trappy,ARM-software/trappy,sinkap/trappy,JaviMerino/trappy,joelagnel/trappy,ARM-software/trappy,derkling/trappy,bjackman/trappy,sinkap/trappy,ARM-software/trappy,ARM-software/trappy,bjackman/trappy,sinkap/trappy,joelagnel/trappy,sinkap/trappy,JaviMerino/trappy,bjackman/trappy,derkling/trappy,joelagnel/trappy
"""Init Module for the Plotter Code""" import pandas as pd from LinePlot import LinePlot + import AttrConf + + def register_forwarding_arg(arg_name): + """Allows the user to register args to + be forwarded to matplotlib + """ + if arg_name not in AttrConf.ARGS_TO_FORWARD: + AttrConf.ARGS_TO_FORWARD.append(arg_name) + + def unregister_forwarding_arg(arg_name): + """Unregisters arg_name from being passed to + plotter matplotlib calls + """ + try: + AttrConf.ARGS_TO_FORWARD.remove(arg_name) + except ValueError: + pass +
Enable user specified arg forwarding to matplotlib
## Code Before:
"""Init Module for the Plotter Code"""

import pandas as pd
from LinePlot import LinePlot

## Instruction:
Enable user specified arg forwarding to matplotlib

## Code After:
"""Init Module for the Plotter Code"""

import pandas as pd
from LinePlot import LinePlot
import AttrConf

def register_forwarding_arg(arg_name):
    """Allows the user to register args to
    be forwarded to matplotlib
    """
    if arg_name not in AttrConf.ARGS_TO_FORWARD:
        AttrConf.ARGS_TO_FORWARD.append(arg_name)

def unregister_forwarding_arg(arg_name):
    """Unregisters arg_name from being passed to
    plotter matplotlib calls
    """
    try:
        AttrConf.ARGS_TO_FORWARD.remove(arg_name)
    except ValueError:
        pass

...
from LinePlot import LinePlot
import AttrConf

def register_forwarding_arg(arg_name):
    """Allows the user to register args to
    be forwarded to matplotlib
    """
    if arg_name not in AttrConf.ARGS_TO_FORWARD:
        AttrConf.ARGS_TO_FORWARD.append(arg_name)

def unregister_forwarding_arg(arg_name):
    """Unregisters arg_name from being passed to
    plotter matplotlib calls
    """
    try:
        AttrConf.ARGS_TO_FORWARD.remove(arg_name)
    except ValueError:
        pass
...
21651120925cc3e51aeada4eac4dbfaa5bf98fae
src/header_filter/__init__.py
src/header_filter/__init__.py
from header_filter.matchers import Header  # noqa: F401
from header_filter.middleware import HeaderFilterMiddleware  # noqa: F401
from header_filter.rules import Enforce, Forbid  # noqa: F401

from header_filter.matchers import Header, HeaderRegexp  # noqa: F401
from header_filter.middleware import HeaderFilterMiddleware  # noqa: F401
from header_filter.rules import Enforce, Forbid  # noqa: F401
Allow HeaderRegexp to be imported directly from header_filter package.
Allow HeaderRegexp to be imported directly from header_filter package.
Python
mit
sanjioh/django-header-filter
- from header_filter.matchers import Header  # noqa: F401
+ from header_filter.matchers import Header, HeaderRegexp  # noqa: F401
  from header_filter.middleware import HeaderFilterMiddleware  # noqa: F401
  from header_filter.rules import Enforce, Forbid  # noqa: F401
Allow HeaderRegexp to be imported directly from header_filter package.
## Code Before:
from header_filter.matchers import Header  # noqa: F401
from header_filter.middleware import HeaderFilterMiddleware  # noqa: F401
from header_filter.rules import Enforce, Forbid  # noqa: F401

## Instruction:
Allow HeaderRegexp to be imported directly from header_filter package.

## Code After:
from header_filter.matchers import Header, HeaderRegexp  # noqa: F401
from header_filter.middleware import HeaderFilterMiddleware  # noqa: F401
from header_filter.rules import Enforce, Forbid  # noqa: F401

// ... existing code ...
from header_filter.matchers import Header, HeaderRegexp  # noqa: F401
from header_filter.middleware import HeaderFilterMiddleware  # noqa: F401
// ... rest of the code ...
cc08888a527dac321f88cbe9da27508aee62e51e
examples/lab/main.py
examples/lab/main.py
import os

from jinja2 import FileSystemLoader
from notebook.base.handlers import IPythonHandler, FileFindHandler
from notebook.notebookapp import NotebookApp
from traitlets import Unicode


class ExampleHandler(IPythonHandler):
    """Handle requests between the main app page and notebook server."""

    def get(self):
        """Get the main page for the application's interface."""
        return self.write(self.render_template("index.html",
            static=self.static_url, base_url=self.base_url))

    def get_template(self, name):
        loader = FileSystemLoader(os.getcwd())
        return loader.load(self.settings['jinja2_env'], name)


class ExampleApp(NotebookApp):

    default_url = Unicode('/example')

    def init_webapp(self):
        """initialize tornado webapp and httpserver.
        """
        super(ExampleApp, self).init_webapp()
        default_handlers = [
            (r'/example/?', ExampleHandler),
            (r"/example/(.*)", FileFindHandler, {'path': 'build'}),
        ]
        self.web_app.add_handlers(".*$", default_handlers)


if __name__ == '__main__':
    ExampleApp.launch_instance()

import os

from jinja2 import FileSystemLoader
from notebook.base.handlers import IPythonHandler, FileFindHandler
from notebook.notebookapp import NotebookApp
from traitlets import Unicode


class ExampleHandler(IPythonHandler):
    """Handle requests between the main app page and notebook server."""

    def get(self):
        """Get the main page for the application's interface."""
        return self.write(self.render_template("index.html",
            static=self.static_url, base_url=self.base_url,
            terminals_available=True))

    def get_template(self, name):
        loader = FileSystemLoader(os.getcwd())
        return loader.load(self.settings['jinja2_env'], name)


class ExampleApp(NotebookApp):

    default_url = Unicode('/example')

    def init_webapp(self):
        """initialize tornado webapp and httpserver.
        """
        super(ExampleApp, self).init_webapp()
        default_handlers = [
            (r'/example/?', ExampleHandler),
            (r"/example/(.*)", FileFindHandler, {'path': 'build'}),
        ]
        self.web_app.add_handlers(".*$", default_handlers)


if __name__ == '__main__':
    ExampleApp.launch_instance()
Enable terminals in the lab example
Enable terminals in the lab example
Python
bsd-3-clause
eskirk/jupyterlab,eskirk/jupyterlab,jupyter/jupyterlab,eskirk/jupyterlab,jupyter/jupyterlab,jupyter/jupyterlab,charnpreetsingh185/jupyterlab,charnpreetsingh185/jupyterlab,charnpreetsingh185/jupyterlab,jupyter/jupyterlab,jupyter/jupyterlab,eskirk/jupyterlab,charnpreetsingh185/jupyterlab,eskirk/jupyterlab,charnpreetsingh185/jupyterlab
  import os

  from jinja2 import FileSystemLoader
  from notebook.base.handlers import IPythonHandler, FileFindHandler
  from notebook.notebookapp import NotebookApp
  from traitlets import Unicode


  class ExampleHandler(IPythonHandler):
      """Handle requests between the main app page and notebook server."""

      def get(self):
          """Get the main page for the application's interface."""
          return self.write(self.render_template("index.html",
-             static=self.static_url, base_url=self.base_url))
+             static=self.static_url, base_url=self.base_url,
+             terminals_available=True))

      def get_template(self, name):
          loader = FileSystemLoader(os.getcwd())
          return loader.load(self.settings['jinja2_env'], name)


  class ExampleApp(NotebookApp):

      default_url = Unicode('/example')

      def init_webapp(self):
          """initialize tornado webapp and httpserver.
          """
          super(ExampleApp, self).init_webapp()
          default_handlers = [
              (r'/example/?', ExampleHandler),
              (r"/example/(.*)", FileFindHandler, {'path': 'build'}),
          ]
          self.web_app.add_handlers(".*$", default_handlers)


  if __name__ == '__main__':
      ExampleApp.launch_instance()
Enable terminals in the lab example
## Code Before:
import os

from jinja2 import FileSystemLoader
from notebook.base.handlers import IPythonHandler, FileFindHandler
from notebook.notebookapp import NotebookApp
from traitlets import Unicode


class ExampleHandler(IPythonHandler):
    """Handle requests between the main app page and notebook server."""

    def get(self):
        """Get the main page for the application's interface."""
        return self.write(self.render_template("index.html",
            static=self.static_url, base_url=self.base_url))

    def get_template(self, name):
        loader = FileSystemLoader(os.getcwd())
        return loader.load(self.settings['jinja2_env'], name)


class ExampleApp(NotebookApp):

    default_url = Unicode('/example')

    def init_webapp(self):
        """initialize tornado webapp and httpserver.
        """
        super(ExampleApp, self).init_webapp()
        default_handlers = [
            (r'/example/?', ExampleHandler),
            (r"/example/(.*)", FileFindHandler, {'path': 'build'}),
        ]
        self.web_app.add_handlers(".*$", default_handlers)


if __name__ == '__main__':
    ExampleApp.launch_instance()

## Instruction:
Enable terminals in the lab example

## Code After:
import os

from jinja2 import FileSystemLoader
from notebook.base.handlers import IPythonHandler, FileFindHandler
from notebook.notebookapp import NotebookApp
from traitlets import Unicode


class ExampleHandler(IPythonHandler):
    """Handle requests between the main app page and notebook server."""

    def get(self):
        """Get the main page for the application's interface."""
        return self.write(self.render_template("index.html",
            static=self.static_url, base_url=self.base_url,
            terminals_available=True))

    def get_template(self, name):
        loader = FileSystemLoader(os.getcwd())
        return loader.load(self.settings['jinja2_env'], name)


class ExampleApp(NotebookApp):

    default_url = Unicode('/example')

    def init_webapp(self):
        """initialize tornado webapp and httpserver.
        """
        super(ExampleApp, self).init_webapp()
        default_handlers = [
            (r'/example/?', ExampleHandler),
            (r"/example/(.*)", FileFindHandler, {'path': 'build'}),
        ]
        self.web_app.add_handlers(".*$", default_handlers)


if __name__ == '__main__':
    ExampleApp.launch_instance()

// ... existing code ...
        return self.write(self.render_template("index.html",
            static=self.static_url, base_url=self.base_url,
            terminals_available=True))
// ... rest of the code ...
ab6293bbe039cb0c939493c3b921f114ad68645b
tests/test_plugin_execute.py
tests/test_plugin_execute.py
from irc3.testing import BotTestCase


class ExecutePluginTestCase(BotTestCase):

    config = {
        'includes': [
            'onebot.plugins.execute'
        ],
        'onebot.plugins.execute': {
            'commands': [
                'command1',
                'command2'
            ]
        }
    }

    def setUp(self):
        super(ExecutePluginTestCase, self).setUp()
        self.callFTU()
        self.bot.db = {}

    def test_command_allowed(self):
        self.bot.notify('connection_made')
        self.assertSent(['command1', 'command2'])

from irc3.testing import BotTestCase


class ExecutePluginTestCase(BotTestCase):

    config = {
        'includes': [
            'onebot.plugins.execute'
        ],
        'onebot.plugins.execute': {
            'commands': [
                'command1',
                'command2'
            ]
        }
    }

    def setUp(self):
        super(ExecutePluginTestCase, self).setUp()
        self.callFTU()
        self.bot.db = {}

    def test_connection_made(self):
        self.bot.dispatch(':irc.server 376 foo!nick@bar :something')
        self.assertSent(['command1', 'command2'])
Fix test for connection made
Fix test for connection made
Python
bsd-3-clause
thomwiggers/onebot
  from irc3.testing import BotTestCase


  class ExecutePluginTestCase(BotTestCase):

      config = {
          'includes': [
              'onebot.plugins.execute'
          ],
          'onebot.plugins.execute': {
              'commands': [
                  'command1',
                  'command2'
              ]
          }
      }

      def setUp(self):
          super(ExecutePluginTestCase, self).setUp()
          self.callFTU()
          self.bot.db = {}

-     def test_command_allowed(self):
-         self.bot.notify('connection_made')
+     def test_connection_made(self):
+         self.bot.dispatch(':irc.server 376 foo!nick@bar :something')
          self.assertSent(['command1', 'command2'])
Fix test for connection made
## Code Before:
from irc3.testing import BotTestCase


class ExecutePluginTestCase(BotTestCase):

    config = {
        'includes': [
            'onebot.plugins.execute'
        ],
        'onebot.plugins.execute': {
            'commands': [
                'command1',
                'command2'
            ]
        }
    }

    def setUp(self):
        super(ExecutePluginTestCase, self).setUp()
        self.callFTU()
        self.bot.db = {}

    def test_command_allowed(self):
        self.bot.notify('connection_made')
        self.assertSent(['command1', 'command2'])

## Instruction:
Fix test for connection made

## Code After:
from irc3.testing import BotTestCase


class ExecutePluginTestCase(BotTestCase):

    config = {
        'includes': [
            'onebot.plugins.execute'
        ],
        'onebot.plugins.execute': {
            'commands': [
                'command1',
                'command2'
            ]
        }
    }

    def setUp(self):
        super(ExecutePluginTestCase, self).setUp()
        self.callFTU()
        self.bot.db = {}

    def test_connection_made(self):
        self.bot.dispatch(':irc.server 376 foo!nick@bar :something')
        self.assertSent(['command1', 'command2'])

# ... existing code ...
    def test_connection_made(self):
        self.bot.dispatch(':irc.server 376 foo!nick@bar :something')
        self.assertSent(['command1', 'command2'])
# ... rest of the code ...
a6a2ee870840730f99ad475e02956c49fe2e7ed3
common/authapp.py
common/authapp.py
import ConfigParser

from common.application import Application
from keystonemiddleware.auth_token import filter_factory as auth_filter_factory


class KeystoneApplication(Application):
    """
    An Application which uses Keystone for authorisation using RBAC
    """

    def __init__(self, configuration):
        super(KeystoneApplication, self).__init__(configuration)
        self.required_role = self.config.get('authorisation', 'required_role')
        if self.required_role is None:
            raise ValueError("No required role supplied")

    def _check_auth(self, req):
        if 'HTTP_X_ROLES' in req.environ:
            user_roles = req.environ['HTTP_X_ROLES'].split(',')
            return self.required_role in user_roles
        return False


def keystone_auth_filter_factory(global_config, **local_config):
    global_config.update(local_config)
    config_file_name = global_config.get('config_file', 'apiv1app.ini')
    config_file = ConfigParser.SafeConfigParser()
    config_file.read(config_file_name)
    global_config.update(config_file.items('keystone_authtoken'))
    return auth_filter_factory(global_config)

import ConfigParser

from common.application import Application
from keystonemiddleware.auth_token import filter_factory as auth_filter_factory


class KeystoneApplication(Application):
    """
    An Application which uses Keystone for authorisation using RBAC
    """

    INI_SECTION = 'keystone_authtoken'

    def __init__(self, configuration):
        super(KeystoneApplication, self).__init__(configuration)
        self.required_role = self.config.get('authorisation', 'required_role')
        if self.required_role is None:
            raise ValueError("No required role supplied")

    def _check_auth(self, req):
        if 'HTTP_X_ROLES' in req.environ:
            user_roles = req.environ['HTTP_X_ROLES'].split(',')
            return self.required_role in user_roles
        return False


def keystone_auth_filter_factory(global_config, **local_config):
    global_config.update(local_config)
    config_file_name = global_config.get('config_file')
    if not config_file_name:
        raise ValueError('No config_file directive')
    config_file = ConfigParser.SafeConfigParser()
    if not config_file.read(config_file_name):
        raise ValueError("Cannot read config file '%s'" % config_file_name)
    global_config.update(config_file.items(KeystoneApplication.INI_SECTION))
    return auth_filter_factory(global_config)
Remove hardcoded default filename. Raise an error if no app config file was specified, or it is unreadable, or it doesn't contain the section we need.
Remove hardcoded default filename. Raise an error if no app config file was specified, or it is unreadable, or it doesn't contain the section we need.
Python
apache-2.0
NCI-Cloud/reporting-api,NeCTAR-RC/reporting-api,NCI-Cloud/reporting-api,NeCTAR-RC/reporting-api
  import ConfigParser

  from common.application import Application
  from keystonemiddleware.auth_token import filter_factory as auth_filter_factory


  class KeystoneApplication(Application):
      """
      An Application which uses Keystone for authorisation using RBAC
      """
+
+     INI_SECTION = 'keystone_authtoken'

      def __init__(self, configuration):
          super(KeystoneApplication, self).__init__(configuration)
          self.required_role = self.config.get('authorisation', 'required_role')
          if self.required_role is None:
              raise ValueError("No required role supplied")

      def _check_auth(self, req):
          if 'HTTP_X_ROLES' in req.environ:
              user_roles = req.environ['HTTP_X_ROLES'].split(',')
              return self.required_role in user_roles
          return False


  def keystone_auth_filter_factory(global_config, **local_config):
      global_config.update(local_config)
-     config_file_name = global_config.get('config_file', 'apiv1app.ini')
+     config_file_name = global_config.get('config_file')
+     if not config_file_name:
+         raise ValueError('No config_file directive')
      config_file = ConfigParser.SafeConfigParser()
-     config_file.read(config_file_name)
+     if not config_file.read(config_file_name):
+         raise ValueError("Cannot read config file '%s'" % config_file_name)
-     global_config.update(config_file.items('keystone_authtoken'))
+     global_config.update(config_file.items(KeystoneApplication.INI_SECTION))
      return auth_filter_factory(global_config)
Remove hardcoded default filename. Raise an error if no app config file was specified, or it is unreadable, or it doesn't contain the section we need.
## Code Before:
import ConfigParser

from common.application import Application
from keystonemiddleware.auth_token import filter_factory as auth_filter_factory


class KeystoneApplication(Application):
    """
    An Application which uses Keystone for authorisation using RBAC
    """

    def __init__(self, configuration):
        super(KeystoneApplication, self).__init__(configuration)
        self.required_role = self.config.get('authorisation', 'required_role')
        if self.required_role is None:
            raise ValueError("No required role supplied")

    def _check_auth(self, req):
        if 'HTTP_X_ROLES' in req.environ:
            user_roles = req.environ['HTTP_X_ROLES'].split(',')
            return self.required_role in user_roles
        return False


def keystone_auth_filter_factory(global_config, **local_config):
    global_config.update(local_config)
    config_file_name = global_config.get('config_file', 'apiv1app.ini')
    config_file = ConfigParser.SafeConfigParser()
    config_file.read(config_file_name)
    global_config.update(config_file.items('keystone_authtoken'))
    return auth_filter_factory(global_config)

## Instruction:
Remove hardcoded default filename. Raise an error if no app config file was specified, or it is unreadable, or it doesn't contain the section we need.

## Code After:
import ConfigParser

from common.application import Application
from keystonemiddleware.auth_token import filter_factory as auth_filter_factory


class KeystoneApplication(Application):
    """
    An Application which uses Keystone for authorisation using RBAC
    """

    INI_SECTION = 'keystone_authtoken'

    def __init__(self, configuration):
        super(KeystoneApplication, self).__init__(configuration)
        self.required_role = self.config.get('authorisation', 'required_role')
        if self.required_role is None:
            raise ValueError("No required role supplied")

    def _check_auth(self, req):
        if 'HTTP_X_ROLES' in req.environ:
            user_roles = req.environ['HTTP_X_ROLES'].split(',')
            return self.required_role in user_roles
        return False


def keystone_auth_filter_factory(global_config, **local_config):
    global_config.update(local_config)
    config_file_name = global_config.get('config_file')
    if not config_file_name:
        raise ValueError('No config_file directive')
    config_file = ConfigParser.SafeConfigParser()
    if not config_file.read(config_file_name):
        raise ValueError("Cannot read config file '%s'" % config_file_name)
    global_config.update(config_file.items(KeystoneApplication.INI_SECTION))
    return auth_filter_factory(global_config)

...
    """

    INI_SECTION = 'keystone_authtoken'
...
    global_config.update(local_config)
    config_file_name = global_config.get('config_file')
    if not config_file_name:
        raise ValueError('No config_file directive')
    config_file = ConfigParser.SafeConfigParser()
    if not config_file.read(config_file_name):
        raise ValueError("Cannot read config file '%s'" % config_file_name)
    global_config.update(config_file.items(KeystoneApplication.INI_SECTION))
    return auth_filter_factory(global_config)
...
3046eaf265d015c2257efa8066a04c26ddd4448e
search.py
search.py
import io
import getopt
import sys
import pickle

def usage():
    print("usage: " + sys.argv[0] + " -d dictionary-file -p postings-file -q file-of-queries -o output-file-of-results")

if __name__ == '__main__':
    dict_file = postings_file = query_file = output_file = None

    try:
        opts, args = getopt.getopt(sys.argv[1:], 'd:p:q:o:')
    except getopt.GetoptError as err:
        usage()
        sys.exit(2)

    for o, a in opts:
        if o == '-d':
            dict_file = a
        elif o == '-p':
            postings_file = a
        elif o == '-q':
            query_file = a
        elif o == '-o':
            output_file = a
        else:
            assert False, "unhandled option"

    if dict_file == None or postings_file == None or query_file == None or output_file == None:
        usage()
        sys.exit(2)

    with io.open(dict_file, 'rb') as f:
        dictionary = pickle.load(f)

    with io.open(postings_file, 'rb') as f:
        postings = pickle.load(f)
        skip_pointers = pickle.load(f)

import io
import getopt
import sys
import pickle

def usage():
    print("usage: " + sys.argv[0] + " -d dictionary-file -p postings-file -q file-of-queries -o output-file-of-results")

if __name__ == '__main__':
    dict_file = postings_file = query_file = output_file = None

    try:
        opts, args = getopt.getopt(sys.argv[1:], 'd:p:q:o:')
    except getopt.GetoptError as err:
        usage()
        sys.exit(2)

    for o, a in opts:
        if o == '-d':
            dict_file = a
        elif o == '-p':
            postings_file = a
        elif o == '-q':
            query_file = a
        elif o == '-o':
            output_file = a
        else:
            assert False, "unhandled option"

    if dict_file == None or postings_file == None or query_file == None or output_file == None:
        usage()
        sys.exit(2)

    with io.open(dict_file, 'rb') as f:
        dictionary = pickle.load(f)

    # Implement seeking and reading don't read entirely
    with io.open(postings_file, 'rb') as f:
        postings = pickle.load(f)
        skip_pointers = pickle.load(f)
Add todo for seeking and reading
Add todo for seeking and reading
Python
mit
ikaruswill/boolean-retrieval,ikaruswill/vector-space-model
  import io
  import getopt
  import sys
  import pickle

  def usage():
      print("usage: " + sys.argv[0] + " -d dictionary-file -p postings-file -q file-of-queries -o output-file-of-results")

  if __name__ == '__main__':
      dict_file = postings_file = query_file = output_file = None

      try:
          opts, args = getopt.getopt(sys.argv[1:], 'd:p:q:o:')
      except getopt.GetoptError as err:
          usage()
          sys.exit(2)

      for o, a in opts:
          if o == '-d':
              dict_file = a
          elif o == '-p':
              postings_file = a
          elif o == '-q':
              query_file = a
          elif o == '-o':
              output_file = a
          else:
              assert False, "unhandled option"

      if dict_file == None or postings_file == None or query_file == None or output_file == None:
          usage()
          sys.exit(2)

      with io.open(dict_file, 'rb') as f:
          dictionary = pickle.load(f)

+     # Implement seeking and reading don't read entirely
      with io.open(postings_file, 'rb') as f:
          postings = pickle.load(f)
          skip_pointers = pickle.load(f)
Add todo for seeking and reading
## Code Before:
import io
import getopt
import sys
import pickle

def usage():
    print("usage: " + sys.argv[0] + " -d dictionary-file -p postings-file -q file-of-queries -o output-file-of-results")

if __name__ == '__main__':
    dict_file = postings_file = query_file = output_file = None

    try:
        opts, args = getopt.getopt(sys.argv[1:], 'd:p:q:o:')
    except getopt.GetoptError as err:
        usage()
        sys.exit(2)

    for o, a in opts:
        if o == '-d':
            dict_file = a
        elif o == '-p':
            postings_file = a
        elif o == '-q':
            query_file = a
        elif o == '-o':
            output_file = a
        else:
            assert False, "unhandled option"

    if dict_file == None or postings_file == None or query_file == None or output_file == None:
        usage()
        sys.exit(2)

    with io.open(dict_file, 'rb') as f:
        dictionary = pickle.load(f)

    with io.open(postings_file, 'rb') as f:
        postings = pickle.load(f)
        skip_pointers = pickle.load(f)

## Instruction:
Add todo for seeking and reading

## Code After:
import io
import getopt
import sys
import pickle

def usage():
    print("usage: " + sys.argv[0] + " -d dictionary-file -p postings-file -q file-of-queries -o output-file-of-results")

if __name__ == '__main__':
    dict_file = postings_file = query_file = output_file = None

    try:
        opts, args = getopt.getopt(sys.argv[1:], 'd:p:q:o:')
    except getopt.GetoptError as err:
        usage()
        sys.exit(2)

    for o, a in opts:
        if o == '-d':
            dict_file = a
        elif o == '-p':
            postings_file = a
        elif o == '-q':
            query_file = a
        elif o == '-o':
            output_file = a
        else:
            assert False, "unhandled option"

    if dict_file == None or postings_file == None or query_file == None or output_file == None:
        usage()
        sys.exit(2)

    with io.open(dict_file, 'rb') as f:
        dictionary = pickle.load(f)

    # Implement seeking and reading don't read entirely
    with io.open(postings_file, 'rb') as f:
        postings = pickle.load(f)
        skip_pointers = pickle.load(f)

# ... existing code ...
    # Implement seeking and reading don't read entirely
    with io.open(postings_file, 'rb') as f:
# ... rest of the code ...
b6b627cb4c5d6b7dc1636794de870a2bf6da262b
cookiecutter/replay.py
cookiecutter/replay.py
from __future__ import unicode_literals

from .compat import is_string


def dump(template_name, context):
    if not is_string(template_name):
        raise TypeError('Template name is required to be of type str')

from __future__ import unicode_literals

from .compat import is_string


def dump(template_name, context):
    if not is_string(template_name):
        raise TypeError('Template name is required to be of type str')

    if not isinstance(context, dict):
        raise TypeError('Context is required to be of type dict')
Raise a TypeError if context is not a dict
Raise a TypeError if context is not a dict
Python
bsd-3-clause
pjbull/cookiecutter,hackebrot/cookiecutter,cguardia/cookiecutter,luzfcb/cookiecutter,stevepiercy/cookiecutter,agconti/cookiecutter,michaeljoseph/cookiecutter,venumech/cookiecutter,christabor/cookiecutter,michaeljoseph/cookiecutter,terryjbates/cookiecutter,takeflight/cookiecutter,dajose/cookiecutter,pjbull/cookiecutter,willingc/cookiecutter,audreyr/cookiecutter,ramiroluz/cookiecutter,benthomasson/cookiecutter,moi65/cookiecutter,benthomasson/cookiecutter,audreyr/cookiecutter,takeflight/cookiecutter,willingc/cookiecutter,venumech/cookiecutter,agconti/cookiecutter,stevepiercy/cookiecutter,terryjbates/cookiecutter,cguardia/cookiecutter,dajose/cookiecutter,ramiroluz/cookiecutter,luzfcb/cookiecutter,hackebrot/cookiecutter,Springerle/cookiecutter,Springerle/cookiecutter,christabor/cookiecutter,moi65/cookiecutter
  from __future__ import unicode_literals

  from .compat import is_string


  def dump(template_name, context):
      if not is_string(template_name):
          raise TypeError('Template name is required to be of type str')
+
+     if not isinstance(context, dict):
+         raise TypeError('Context is required to be of type dict')
Raise a TypeError if context is not a dict
## Code Before:
from __future__ import unicode_literals

from .compat import is_string


def dump(template_name, context):
    if not is_string(template_name):
        raise TypeError('Template name is required to be of type str')

## Instruction:
Raise a TypeError if context is not a dict

## Code After:
from __future__ import unicode_literals

from .compat import is_string


def dump(template_name, context):
    if not is_string(template_name):
        raise TypeError('Template name is required to be of type str')

    if not isinstance(context, dict):
        raise TypeError('Context is required to be of type dict')

// ... existing code ...
        raise TypeError('Template name is required to be of type str')

    if not isinstance(context, dict):
        raise TypeError('Context is required to be of type dict')
// ... rest of the code ...
68b1c3804504ecc14f7c23465ca11db31489e1cd
mozcal/events/views.py
mozcal/events/views.py
from django.shortcuts import render, get_object_or_404

from mozcal.events.models import Event, Space, FunctionalArea


def one(request, slug):
    event = get_object_or_404(Event, slug=slug)

    return render(request, 'event.html', { 'event': event })


def all(request):
    events = Event.objects.all()
    spaces = Space.objects.all()
    areas = FunctionalArea.objects.all()

    return render(request, 'events_all.html', {
        'events': events,
        'spaces': spaces,
        'areas': areas
    })

from django.shortcuts import render, get_object_or_404

from mozcal.events.models import Event, Space, FunctionalArea


def one(request, slug):
    event = get_object_or_404(Event, slug=slug)

    return render(request, 'event.html', { 'event': event })


def all(request):
    search_string = request.GET.get('search', '')

    events = Event.objects.filter(title__icontains=search_string)
    spaces = Space.objects.all()
    areas = FunctionalArea.objects.all()

    return render(request, 'events_all.html', {
        'events': events,
        'spaces': spaces,
        'areas': areas
    })
Allow filtering of events by title
Allow filtering of events by title
Python
bsd-3-clause
ppapadeas/wprevents,yvan-sraka/wprevents,yvan-sraka/wprevents,ppapadeas/wprevents,yvan-sraka/wprevents,ppapadeas/wprevents,yvan-sraka/wprevents
  from django.shortcuts import render, get_object_or_404

  from mozcal.events.models import Event, Space, FunctionalArea


  def one(request, slug):
      event = get_object_or_404(Event, slug=slug)

      return render(request, 'event.html', { 'event': event })


  def all(request):
-     events = Event.objects.all()
+     search_string = request.GET.get('search', '')
+
+     events = Event.objects.filter(title__icontains=search_string)
      spaces = Space.objects.all()
      areas = FunctionalArea.objects.all()

      return render(request, 'events_all.html', {
          'events': events,
          'spaces': spaces,
          'areas': areas
      })
Allow filtering of events by title
## Code Before:
from django.shortcuts import render, get_object_or_404

from mozcal.events.models import Event, Space, FunctionalArea


def one(request, slug):
    event = get_object_or_404(Event, slug=slug)

    return render(request, 'event.html', { 'event': event })


def all(request):
    events = Event.objects.all()
    spaces = Space.objects.all()
    areas = FunctionalArea.objects.all()

    return render(request, 'events_all.html', {
        'events': events,
        'spaces': spaces,
        'areas': areas
    })

## Instruction:
Allow filtering of events by title

## Code After:
from django.shortcuts import render, get_object_or_404

from mozcal.events.models import Event, Space, FunctionalArea


def one(request, slug):
    event = get_object_or_404(Event, slug=slug)

    return render(request, 'event.html', { 'event': event })


def all(request):
    search_string = request.GET.get('search', '')

    events = Event.objects.filter(title__icontains=search_string)
    spaces = Space.objects.all()
    areas = FunctionalArea.objects.all()

    return render(request, 'events_all.html', {
        'events': events,
        'spaces': spaces,
        'areas': areas
    })

// ... existing code ...
def all(request):
    search_string = request.GET.get('search', '')

    events = Event.objects.filter(title__icontains=search_string)
    spaces = Space.objects.all()
// ... rest of the code ...
1069565b596d3bc13b99bcae4ec831c2228e7946
PrinterApplication.py
PrinterApplication.py
from Cura.WxApplication import WxApplication

import wx

class PrinterApplication(WxApplication):
    def __init__(self):
        super(PrinterApplication, self).__init__()

    def run(self):
        frame = wx.Frame(None, wx.ID_ANY, "Hello World")
        frame.Show(True)
        super(PrinterApplication, self).run()

from Cura.Wx.WxApplication import WxApplication

class PrinterApplication(WxApplication):
    def __init__(self):
        super(PrinterApplication, self).__init__()

    def run(self):
        super(PrinterApplication, self).run()
Move WxApplication into its own Wx submodule
Move WxApplication into its own Wx submodule
Python
agpl-3.0
lo0ol/Ultimaker-Cura,Curahelper/Cura,senttech/Cura,DeskboxBrazil/Cura,lo0ol/Ultimaker-Cura,bq/Ultimaker-Cura,fxtentacle/Cura,totalretribution/Cura,derekhe/Cura,ad1217/Cura,ad1217/Cura,derekhe/Cura,Curahelper/Cura,ynotstartups/Wanhao,hmflash/Cura,markwal/Cura,fieldOfView/Cura,fxtentacle/Cura,hmflash/Cura,quillford/Cura,senttech/Cura,ynotstartups/Wanhao,markwal/Cura,bq/Ultimaker-Cura,fieldOfView/Cura,DeskboxBrazil/Cura,quillford/Cura,totalretribution/Cura
- from Cura.WxApplication import WxApplication
+ from Cura.Wx.WxApplication import WxApplication
-
- import wx

  class PrinterApplication(WxApplication):
      def __init__(self):
          super(PrinterApplication, self).__init__()

      def run(self):
-         frame = wx.Frame(None, wx.ID_ANY, "Hello World")
-         frame.Show(True)
          super(PrinterApplication, self).run()
Move WxApplication into its own Wx submodule
## Code Before:
from Cura.WxApplication import WxApplication

import wx

class PrinterApplication(WxApplication):
    def __init__(self):
        super(PrinterApplication, self).__init__()

    def run(self):
        frame = wx.Frame(None, wx.ID_ANY, "Hello World")
        frame.Show(True)
        super(PrinterApplication, self).run()

## Instruction:
Move WxApplication into its own Wx submodule

## Code After:
from Cura.Wx.WxApplication import WxApplication

class PrinterApplication(WxApplication):
    def __init__(self):
        super(PrinterApplication, self).__init__()

    def run(self):
        super(PrinterApplication, self).run()

...
from Cura.Wx.WxApplication import WxApplication
...
    def run(self):
        super(PrinterApplication, self).run()
...
f5e8bfaf5c4f7a2131fbe0ffd0f8d14a316b907e
camoco/Exceptions.py
camoco/Exceptions.py
class CamocoError(Exception):
    pass

class CamocoExistsError(CamocoError):
    '''
        You tried to create a camoco object which already exists
        under the same name,type combination.
    '''
    def __init__(self,expr,message='',*args):
        self.expr = expr
        self.message = (
            'You are trying to create a Camoco based object'
            'That already exists' + message.format(*args)
        )

class CamocoGeneNameError(CamocoError):
    '''
        Gene names must be beautiful snowflakes.
    '''
    def __init__(self,expr,message='',*args):
        self.expr = expr
        self.message = 'Gene names must be unique:' + message.format(args)

class CamocoAccessionNameError(CamocoError):
    '''
        Accession names must be Unique.
    '''
    def __init__(self,expr,message='',*args):
        self.expr = expr
        self.message = (
            'Accession names must be unique:' + message.format(args)
        )

class CamocoZeroWindowError(CamocoError):
    def __init__(self,expr,message,*args):
        self.expr = expr
        self.message = (
            'Operation requiring window, but window is 0:' + \
            message.format(args)
        )

class CamocoError(Exception):
    pass

class CamocoExistsError(CamocoError):
    '''
        You tried to create a camoco object which already exists
        under the same name,type combination.
    '''
    def __init__(self,expr,message='',*args):
        self.expr = expr
        self.message = (
            'You are trying to create a Camoco based object'
            'That already exists' + message.format(*args)
        )

class CamocoGeneNameError(CamocoError):
    '''
        Gene names must be beautiful snowflakes.
    '''
    def __init__(self,expr,message='',*args):
        self.expr = expr
        self.message = 'Gene names must be unique:' + message.format(args)

class CamocoAccessionNameError(CamocoError):
    '''
        Accession names must be Unique.
    '''
    def __init__(self,expr,message='',*args):
        self.expr = expr
        self.message = (
            'Accession names must be unique:' + message.format(args)
        )

class CamocoZeroWindowError(CamocoError):
    def __init__(self,expr,message,*args):
        self.expr = expr
        self.message = (
            'Operation requiring window, but window is 0:' + \
            message.format(args)
        )

class CamocoInteractive(CamocoError):
    def __init__(self,expr=None,message='',*args):
        self.expr = expr
        self.message = 'Camoco interactive ipython session.'
Add exception for cli command line to run interactively.
Add exception for cli command line to run interactively.
Python
mit
schae234/Camoco,schae234/Camoco
  class CamocoError(Exception):
      pass

  class CamocoExistsError(CamocoError):
      '''
          You tried to create a camoco object which already exists
          under the same name,type combination.
      '''
      def __init__(self,expr,message='',*args):
          self.expr = expr
          self.message = (
              'You are trying to create a Camoco based object'
              'That already exists' + message.format(*args)
          )

  class CamocoGeneNameError(CamocoError):
      '''
          Gene names must be beautiful snowflakes.
      '''
      def __init__(self,expr,message='',*args):
          self.expr = expr
          self.message = 'Gene names must be unique:' + message.format(args)

  class CamocoAccessionNameError(CamocoError):
      '''
          Accession names must be Unique.
      '''
      def __init__(self,expr,message='',*args):
          self.expr = expr
          self.message = (
              'Accession names must be unique:' + message.format(args)
          )

  class CamocoZeroWindowError(CamocoError):
      def __init__(self,expr,message,*args):
          self.expr = expr
          self.message = (
              'Operation requiring window, but window is 0:' + \
              message.format(args)
          )

+ class CamocoInteractive(CamocoError):
+     def __init__(self,expr=None,message='',*args):
+         self.expr = expr
+         self.message = 'Camoco interactive ipython session.'
+
Add exception for cli command line to run interactively.
## Code Before:
class CamocoError(Exception):
    pass

class CamocoExistsError(CamocoError):
    '''
        You tried to create a camoco object which already exists
        under the same name,type combination.
    '''
    def __init__(self,expr,message='',*args):
        self.expr = expr
        self.message = (
            'You are trying to create a Camoco based object'
            'That already exists' + message.format(*args)
        )

class CamocoGeneNameError(CamocoError):
    '''
        Gene names must be beautiful snowflakes.
    '''
    def __init__(self,expr,message='',*args):
        self.expr = expr
        self.message = 'Gene names must be unique:' + message.format(args)

class CamocoAccessionNameError(CamocoError):
    '''
        Accession names must be Unique.
    '''
    def __init__(self,expr,message='',*args):
        self.expr = expr
        self.message = (
            'Accession names must be unique:' + message.format(args)
        )

class CamocoZeroWindowError(CamocoError):
    def __init__(self,expr,message,*args):
        self.expr = expr
        self.message = (
            'Operation requiring window, but window is 0:' + \
            message.format(args)
        )

## Instruction:
Add exception for cli command line to run interactively.

## Code After:
class CamocoError(Exception):
    pass

class CamocoExistsError(CamocoError):
    '''
        You tried to create a camoco object which already exists
        under the same name,type combination.
    '''
    def __init__(self,expr,message='',*args):
        self.expr = expr
        self.message = (
            'You are trying to create a Camoco based object'
            'That already exists' + message.format(*args)
        )

class CamocoGeneNameError(CamocoError):
    '''
        Gene names must be beautiful snowflakes.
    '''
    def __init__(self,expr,message='',*args):
        self.expr = expr
        self.message = 'Gene names must be unique:' + message.format(args)

class CamocoAccessionNameError(CamocoError):
    '''
        Accession names must be Unique.
    '''
    def __init__(self,expr,message='',*args):
        self.expr = expr
        self.message = (
            'Accession names must be unique:' + message.format(args)
        )

class CamocoZeroWindowError(CamocoError):
    def __init__(self,expr,message,*args):
        self.expr = expr
        self.message = (
            'Operation requiring window, but window is 0:' + \
            message.format(args)
        )

class CamocoInteractive(CamocoError):
    def __init__(self,expr=None,message='',*args):
        self.expr = expr
        self.message = 'Camoco interactive ipython session.'

...
        )

class CamocoInteractive(CamocoError):
    def __init__(self,expr=None,message='',*args):
        self.expr = expr
        self.message = 'Camoco interactive ipython session.'
...
95ccab69cfff30c24932c4cd156983a29639435d
nginxauthdaemon/crowdauth.py
nginxauthdaemon/crowdauth.py
import crowd

from auth import Authenticator


class CrowdAuthenticator(Authenticator):
    """Atlassian Crowd authenticator.

    Requires configuration options CROWD_URL, CROWD_APP_NAME, CROWD_APP_PASSWORD"""
    def __init__(self, config):
        super(CrowdAuthenticator, self).__init__(config)
        app_url = config['CROWD_URL']
        app_user = config['CROWD_APP_NAME']
        app_pass = config['CROWD_APP_PASSWORD']
        self._cs = crowd.CrowdServer(app_url, app_user, app_pass)

    def authenticate(self, username, password):
        result = self._cs.auth_user(username, password)
        return result.get('name') == username

import crowd

from auth import Authenticator


class CrowdAuthenticator(Authenticator):
    """Atlassian Crowd authenticator.

    Requires configuration options CROWD_URL, CROWD_APP_NAME, CROWD_APP_PASSWORD"""
    def __init__(self, config):
        super(CrowdAuthenticator, self).__init__(config)
        app_url = config['CROWD_URL']
        app_user = config['CROWD_APP_NAME']
        app_pass = config['CROWD_APP_PASSWORD']
        self._cs = crowd.CrowdServer(app_url, app_user, app_pass)

    def authenticate(self, username, password):
        result = self._cs.auth_user(username, password)
        if result == None:
            # auth failed
            return False
        # auth succeeded
        return result.get('name') == username
Fix 500 error when Crowd auth fails
Fix 500 error when Crowd auth fails
Python
mit
akurdyukov/nginxauthdaemon,akurdyukov/nginxauthdaemon
import crowd from auth import Authenticator class CrowdAuthenticator(Authenticator): """Atlassian Crowd authenticator. Requires configuration options CROWD_URL, CROWD_APP_NAME, CROWD_APP_PASSWORD""" def __init__(self, config): super(CrowdAuthenticator, self).__init__(config) app_url = config['CROWD_URL'] app_user = config['CROWD_APP_NAME'] app_pass = config['CROWD_APP_PASSWORD'] self._cs = crowd.CrowdServer(app_url, app_user, app_pass) def authenticate(self, username, password): result = self._cs.auth_user(username, password) + if result == None: + # auth failed + return False + # auth succeeded return result.get('name') == username
Fix 500 error when Crowd auth fails
## Code Before:
import crowd
from auth import Authenticator


class CrowdAuthenticator(Authenticator):
    """Atlassian Crowd authenticator.
    Requires configuration options CROWD_URL, CROWD_APP_NAME, CROWD_APP_PASSWORD"""

    def __init__(self, config):
        super(CrowdAuthenticator, self).__init__(config)

        app_url = config['CROWD_URL']
        app_user = config['CROWD_APP_NAME']
        app_pass = config['CROWD_APP_PASSWORD']

        self._cs = crowd.CrowdServer(app_url, app_user, app_pass)

    def authenticate(self, username, password):
        result = self._cs.auth_user(username, password)
        return result.get('name') == username

## Instruction:
Fix 500 error when Crowd auth fails

## Code After:
import crowd
from auth import Authenticator


class CrowdAuthenticator(Authenticator):
    """Atlassian Crowd authenticator.
    Requires configuration options CROWD_URL, CROWD_APP_NAME, CROWD_APP_PASSWORD"""

    def __init__(self, config):
        super(CrowdAuthenticator, self).__init__(config)

        app_url = config['CROWD_URL']
        app_user = config['CROWD_APP_NAME']
        app_pass = config['CROWD_APP_PASSWORD']

        self._cs = crowd.CrowdServer(app_url, app_user, app_pass)

    def authenticate(self, username, password):
        result = self._cs.auth_user(username, password)
        if result == None:
            # auth failed
            return False
        # auth succeeded
        return result.get('name') == username
# ... existing code ... result = self._cs.auth_user(username, password) if result == None: # auth failed return False # auth succeeded return result.get('name') == username # ... rest of the code ...
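The root cause of this fix is easy to reproduce in isolation: auth_user returns a dict on success and None on failure, so the old code called .get() on None and raised AttributeError, which the daemon surfaced as HTTP 500. A self-contained sketch with a stub standing in for crowd.CrowdServer; the stub and its credentials are illustrative assumptions, not the real client:

class StubCrowdServer(object):
    """Stand-in for crowd.CrowdServer: returns a dict on success, None on failure."""
    def auth_user(self, username, password):
        if (username, password) == ('alice', 'secret'):
            return {'name': 'alice'}
        return None

def authenticate(cs, username, password):
    result = cs.auth_user(username, password)
    if result is None:
        # Without this guard, None.get('name') raised AttributeError.
        return False
    return result.get('name') == username

cs = StubCrowdServer()
assert authenticate(cs, 'alice', 'secret') is True
assert authenticate(cs, 'alice', 'wrong') is False  # previously an AttributeError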
e1a0e3e6895ce14822b111ee17b182a79b7b28c9
miniraf/calc.py
miniraf/calc.py
def create_parser(subparsers): pass
import argparse from astropy.io import fits import sys OP_MAP = {"+": lambda x, y: x + y, "-": lambda x, y: x - y, "*": lambda x, y: x * y, "/": lambda x, y: x / y} def create_parser(subparsers): parser_calc = subparsers.add_parser("calc", help="calc help") parser_calc.add_argument("-o", "--output", metavar="OUTFILE", default=sys.stdout.buffer) parser_calc.add_argument("file1") parser_calc.add_argument("op", choices=["+", "-", "*", "/"]) parser_calc.add_argument("file2") parser_calc.set_defaults(func=main) def load_fits_data(filename): with fits.open(filename) as f: data = f[0].data return data def main(args): a, b = load_fits_data(args.file1), load_fits_data(args.file2) result = OP_MAP[args.op](a, b) hdu = fits.PrimaryHDU(result) hdu.writeto(args.output)
Add simple four-function output option
Add simple four-function output option Signed-off-by: Lizhou Sha <[email protected]>
Python
mit
vulpicastor/miniraf
+ import argparse + from astropy.io import fits + import sys + + OP_MAP = {"+": lambda x, y: x + y, + "-": lambda x, y: x - y, + "*": lambda x, y: x * y, + "/": lambda x, y: x / y} + def create_parser(subparsers): - pass + parser_calc = subparsers.add_parser("calc", help="calc help") + parser_calc.add_argument("-o", "--output", metavar="OUTFILE", default=sys.stdout.buffer) + parser_calc.add_argument("file1") + parser_calc.add_argument("op", choices=["+", "-", "*", "/"]) + parser_calc.add_argument("file2") + parser_calc.set_defaults(func=main) + def load_fits_data(filename): + with fits.open(filename) as f: + data = f[0].data + return data + + def main(args): + a, b = load_fits_data(args.file1), load_fits_data(args.file2) + result = OP_MAP[args.op](a, b) + hdu = fits.PrimaryHDU(result) + hdu.writeto(args.output) +
Add simple four-function output option
## Code Before: def create_parser(subparsers): pass ## Instruction: Add simple four-function output option ## Code After: import argparse from astropy.io import fits import sys OP_MAP = {"+": lambda x, y: x + y, "-": lambda x, y: x - y, "*": lambda x, y: x * y, "/": lambda x, y: x / y} def create_parser(subparsers): parser_calc = subparsers.add_parser("calc", help="calc help") parser_calc.add_argument("-o", "--output", metavar="OUTFILE", default=sys.stdout.buffer) parser_calc.add_argument("file1") parser_calc.add_argument("op", choices=["+", "-", "*", "/"]) parser_calc.add_argument("file2") parser_calc.set_defaults(func=main) def load_fits_data(filename): with fits.open(filename) as f: data = f[0].data return data def main(args): a, b = load_fits_data(args.file1), load_fits_data(args.file2) result = OP_MAP[args.op](a, b) hdu = fits.PrimaryHDU(result) hdu.writeto(args.output)
... import argparse from astropy.io import fits import sys OP_MAP = {"+": lambda x, y: x + y, "-": lambda x, y: x - y, "*": lambda x, y: x * y, "/": lambda x, y: x / y} def create_parser(subparsers): parser_calc = subparsers.add_parser("calc", help="calc help") parser_calc.add_argument("-o", "--output", metavar="OUTFILE", default=sys.stdout.buffer) parser_calc.add_argument("file1") parser_calc.add_argument("op", choices=["+", "-", "*", "/"]) parser_calc.add_argument("file2") parser_calc.set_defaults(func=main) def load_fits_data(filename): with fits.open(filename) as f: data = f[0].data return data def main(args): a, b = load_fits_data(args.file1), load_fits_data(args.file2) result = OP_MAP[args.op](a, b) hdu = fits.PrimaryHDU(result) hdu.writeto(args.output) ...
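The heart of the new command in this record is the OP_MAP dispatch table: the operator string chosen on the command line selects an element-wise binary function, which NumPy broadcasting applies across whole images. A standalone illustration, with small arrays standing in for the FITS image data that load_fits_data would return:

import numpy as np

OP_MAP = {"+": lambda x, y: x + y,
          "-": lambda x, y: x - y,
          "*": lambda x, y: x * y,
          "/": lambda x, y: x / y}

a = np.array([[2.0, 4.0], [6.0, 8.0]])  # stand-in for the first image
b = np.array([[1.0, 2.0], [3.0, 4.0]])  # stand-in for the second image

# Selecting the "/" entry mirrors a command line like: miniraf calc a.fits / b.fits
result = OP_MAP["/"](a, b)
print(result)  # a 2x2 array in which every element is 2.0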
74010276715e3570ad6f66144f2c2e31aff8948a
tests/test_local.py
tests/test_local.py
import cachelper class TestMemorize: def test_should_cache_return_value(self, mocker): func = mocker.Mock() func.side_effect = lambda i: i * 2 func.__name__ = 'double' cached = cachelper.memoize()(func) assert cached(2) == 4 assert cached(2) == 4 assert func.call_count == 1 assert cached(4) == 8 assert cached(4) == 8 assert func.call_count == 2 def test_can_clear_cache(self, mocker): func = mocker.Mock() func.side_effect = lambda i: i * 2 func.__name__ = 'double' decorator = cachelper.memoize() cached = decorator(func) cached(10) cached.clear_cachelper_cache() cached(10) assert func.call_count == 2
import cachelper class TestMemorize: def test_should_cache_return_value(self, mocker): func = mocker.Mock() func.side_effect = lambda i: i * 2 func.__name__ = 'double' cached = cachelper.memoize()(func) assert cached(2) == 4 assert cached(2) == 4 assert func.call_count == 1 assert cached(4) == 8 assert cached(4) == 8 assert func.call_count == 2 def test_can_clear_cache(self, mocker): func = mocker.Mock() func.side_effect = lambda i: i * 2 func.__name__ = 'double' decorator = cachelper.memoize() cached = decorator(func) cached(10) cached.clear_cachelper_cache() cached(10) assert func.call_count == 2 def test_can_decorate_method(self, mocker): tracker = mocker.Mock() class A(object): @cachelper.memoize() def calculate(self, x, y): tracker() return x + y a1 = A() assert a1.calculate(1, 2) == 3 assert a1.calculate(1, 2) == 3 assert tracker.call_count == 1 a2 = A() assert a2.calculate(1, 2) == 3 assert tracker.call_count == 2
Add test to make sure memoize works for methods
Add test to make sure memoize works for methods
Python
mit
suzaku/cachelper
import cachelper class TestMemorize: def test_should_cache_return_value(self, mocker): func = mocker.Mock() func.side_effect = lambda i: i * 2 func.__name__ = 'double' cached = cachelper.memoize()(func) assert cached(2) == 4 assert cached(2) == 4 assert func.call_count == 1 assert cached(4) == 8 assert cached(4) == 8 assert func.call_count == 2 def test_can_clear_cache(self, mocker): func = mocker.Mock() func.side_effect = lambda i: i * 2 func.__name__ = 'double' decorator = cachelper.memoize() cached = decorator(func) cached(10) cached.clear_cachelper_cache() cached(10) assert func.call_count == 2 + def test_can_decorate_method(self, mocker): + tracker = mocker.Mock() + + class A(object): + + @cachelper.memoize() + def calculate(self, x, y): + tracker() + return x + y + + a1 = A() + assert a1.calculate(1, 2) == 3 + assert a1.calculate(1, 2) == 3 + assert tracker.call_count == 1 + a2 = A() + assert a2.calculate(1, 2) == 3 + assert tracker.call_count == 2 +
Add test to make sure memoize works for methods
## Code Before: import cachelper class TestMemorize: def test_should_cache_return_value(self, mocker): func = mocker.Mock() func.side_effect = lambda i: i * 2 func.__name__ = 'double' cached = cachelper.memoize()(func) assert cached(2) == 4 assert cached(2) == 4 assert func.call_count == 1 assert cached(4) == 8 assert cached(4) == 8 assert func.call_count == 2 def test_can_clear_cache(self, mocker): func = mocker.Mock() func.side_effect = lambda i: i * 2 func.__name__ = 'double' decorator = cachelper.memoize() cached = decorator(func) cached(10) cached.clear_cachelper_cache() cached(10) assert func.call_count == 2 ## Instruction: Add test to make sure memoize works for methods ## Code After: import cachelper class TestMemorize: def test_should_cache_return_value(self, mocker): func = mocker.Mock() func.side_effect = lambda i: i * 2 func.__name__ = 'double' cached = cachelper.memoize()(func) assert cached(2) == 4 assert cached(2) == 4 assert func.call_count == 1 assert cached(4) == 8 assert cached(4) == 8 assert func.call_count == 2 def test_can_clear_cache(self, mocker): func = mocker.Mock() func.side_effect = lambda i: i * 2 func.__name__ = 'double' decorator = cachelper.memoize() cached = decorator(func) cached(10) cached.clear_cachelper_cache() cached(10) assert func.call_count == 2 def test_can_decorate_method(self, mocker): tracker = mocker.Mock() class A(object): @cachelper.memoize() def calculate(self, x, y): tracker() return x + y a1 = A() assert a1.calculate(1, 2) == 3 assert a1.calculate(1, 2) == 3 assert tracker.call_count == 1 a2 = A() assert a2.calculate(1, 2) == 3 assert tracker.call_count == 2
# ... existing code ... assert func.call_count == 2 def test_can_decorate_method(self, mocker): tracker = mocker.Mock() class A(object): @cachelper.memoize() def calculate(self, x, y): tracker() return x + y a1 = A() assert a1.calculate(1, 2) == 3 assert a1.calculate(1, 2) == 3 assert tracker.call_count == 1 a2 = A() assert a2.calculate(1, 2) == 3 assert tracker.call_count == 2 # ... rest of the code ...
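The new test in this record pins down a subtle property: when a memoizing decorator wraps a method, the instance arrives as the first positional argument, so two instances must not share cache entries. A minimal decorator with the same observable behavior shows why keying the cache on the full argument tuple is enough; this is an illustrative reimplementation, not cachelper's actual code:

import functools

def memoize():
    def decorator(func):
        cache = {}
        @functools.wraps(func)
        def wrapper(*args):
            # For a method, args[0] is the instance, so each instance
            # gets its own cache entries, matching the test's expectations.
            if args not in cache:
                cache[args] = func(*args)
            return cache[args]
        wrapper.clear_cachelper_cache = cache.clear
        return wrapper
    return decorator

class A(object):
    @memoize()
    def calculate(self, x, y):
        return x + y

a1, a2 = A(), A()
assert a1.calculate(1, 2) == 3
assert a1.calculate(1, 2) == 3  # served from a1's cache entry
assert a2.calculate(1, 2) == 3  # recomputed, because a2 is a different key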