Dataset columns (each record below is one row; value lengths are the observed minimum and maximum):

| column       | type   | observed values     |
|--------------|--------|---------------------|
| commit       | string | 40-40 chars         |
| old_file     | string | 4-106 chars         |
| new_file     | string | 4-106 chars         |
| old_contents | string | 10-2.94k chars      |
| new_contents | string | 21-2.95k chars      |
| subject      | string | 16-444 chars        |
| message      | string | 17-2.63k chars      |
| lang         | string | 1 distinct value    |
| license      | string | 13 distinct values  |
| repos        | string | 7-43k chars         |
| ndiff        | string | 52-3.31k chars      |
| instruction  | string | 16-444 chars        |
| content      | string | 133-4.32k chars     |
| diff         | string | 49-3.61k chars      |
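A preview of this kind typically comes from a Hugging Face dataset viewer. Assuming the data is published there, it can be loaded with the `datasets` library; the sketch below is minimal, and the dataset identifier is a placeholder, since the dump does not name the dataset.

```python
from datasets import load_dataset

# Placeholder ID -- the dump above does not say which dataset this is.
ds = load_dataset("some-org/commit-edit-pairs", split="train")

row = ds[0]
print(row["subject"])    # one-line commit message
print(row["old_file"])   # path of the file the commit touched
print(row["diff"])       # difflib-style diff between old and new contents
```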
---

commit 485f04f0e396444dbb5635b21202b2cd2e0612ff
file: src/webapp/admin/login.py · lang: Python · license: bsd-3-clause
repos: janLo/meet-and-eat-registration-system, eXma/meet-and-eat-registration-system
subject: Fix redirect generation for reverse proxied solutions

Code before:

```python
from datetime import timedelta, datetime
from functools import wraps
import hmac
from hashlib import sha1

from flask import Blueprint, session, redirect, url_for, request, current_app

ADMIN = "valid_admin"
TIME_FORMAT = '%Y%m%d%H%M%S'
TIME_LIMIT = timedelta(hours=3)


def _create_hmac(payload):
    key = current_app.config["SECRET_KEY"]
    payload = payload.encode("utf8")
    mac = hmac.new(key, payload, sha1)
    return mac.hexdigest()


def set_token():
    expire = datetime.now() + TIME_LIMIT
    token = expire.strftime(TIME_FORMAT)
    session[ADMIN] = "%s|%s" % (token, _create_hmac(token))


def delete_token():
    del session[ADMIN]


def _valid_token(token):
    try:
        token, token_mac = token.split(u"|", 1)
    except:
        return False
    if not token_mac == _create_hmac(token):
        return False
    if datetime.now().strftime(TIME_FORMAT) < token:
        return True


def valid_admin(fn):
    @wraps(fn)
    def nufun(*args, **kwargs):
        if ADMIN in session:
            if _valid_token(session[ADMIN]):
                set_token()
                return fn(*args, **kwargs)
            delete_token()
        session["next"] = request.path
        return redirect(url_for(".login"))
    return nufun
```

Change (ndiff; the only edit is the `session["next"]` line, so the post-login redirect survives a proxy that mounts the app under a sub-path):

```diff
              delete_token()
-         session["next"] = request.path
+         session["next"] = request.script_root + request.path
          return redirect(url_for(".login"))
```
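Several columns of this dataset are derived from the others: in every row shown here `message` and `instruction` repeat `subject`, `content` stitches `old_contents`, the instruction and `new_contents` together under "## Code Before / ## Instruction / ## Code After" headings, and `ndiff`/`diff` are difflib renderings of the two file versions (`diff` additionally keeps the `?` intraline-hint lines). A pair like the one above can be regenerated with the standard library; a minimal sketch:

```python
import difflib

before = ['        session["next"] = request.path\n']
after = ['        session["next"] = request.script_root + request.path\n']

delta = list(difflib.ndiff(before, after))
print(''.join(delta), end='')
# - ...request.path
# + ...request.script_root + request.path
# ? ...                     +++++++++++   <- '+' marks the inserted characters

# The ndiff column appears to be the same output with the '?' hints dropped:
ndiff_lines = [line for line in delta if not line.startswith('?')]
```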
---

commit b58b270c707b57e9f6c245f1ebb31d68a247471c
file: mastering-python/ch05/Decorator.py · lang: Python · license: apache-2.0
repos: precompiler/python-101
subject: Add class method decorator demo.

Code before:

```python
import functools


def logParams(function):
    @functools.wraps(function)  # use this to prevent loss of function attributes
    def wrapper(*args, **kwargs):
        print("function: {}, args: {}, kwargs: {}".format(function.__name__, args, kwargs))
        return function(*args, **kwargs)
    return wrapper


def add(a, b):
    return a + b


@logParams
def mul(a, b):
    return a * b


add(1, 1)
mul(2, 2)


def memo(function):
    function.cache = dict()

    @functools.wraps(function)
    def wrapper(*args):
        if args not in function.cache:
            function.cache[args] = function(*args)
        return function.cache[args]
    return wrapper


@memo
def fib(n):
    if n < 2:
        return n
    else:
        return fib(n - 1) + fib(n - 2)


for i in range(1, 10):
    print("fib{}:{}".format(i, fib(i)))
```

Change: the commit appends the method-decorator demo below to the end of the file:

```python
def trace(func):
    @functools.wraps(func)
    def _trace(self, *args):
        print("Invoking {} - {}".format(self, args))
        func(self, *args)
    return _trace


class FooBar:
    @trace
    def dummy(self, s):
        print(s)

fb = FooBar()
fb.dummy("Hello")
```
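Two things this demo relies on but never prints can be checked directly. The check below is runnable against the script above and assumes Python 3, where `functools.wraps` also records the undecorated function on `__wrapped__`:

```python
print(mul.__name__)         # 'mul' -- without @functools.wraps it would be 'wrapper'
print(fib.__wrapped__(10))  # the raw fib is kept on __wrapped__ (Python 3)

fib(10)
print(len(fib.cache))       # 11 -- one cached result per n in 0..10
```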
---

commit af2c75a4a99f93e3cae1a1d2d0485a88cce833b7
file: features/environment.py · lang: Python · license: mit
repos: michaelbarton/command-line-interface, bioboxes/command-line-interface
subject: Update location where feature tests are run

Code before:

```python
import os
import os.path as pt

from scripttest import TestFileEnvironment


def before_scenario(context, _):
    root_dir = pt.abspath(pt.join(pt.dirname(__file__), '..'))
    path = ":" + pt.join(root_dir, 'bin')
    tmp = pt.join(root_dir, "tmp")
    python_path = pt.join(root_dir, 'vendor', 'python', 'lib', 'python2.7', 'site-packages')

    os.environ['PATH'] = path + ":" + os.environ['PATH']
    os.environ['PYTHONPATH'] = python_path
    os.environ['TMPDIR'] = tmp  # Required to work with boot2docker

    context.env = TestFileEnvironment(base_path = tmp)
```

Change (ndiff):

```diff
      path = ":" + pt.join(root_dir, 'bin')
-     tmp = pt.join(root_dir, "tmp")
+     tmp = pt.join(root_dir, "tmp", "feature")
      python_path = pt.join(root_dir, 'vendor', 'python', 'lib', 'python2.7', 'site-packages')

      os.environ['PATH'] = path + ":" + os.environ['PATH']
      os.environ['PYTHONPATH'] = python_path
-     os.environ['TMPDIR'] = tmp  # Required to work with boot2docker

      context.env = TestFileEnvironment(base_path = tmp)
```
---

commit 1caace2631f8e9c38cf0adfb1179a5260dcd3c33
file: tools/management/commands/output_all_uniprot.py · lang: Python · license: apache-2.0
repos: cmunk/protwis, fosfataza/protwis, protwis/protwis
subject: Change output_all_uniprot to allow multiple ids for some proteins.

Code before:

```python
from django.core.management.base import BaseCommand, CommandError
from django.core.management import call_command
from django.conf import settings
from django.db import connection
from django.db.models import Q
from django.template.loader import render_to_string

from protein.models import Protein
from residue.models import ResidueGenericNumber, ResidueGenericNumberEquivalent
from common import definitions
from common.selection import SelectionItem
from common.alignment_gpcr import Alignment

import xlsxwriter, xlrd
import logging, json, os


class Command(BaseCommand):
    help = "Output all uniprot mappings"

    logger = logging.getLogger(__name__)

    def handle(self, *args, **options):
        #Get the proteins
        f = open('uniprot.json', 'w')
        ps = Protein.objects.filter(Q(source__name='SWISSPROT') | Q(source__name='TREMBL'),web_links__web_resource__slug='uniprot').all().prefetch_related('web_links__web_resource')
        print('total:',len(ps))
        mapping = {}
        for p in ps:
            uniprot = p.web_links.get(web_resource__slug='uniprot')
            mapping[p.entry_name] = uniprot.index
        json.dump(mapping,f, indent=4, separators=(',', ': '))
        # print("Seqs: {}\tNot matching: {}".format(num_of_sequences, num_of_non_matching_sequences))
        # open("uniprot.txt", "w").write()
```

Change (ndiff):

```diff
          for p in ps:
-             uniprot = p.web_links.get(web_resource__slug='uniprot')
+             uniprot = p.web_links.filter(web_resource__slug='uniprot').values_list('index', flat = True)
-             mapping[p.entry_name] = uniprot.index
+             mapping[p.entry_name] = list(uniprot)
+
          json.dump(mapping,f, indent=4, separators=(',', ': '))
```
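The move from `get()` to `filter()` is what makes multiple ids per protein possible: Django's `QuerySet.get()` raises `MultipleObjectsReturned` the moment a second matching link exists, while `filter(...).values_list('index', flat=True)` returns all of them. A sketch of the failure mode, given a `Protein` instance `p` as in the loop above:

```python
from django.core.exceptions import MultipleObjectsReturned

try:
    link = p.web_links.get(web_resource__slug='uniprot')   # old code path
    ids = [link.index]
except MultipleObjectsReturned:
    # Exactly the case the commit fixes: several UniProt accessions.
    ids = list(p.web_links.filter(web_resource__slug='uniprot')
                          .values_list('index', flat=True))
```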
---

commit 2c5c04fd0bb1dc4f5bf54af2e2739fb6a0f1d2c4
file: survey/urls.py · lang: Python · license: agpl-3.0
repos: Pierre-Sassoulas/django-survey
subject: Fix - No more crash when entering a URL with letters

Code before:

```python
from django.conf.urls import patterns, include, url

from .views import IndexView, SurveyDetail, ConfirmView, SurveyCompleted

urlpatterns = patterns('',
    # Examples:
    url(r'^survey/$', IndexView.as_view(), name='survey-list'),
    url(r'^survey/(?P<id>[a-zA-Z0-9-]+)/', SurveyDetail.as_view(), name='survey-detail'),
    url(r'^survey/(?P<id>[a-zA-Z0-9-]+)/completed/', SurveyCompleted.as_view(), name='survey-completed'),
    url(r'^survey/(?P<id>[a-zA-Z0-9-]+)-(?P<step>\d+)/', SurveyDetail.as_view(), name='survey-detail-step'),
    url(r'^confirm/(?P<uuid>\w+)/', ConfirmView.as_view(), name='survey-confirmation'),
)
```

Change (ndiff; the `survey-list` and `survey-confirmation` pairs changed only in leading whitespace, which the flat dump does not preserve):

```diff
  from django.conf.urls import patterns, include, url

  from .views import IndexView, SurveyDetail, ConfirmView, SurveyCompleted

+
  urlpatterns = patterns('',
-     # Examples:
-     url(r'^survey/$', IndexView.as_view(), name='survey-list'),
+     url(r'^survey/$', IndexView.as_view(), name='survey-list'),
-     url(r'^survey/(?P<id>[a-zA-Z0-9-]+)/', SurveyDetail.as_view(), name='survey-detail'),
+     url(r'^survey/(?P<id>\d+)/', SurveyDetail.as_view(), name='survey-detail'),
-     url(r'^survey/(?P<id>[a-zA-Z0-9-]+)/completed/', SurveyCompleted.as_view(), name='survey-completed'),
+     url(r'^survey/(?P<id>\d+)/completed/', SurveyCompleted.as_view(), name='survey-completed'),
-     url(r'^survey/(?P<id>[a-zA-Z0-9-]+)-(?P<step>\d+)/', SurveyDetail.as_view(), name='survey-detail-step'),
+     url(r'^survey/(?P<id>\d+)-(?P<step>\d+)/', SurveyDetail.as_view(), name='survey-detail-step'),
-     url(r'^confirm/(?P<uuid>\w+)/', ConfirmView.as_view(), name='survey-confirmation'),
+     url(r'^confirm/(?P<uuid>\w+)/', ConfirmView.as_view(), name='survey-confirmation'),
  )
```
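The crash presumably came from the view doing an integer lookup on `id`; with `\d+` a lettered id never matches the pattern, so Django answers 404 instead of handing the view a non-numeric value. A quick check of the two regexes:

```python
import re

old = re.compile(r'^survey/(?P<id>[a-zA-Z0-9-]+)/')
new = re.compile(r'^survey/(?P<id>\d+)/')

print(old.match('survey/abc/') is not None)  # True  -- letters reached the view
print(new.match('survey/abc/') is not None)  # False -- now resolves to a 404
print(new.match('survey/42/') is not None)   # True
```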
---

commit 731e48b1b81e9249fc8bdd0f826c6e009559fcc3
file: mempoke.py · lang: Python · license: mit
repos: fmfi-svt-deadlock/hw-testing
subject: Add mechanism for defining MCU control structures

Code before:

```python
import gdb
import struct


class DeviceMemory:
    def __init__(self):
        self.inferior = gdb.selected_inferior()

    def __del__(self):
        del self.inferior

    def read(self, address):
        return struct.unpack('I', self.inferior.read_memory(address, 4))[0]

    def write(self, address, value):
        value_bytes = struct.pack('I', value)
        self.inferior.write_memory(address, value_bytes)
```

Change: the commit appends the two factory helpers below to the end of the file:

```python
def create_memory_reg(offset, name):
    def reg_getter(self):
        return self.device_memory.read(self.address + offset)

    def reg_setter(self, value):
        self.device_memory.write(self.address + offset, value)

    return property(reg_getter, reg_setter, None, name)


def create_mem_struct(name, registers):
    structure_fields = {}

    for register, offset in registers:
        structure_fields[register] = create_memory_reg(offset, register)

    def memory_structure_init(self, address, device_memory):
        self.address = address
        self.device_memory = device_memory

    structure_fields['__init__'] = memory_structure_init

    return type(name, (object,), structure_fields)
```
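The two helpers synthesize register-mapped classes at runtime: each (name, offset) pair becomes a property that reads or writes device memory at base + offset. A hypothetical usage sketch follows; the register names, offsets and base address are made up, and `DeviceMemory` only works inside a live gdb session:

```python
# Hypothetical register map -- names and offsets are illustrative only.
GPIO = create_mem_struct('GPIO', [('MODER', 0x00), ('ODR', 0x14)])

mem = DeviceMemory()            # needs gdb with an attached inferior
port_a = GPIO(0x48000000, mem)  # placeholder base address

port_a.ODR = 0x1                # property setter -> mem.write(base + 0x14, 0x1)
print(port_a.MODER)             # property getter -> mem.read(base + 0x00)
```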
---

commit e86f62edb2edf9dd5d20eb2bf89b09c76807de50
file: tests/cupy_tests/core_tests/test_array_function.py · lang: Python · license: mit
repos: cupy/cupy
subject: Add tests for NumPy _implementation usage

Code before:

```python
import unittest

import numpy
import six

import cupy
from cupy import testing


@testing.gpu
class TestArrayFunction(unittest.TestCase):

    @testing.with_requires('numpy>=1.17.0')
    def test_array_function(self):
        a = numpy.random.randn(100, 100)
        a_cpu = numpy.asarray(a)
        a_gpu = cupy.asarray(a)

        # The numpy call for both CPU and GPU arrays is intentional to test the
        # __array_function__ protocol
        qr_cpu = numpy.linalg.qr(a_cpu)
        qr_gpu = numpy.linalg.qr(a_gpu)

        if isinstance(qr_cpu, tuple):
            for b_cpu, b_gpu in six.moves.zip(qr_cpu, qr_gpu):
                self.assertEqual(b_cpu.dtype, b_gpu.dtype)
                cupy.testing.assert_allclose(b_cpu, b_gpu, atol=1e-4)
        else:
            self.assertEqual(qr_cpu.dtype, qr_gpu.dtype)
            cupy.testing.assert_allclose(qr_cpu, qr_gpu, atol=1e-4)
```

Change: the commit appends three more test methods to the class:

```python
    @testing.numpy_cupy_equal()
    def test_array_function_can_cast(self, xp):
        return numpy.can_cast(xp.arange(2), 'f4')

    @testing.numpy_cupy_equal()
    def test_array_function_common_type(self, xp):
        return numpy.common_type(xp.arange(2, dtype='f8'), xp.arange(2, dtype='f4'))

    @testing.numpy_cupy_equal()
    def test_array_function_result_type(self, xp):
        return numpy.result_type(3, xp.arange(2, dtype='f8'))
```
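These tests lean on NumPy's `__array_function__` protocol (NumPy >= 1.17): calling a numpy function on a cupy array dispatches to CuPy's implementation, so the data never leaves the GPU. A minimal illustration, assuming a CUDA device is available:

```python
import numpy
import cupy

a_gpu = cupy.asarray(numpy.random.randn(4, 4))

# numpy.linalg.qr sees a cupy.ndarray and hands the call to CuPy.
q, r = numpy.linalg.qr(a_gpu)
print(type(q))  # <class 'cupy.ndarray'> -- the result stays on the GPU
```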
---

commit 834637f8860f6b2d99726f9f531d05884e375ea3
file: setup.py · lang: Python · license: bsd-3-clause
repos: playpauseandstop/Flask-And-Redis
subject: Fix install requirements due to Python versions.

Code before:

```python
import os
import re

from distutils.core import setup

DIRNAME = os.path.abspath(os.path.dirname(__file__))
rel = lambda *parts: os.path.abspath(os.path.join(DIRNAME, *parts))

README = open(rel('README.rst')).read()
INIT_PY = open(rel('flask_redis.py')).read()

VERSION = re.findall("__version__ = '([^']+)'", INIT_PY)[0]

setup(
    name='Flask-And-Redis',
    version=VERSION,
    description='Simple as dead support of Redis database for Flask apps.',
    long_description=README,
    author='Igor Davydenko',
    author_email='[email protected]',
    url='https://github.com/playpauseandstop/Flask-And-Redis',
    install_requires=[
        'Flask',
        'redis',
    ],
    py_modules=[
        'flask_redis',
    ],
    platforms='any',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Web Environment',
        'Operating System :: OS Independent',
        'Topic :: Utilities',
        'Programming Language :: Python',
        'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'License :: OSI Approved :: BSD License',
    ],
    keywords='flask redis',
    license='BSD License',
)
```

Change (ndiff, two regions):

```diff
  import os
  import re
+ import sys

  from distutils.core import setup

  DIRNAME = os.path.abspath(os.path.dirname(__file__))
  rel = lambda *parts: os.path.abspath(os.path.join(DIRNAME, *parts))

- README = open(rel('README.rst')).read()
- INIT_PY = open(rel('flask_redis.py')).read()
+ with open(rel('README.rst')) as handler:
+     README = handler.read()
+
+ with open(rel('flask_redis.py')) as handler:
+     INIT_PY = handler.read()
+
+ INSTALL_REQUIRES = {
+     2: ['Flask>=0.8', 'redis>=2.4.11'],
+     3: ['Flask>=0.10.1', 'redis>=2.6.2'],
+ }

  VERSION = re.findall("__version__ = '([^']+)'", INIT_PY)[0]
```

```diff
      url='https://github.com/playpauseandstop/Flask-And-Redis',
+     install_requires=INSTALL_REQUIRES[sys.version_info[0]],
+     py_modules=['flask_redis'],
-     install_requires=[
-         'Flask',
-         'redis',
-     ],
-     py_modules=[
-         'flask_redis',
-     ],
      platforms='any',
```
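The dict keyed by major version is the whole trick: `sys.version_info[0]` is 2 or 3, so installation picks the pin set matching the running interpreter. A two-line check:

```python
import sys

INSTALL_REQUIRES = {
    2: ['Flask>=0.8', 'redis>=2.4.11'],
    3: ['Flask>=0.10.1', 'redis>=2.6.2'],
}

# Under Python 3 this prints the newer pins automatically.
print(INSTALL_REQUIRES[sys.version_info[0]])
```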
---

commit 4bc9d1b51cd735c366edce81cd4e36e2eca904c7
file: worker/models/spotify_artist.py · lang: Python · license: mit
repos: projectweekend/song-feed-worker
subject: Add echo nest to artist model

Code before:

```python
from spotify_item import SpotifyItem


class Artist(SpotifyItem):
    def __init__(self, **entries):
        super(Artist, self).__init__(**entries)

    def __repr__(self):
        return '<Artist: {0}>'.format(self.name)
```

Change (ndiff):

```diff
  from spotify_item import SpotifyItem
+ from pyechonest import config
+ from pyechonest import artist
+ from worker.config import ECHO_NEST_API_KEY
+
+
+ config.ECHO_NEST_API_KEY = ECHO_NEST_API_KEY


  class Artist(SpotifyItem):
      def __init__(self, **entries):
          super(Artist, self).__init__(**entries)
+         self.echonest = artist.Artist(self.uri)

      def __repr__(self):
          return '<Artist: {0}>'.format(self.name)
```
---

commit 1e7c53398e6a4a72d4027524622b32ee1819f154
file: zpr.py · lang: Python · license: apache-2.0
repos: mattkirby/zpr-api
subject: Add host and remove debug

Code before:

```python
import json

import lib_zpr
from flask import Flask, jsonify, make_response

app = Flask(__name__)
api_version = 'v1.0'
api_base = str('/zpr/{v}'.format(v=api_version))


@app.errorhandler(404)
def not_found(error):
    return make_response(jsonify({'error': 'Not found'}), 404)


# @app.route('/zpr/job')
# def ls_test():
#     return json.dumps(call('ls'))


@app.route('{a}/job/<backup_host>'.format(a=api_base), methods=['GET'])
def check_job(backup_host):
    job = str(lib_zpr.check_zpr_rsync_job(backup_host))
    return json.dumps(job)


@app.route('{a}/job/duplicity/<backup_host>'.format(a=api_base), methods=['GET'])
def check_offsite_job(backup_host):
    lib_zpr.check_duplicity_job(backup_host, print_output=False)
    return json.dumps(str(lib_zpr.check_duplicity_out[0]))


if __name__ == '__main__':
    app.run(debug=True)
```

Change (ndiff; the interactive debugger goes away and the app binds explicitly to localhost):

```diff
  if __name__ == '__main__':
-     app.run(debug=True)
+     app.run(host='127.0.0.1')
```
dfdb824eb1327a270e1c167e2ed5e161026858ea
antxetamedia/multimedia/handlers.py
antxetamedia/multimedia/handlers.py
from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.exception import S3ResponseError, S3CreateError

from django.conf import settings

def upload(user, passwd, bucket, metadata, key, fd):
    conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)

    while bucket.endswith('-'):
        bucket = bucket[:-1]

    try:
        bucket = conn.get_bucket(bucket)
    except S3ResponseError:
        try:
            bucket = conn.create_bucket(bucket, headers=metadata)
        except (S3ResponseError, UnicodeDecodeError):
            bucket = conn.create_bucket(bucket)
        except S3CreateError as e:
            if e.status == 409:
                bucket = Bucket(conn, bucket)

    key = bucket.new_key(key)
    try:
        key.set_contents_from_file(fd)
    except S3ResponseError:
        key.set_contents_from_file(fd)
    return key.generate_url(0).split('?')[0]
from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.exception import S3ResponseError, S3CreateError

from django.conf import settings

def upload(user, passwd, bucket, metadata, key, fd):
    conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)

    while bucket.endswith('-'):
        bucket = bucket[:-1]

    try:
        bucket = conn.get_bucket(bucket)
    except S3ResponseError:
        try:
            bucket = conn.create_bucket(bucket, headers=metadata)
        except S3CreateError as e:
            if e.status == 409:
                bucket = Bucket(conn, bucket)

    key = bucket.new_key(key)
    try:
        key.set_contents_from_file(fd)
    except S3ResponseError:
        key.set_contents_from_file(fd)
    return key.generate_url(0).split('?')[0]
Break loudly on unicode errors
Break loudly on unicode errors
Python
agpl-3.0
GISAElkartea/antxetamedia,GISAElkartea/antxetamedia,GISAElkartea/antxetamedia
from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.exception import S3ResponseError, S3CreateError

from django.conf import settings

def upload(user, passwd, bucket, metadata, key, fd):
    conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)

    while bucket.endswith('-'):
        bucket = bucket[:-1]

    try:
        bucket = conn.get_bucket(bucket)
    except S3ResponseError:
        try:
            bucket = conn.create_bucket(bucket, headers=metadata)
-         except (S3ResponseError, UnicodeDecodeError):
-             bucket = conn.create_bucket(bucket)
        except S3CreateError as e:
            if e.status == 409:
                bucket = Bucket(conn, bucket)

    key = bucket.new_key(key)
    try:
        key.set_contents_from_file(fd)
    except S3ResponseError:
        key.set_contents_from_file(fd)
    return key.generate_url(0).split('?')[0]
Break loudly on unicode errors
## Code Before:
from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.exception import S3ResponseError, S3CreateError

from django.conf import settings

def upload(user, passwd, bucket, metadata, key, fd):
    conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)

    while bucket.endswith('-'):
        bucket = bucket[:-1]

    try:
        bucket = conn.get_bucket(bucket)
    except S3ResponseError:
        try:
            bucket = conn.create_bucket(bucket, headers=metadata)
        except (S3ResponseError, UnicodeDecodeError):
            bucket = conn.create_bucket(bucket)
        except S3CreateError as e:
            if e.status == 409:
                bucket = Bucket(conn, bucket)

    key = bucket.new_key(key)
    try:
        key.set_contents_from_file(fd)
    except S3ResponseError:
        key.set_contents_from_file(fd)
    return key.generate_url(0).split('?')[0]
## Instruction:
Break loudly on unicode errors
## Code After:
from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.exception import S3ResponseError, S3CreateError

from django.conf import settings

def upload(user, passwd, bucket, metadata, key, fd):
    conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)

    while bucket.endswith('-'):
        bucket = bucket[:-1]

    try:
        bucket = conn.get_bucket(bucket)
    except S3ResponseError:
        try:
            bucket = conn.create_bucket(bucket, headers=metadata)
        except S3CreateError as e:
            if e.status == 409:
                bucket = Bucket(conn, bucket)

    key = bucket.new_key(key)
    try:
        key.set_contents_from_file(fd)
    except S3ResponseError:
        key.set_contents_from_file(fd)
    return key.generate_url(0).split('?')[0]
from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.exception import S3ResponseError, S3CreateError

from django.conf import settings

def upload(user, passwd, bucket, metadata, key, fd):
    conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)

    while bucket.endswith('-'):
        bucket = bucket[:-1]

    try:
        bucket = conn.get_bucket(bucket)
    except S3ResponseError:
        try:
            bucket = conn.create_bucket(bucket, headers=metadata)
-         except (S3ResponseError, UnicodeDecodeError):
-             bucket = conn.create_bucket(bucket)
        except S3CreateError as e:
            if e.status == 409:
                bucket = Bucket(conn, bucket)

    key = bucket.new_key(key)
    try:
        key.set_contents_from_file(fd)
    except S3ResponseError:
        key.set_contents_from_file(fd)
    return key.generate_url(0).split('?')[0]
78a596ba34a3a8a7435dd6ca997e6b6cb79fbdd6
setup.py
setup.py
from __future__ import print_function

from distutils.core import setup
import os

version = '1.0.0b'

# Append TeamCity build number if it gives us one.
if 'TC_BUILD_NUMBER' in os.environ and version.endswith('b'):
    version += '' + os.environ['TC_BUILD_NUMBER']

setup(name='fetch',
      maintainer='Jeremy Hooke',
      maintainer_email='[email protected]',
      version=version,
      description='Automatic retrieval of ancillary and data',
      packages=[
          'fetch',
      ],
      scripts=[
          'bin/fetch-service'
      ],
      requires=[
          'neocommon',
          'requests',
          'feedparser',
          'lxml',
          'setproctitle',
          'pyyaml',
          'arrow'
      ]
      )
from __future__ import print_function

from distutils.core import setup
import os

version = '1.0.0b'

# Append TeamCity build number if it gives us one.
if 'TC_BUILD_NUMBER' in os.environ and version.endswith('b'):
    version += '' + os.environ['TC_BUILD_NUMBER']

setup(name='fetch',
      maintainer='Jeremy Hooke',
      maintainer_email='[email protected]',
      version=version,
      description='Automatic retrieval of ancillary and data',
      packages=[
          'fetch',
      ],
      scripts=[
          'bin/fetch-service'
      ],
      requires=[
          'arrow',
          'croniter',
          'feedparser',
          'lxml',
          'neocommon',
          'pyyaml',
          'requests',
          'setproctitle',
      ]
      )
Add croniter dependency. Sort deps.
Add croniter dependency. Sort deps.
Python
apache-2.0
GeoscienceAustralia/fetch,GeoscienceAustralia/fetch
from __future__ import print_function

from distutils.core import setup
import os

version = '1.0.0b'

# Append TeamCity build number if it gives us one.
if 'TC_BUILD_NUMBER' in os.environ and version.endswith('b'):
    version += '' + os.environ['TC_BUILD_NUMBER']

setup(name='fetch',
      maintainer='Jeremy Hooke',
      maintainer_email='[email protected]',
      version=version,
      description='Automatic retrieval of ancillary and data',
      packages=[
          'fetch',
      ],
      scripts=[
          'bin/fetch-service'
      ],
      requires=[
-           'neocommon',
-           'requests',
+           'arrow',
+           'croniter',
          'feedparser',
          'lxml',
+           'neocommon',
+           'pyyaml',
+           'requests',
          'setproctitle',
-           'pyyaml',
-           'arrow'
      ]
      )
Add croniter dependency. Sort deps.
## Code Before:
from __future__ import print_function

from distutils.core import setup
import os

version = '1.0.0b'

# Append TeamCity build number if it gives us one.
if 'TC_BUILD_NUMBER' in os.environ and version.endswith('b'):
    version += '' + os.environ['TC_BUILD_NUMBER']

setup(name='fetch',
      maintainer='Jeremy Hooke',
      maintainer_email='[email protected]',
      version=version,
      description='Automatic retrieval of ancillary and data',
      packages=[
          'fetch',
      ],
      scripts=[
          'bin/fetch-service'
      ],
      requires=[
          'neocommon',
          'requests',
          'feedparser',
          'lxml',
          'setproctitle',
          'pyyaml',
          'arrow'
      ]
      )
## Instruction:
Add croniter dependency. Sort deps.
## Code After:
from __future__ import print_function

from distutils.core import setup
import os

version = '1.0.0b'

# Append TeamCity build number if it gives us one.
if 'TC_BUILD_NUMBER' in os.environ and version.endswith('b'):
    version += '' + os.environ['TC_BUILD_NUMBER']

setup(name='fetch',
      maintainer='Jeremy Hooke',
      maintainer_email='[email protected]',
      version=version,
      description='Automatic retrieval of ancillary and data',
      packages=[
          'fetch',
      ],
      scripts=[
          'bin/fetch-service'
      ],
      requires=[
          'arrow',
          'croniter',
          'feedparser',
          'lxml',
          'neocommon',
          'pyyaml',
          'requests',
          'setproctitle',
      ]
      )
from __future__ import print_function

from distutils.core import setup
import os

version = '1.0.0b'

# Append TeamCity build number if it gives us one.
if 'TC_BUILD_NUMBER' in os.environ and version.endswith('b'):
    version += '' + os.environ['TC_BUILD_NUMBER']

setup(name='fetch',
      maintainer='Jeremy Hooke',
      maintainer_email='[email protected]',
      version=version,
      description='Automatic retrieval of ancillary and data',
      packages=[
          'fetch',
      ],
      scripts=[
          'bin/fetch-service'
      ],
      requires=[
-           'neocommon',
-           'requests',
+           'arrow',
+           'croniter',
          'feedparser',
          'lxml',
+           'neocommon',
+           'pyyaml',
+           'requests',
          'setproctitle',
-           'pyyaml',
-           'arrow'
      ]
      )
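A packaging aside on the record above: the distutils `requires` keyword used here is purely declarative metadata — nothing is downloaded or verified at install time. Projects that want pip to resolve dependencies typically use setuptools' `install_requires` instead. A hedged sketch of that form, with an invented package name:

from setuptools import setup, find_packages

setup(
    name='example-package',   # illustrative, not the project above
    version='1.0.0',
    packages=find_packages(),
    # Unlike distutils' `requires`, pip installs these automatically.
    install_requires=[
        'arrow',
        'croniter',
        'requests',
    ],
)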
9a18cd0cb6366d45803c19301843ddda3a362cfb
tests/test_publisher.py
tests/test_publisher.py
from lektor.publisher import Command

def test_Command_triggers_no_warnings(recwarn):
    # This exercises the issue where publishing via rsync resulted
    # in ResourceWarnings about unclosed streams.

    # This is essentially how RsyncPublisher runs rsync.
    with Command(["echo"]) as client:
        for _ in client:
            pass

    # Delete our reference so that the Command instance gets garbage
    # collected here. Otherwise, gc will not happen until after the
    # test completes and warnings emitted during gc will not be captured
    # by the recwarn fixture.
    del client

    for warning in recwarn.list:
        print(warning)
    assert len(recwarn) == 0
import gc
import warnings
import weakref

import pytest

from lektor.publisher import Command

def test_Command_triggers_no_warnings():
    # This exercises the issue where publishing via rsync resulted
    # in ResourceWarnings about unclosed streams.
    with pytest.warns(None) as record:
        # This is essentially how RsyncPublisher runs rsync.
        with Command(["echo"]) as client:
            for _ in client:
                pass

        # The ResourceWarnings regarding unclosed files we are checking for
        # are issued during finalization. Without this extra effort,
        # finalization wouldn't happen until after the test completes.
        client_is_alive = weakref.ref(client)
        del client
        if client_is_alive():
            gc.collect()

    if client_is_alive():
        warnings.warn(
            "Unable to trigger garbage collection of Command instance, "
            "so unable to check for warnings issued during finalization."
        )

    for warning in record.list:
        print(warning)
    assert len(record) == 0
Reword comment, add check that Command is actually garbage collected
Reword comment, add check that Command is actually garbage collected
Python
bsd-3-clause
lektor/lektor,lektor/lektor,lektor/lektor,lektor/lektor
+ import gc
+ import warnings
+ import weakref
+
+ import pytest
+
from lektor.publisher import Command

- def test_Command_triggers_no_warnings(recwarn):
+ def test_Command_triggers_no_warnings():
    # This exercises the issue where publishing via rsync resulted
    # in ResourceWarnings about unclosed streams.
+     with pytest.warns(None) as record:
-     # This is essentially how RsyncPublisher runs rsync.
+         # This is essentially how RsyncPublisher runs rsync.
-     with Command(["echo"]) as client:
+         with Command(["echo"]) as client:
-         for _ in client:
+             for _ in client:
-             pass
+                 pass

-     # Delete our reference so that the Command instance gets garbage
-     # collected here. Otherwise, gc will not happen until after the
-     # test completes and warnings emitted during gc will not be captured
-     # by the recwarn fixture.
-     del client
+         # The ResourceWarnings regarding unclosed files we are checking for
+         # are issued during finalization. Without this extra effort,
+         # finalization wouldn't happen until after the test completes.
+         client_is_alive = weakref.ref(client)
+         del client
+         if client_is_alive():
+             gc.collect()
+
+     if client_is_alive():
+         warnings.warn(
+             "Unable to trigger garbage collection of Command instance, "
+             "so unable to check for warnings issued during finalization."
+         )
+
-     for warning in recwarn.list:
+     for warning in record.list:
        print(warning)
-     assert len(recwarn) == 0
+     assert len(record) == 0
Reword comment, add check that Command is actually garbage collected
## Code Before:
from lektor.publisher import Command

def test_Command_triggers_no_warnings(recwarn):
    # This exercises the issue where publishing via rsync resulted
    # in ResourceWarnings about unclosed streams.

    # This is essentially how RsyncPublisher runs rsync.
    with Command(["echo"]) as client:
        for _ in client:
            pass

    # Delete our reference so that the Command instance gets garbage
    # collected here. Otherwise, gc will not happen until after the
    # test completes and warnings emitted during gc will not be captured
    # by the recwarn fixture.
    del client

    for warning in recwarn.list:
        print(warning)
    assert len(recwarn) == 0
## Instruction:
Reword comment, add check that Command is actually garbage collected
## Code After:
import gc
import warnings
import weakref

import pytest

from lektor.publisher import Command

def test_Command_triggers_no_warnings():
    # This exercises the issue where publishing via rsync resulted
    # in ResourceWarnings about unclosed streams.
    with pytest.warns(None) as record:
        # This is essentially how RsyncPublisher runs rsync.
        with Command(["echo"]) as client:
            for _ in client:
                pass

        # The ResourceWarnings regarding unclosed files we are checking for
        # are issued during finalization. Without this extra effort,
        # finalization wouldn't happen until after the test completes.
        client_is_alive = weakref.ref(client)
        del client
        if client_is_alive():
            gc.collect()

    if client_is_alive():
        warnings.warn(
            "Unable to trigger garbage collection of Command instance, "
            "so unable to check for warnings issued during finalization."
        )

    for warning in record.list:
        print(warning)
    assert len(record) == 0
+ import gc
+ import warnings
+ import weakref
+
+ import pytest
+
from lektor.publisher import Command

- def test_Command_triggers_no_warnings(recwarn):
?                                       -------
+ def test_Command_triggers_no_warnings():
    # This exercises the issue where publishing via rsync resulted
    # in ResourceWarnings about unclosed streams.
+     with pytest.warns(None) as record:
-     # This is essentially how RsyncPublisher runs rsync.
+         # This is essentially how RsyncPublisher runs rsync.
?     ++++
-     with Command(["echo"]) as client:
+         with Command(["echo"]) as client:
?     ++++
-         for _ in client:
+             for _ in client:
?         ++++
-             pass
+                 pass
?             ++++

-     # Delete our reference so that the Command instance gets garbage
-     # collected here. Otherwise, gc will not happen until after the
-     # test completes and warnings emitted during gc will not be captured
-     # by the recwarn fixture.
-     del client
+         # The ResourceWarnings regarding unclosed files we are checking for
+         # are issued during finalization. Without this extra effort,
+         # finalization wouldn't happen until after the test completes.
+         client_is_alive = weakref.ref(client)
+         del client
+         if client_is_alive():
+             gc.collect()
+
+     if client_is_alive():
+         warnings.warn(
+             "Unable to trigger garbage collection of Command instance, "
+             "so unable to check for warnings issued during finalization."
+         )
+
-     for warning in recwarn.list:
?                        ^^ ^
+     for warning in record.list:
?                        ^ ^
        print(warning)
-     assert len(recwarn) == 0
?                   ^^ ^
+     assert len(record) == 0
?                   ^ ^
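The weakref-plus-gc.collect() move in this commit is a general technique for forcing finalizers to run inside the body of a test instead of after it. A self-contained sketch, with an invented class standing in for the library's Command object:

import gc
import weakref

class Resource:
    def __del__(self):
        # Finalizer; real code might emit a ResourceWarning here.
        print('finalized')

obj = Resource()
alive = weakref.ref(obj)     # a weak reference does not keep obj alive
del obj                      # drop the only strong reference
if alive() is not None:      # e.g. the object sits in a reference cycle
    gc.collect()             # force collection so __del__ runs now
assert alive() is None       # finalization has happened by this point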
27c54cfd5eaf180595e671c80bd7c39406c8a24c
databroker/__init__.py
databroker/__init__.py
import intake
del intake
import warnings
import logging

logger = logging.getLogger(__name__)

from ._core import (Broker, BrokerES, Header, ALL,
                    lookup_config, list_configs, describe_configs, temp_config,
                    wrap_in_doct,
                    DeprecatedDoct, wrap_in_deprecated_doct)
from .discovery import MergedCatalog, EntrypointsCatalog, V0Catalog

# A catalog created from discovered entrypoints and v0 catalogs.
catalog = MergedCatalog([EntrypointsCatalog(), V0Catalog()])

# set version string using versioneer
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions

### Legacy imports ###

try:
    from .databroker import DataBroker
except ImportError:
    pass
else:
    from .databroker import (DataBroker, DataBroker as db,
                             get_events, get_table, stream, get_fields,
                             restream, process)
    from .pims_readers import get_images
import intake
del intake
import warnings
import logging

logger = logging.getLogger(__name__)

from .v1 import Broker, Header, ALL, temp, temp_config
from .utils import (lookup_config, list_configs, describe_configs,
                    wrap_in_doct, DeprecatedDoct, wrap_in_deprecated_doct)
from .discovery import MergedCatalog, EntrypointsCatalog, V0Catalog

# A catalog created from discovered entrypoints and v0 catalogs.
catalog = MergedCatalog([EntrypointsCatalog(), V0Catalog()])

# set version string using versioneer
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions

### Legacy imports ###

try:
    from .databroker import DataBroker
except ImportError:
    pass
else:
    from .databroker import (DataBroker, DataBroker as db,
                             get_events, get_table, stream, get_fields,
                             restream, process)
    from .pims_readers import get_images
Move top-level imports from v0 to v1.
Move top-level imports from v0 to v1.
Python
bsd-3-clause
ericdill/databroker,ericdill/databroker
import intake
del intake
import warnings
import logging

logger = logging.getLogger(__name__)

+ from .v1 import Broker, Header, ALL, temp, temp_config
+ from .utils import (lookup_config, list_configs, describe_configs,
- from ._core import (Broker, BrokerES, Header, ALL,
-                     lookup_config, list_configs, describe_configs, temp_config,
-                     wrap_in_doct,
-                     DeprecatedDoct, wrap_in_deprecated_doct)
+                     wrap_in_doct, DeprecatedDoct, wrap_in_deprecated_doct)
from .discovery import MergedCatalog, EntrypointsCatalog, V0Catalog

# A catalog created from discovered entrypoints and v0 catalogs.
catalog = MergedCatalog([EntrypointsCatalog(), V0Catalog()])

# set version string using versioneer
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions

### Legacy imports ###

try:
    from .databroker import DataBroker
except ImportError:
    pass
else:
    from .databroker import (DataBroker, DataBroker as db,
                             get_events, get_table, stream, get_fields,
                             restream, process)
    from .pims_readers import get_images
Move top-level imports from v0 to v1.
## Code Before:
import intake
del intake
import warnings
import logging

logger = logging.getLogger(__name__)

from ._core import (Broker, BrokerES, Header, ALL,
                    lookup_config, list_configs, describe_configs, temp_config,
                    wrap_in_doct,
                    DeprecatedDoct, wrap_in_deprecated_doct)
from .discovery import MergedCatalog, EntrypointsCatalog, V0Catalog

# A catalog created from discovered entrypoints and v0 catalogs.
catalog = MergedCatalog([EntrypointsCatalog(), V0Catalog()])

# set version string using versioneer
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions

### Legacy imports ###

try:
    from .databroker import DataBroker
except ImportError:
    pass
else:
    from .databroker import (DataBroker, DataBroker as db,
                             get_events, get_table, stream, get_fields,
                             restream, process)
    from .pims_readers import get_images
## Instruction:
Move top-level imports from v0 to v1.
## Code After:
import intake
del intake
import warnings
import logging

logger = logging.getLogger(__name__)

from .v1 import Broker, Header, ALL, temp, temp_config
from .utils import (lookup_config, list_configs, describe_configs,
                    wrap_in_doct, DeprecatedDoct, wrap_in_deprecated_doct)
from .discovery import MergedCatalog, EntrypointsCatalog, V0Catalog

# A catalog created from discovered entrypoints and v0 catalogs.
catalog = MergedCatalog([EntrypointsCatalog(), V0Catalog()])

# set version string using versioneer
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions

### Legacy imports ###

try:
    from .databroker import DataBroker
except ImportError:
    pass
else:
    from .databroker import (DataBroker, DataBroker as db,
                             get_events, get_table, stream, get_fields,
                             restream, process)
    from .pims_readers import get_images
import intake
del intake
import warnings
import logging

logger = logging.getLogger(__name__)

+ from .v1 import Broker, Header, ALL, temp, temp_config
+ from .utils import (lookup_config, list_configs, describe_configs,
- from ._core import (Broker, BrokerES, Header, ALL,
-                     lookup_config, list_configs, describe_configs, temp_config,
-                     wrap_in_doct,
-                     DeprecatedDoct, wrap_in_deprecated_doct)
+                     wrap_in_doct, DeprecatedDoct, wrap_in_deprecated_doct)
?                     ++++++++++++++
from .discovery import MergedCatalog, EntrypointsCatalog, V0Catalog

# A catalog created from discovered entrypoints and v0 catalogs.
catalog = MergedCatalog([EntrypointsCatalog(), V0Catalog()])

# set version string using versioneer
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions

### Legacy imports ###

try:
    from .databroker import DataBroker
except ImportError:
    pass
else:
    from .databroker import (DataBroker, DataBroker as db,
                             get_events, get_table, stream, get_fields,
                             restream, process)
    from .pims_readers import get_images
eb391dde8a157252a98fc9bb9b617bc821f7285a
email_from_template/utils.py
email_from_template/utils.py
from django.utils.functional import memoize

from . import app_settings

def get_render_method():
    return from_dotted_path(app_settings.EMAIL_RENDER_METHOD)
get_render_method = memoize(get_render_method, {}, 0)

def get_context_processors():
    return [from_dotted_path(x) for x in app_settings.EMAIL_CONTEXT_PROCESSORS]
get_context_processors = memoize(get_context_processors, {}, 0)

def from_dotted_path(fullpath):
    """
    Returns the specified attribute of a module, specified by a string.

    ``from_dotted_path('a.b.c.d')`` is roughly equivalent to::

        from a.b.c import d

    except that ``d`` is returned and not entered into the current namespace.
    """

    module, attr = fullpath.rsplit('.', 1)

    return getattr(__import__(module, {}, {}, (attr,)), attr)
from django.utils.lru_cache import lru_cache

from . import app_settings

@lru_cache
def get_render_method():
    return from_dotted_path(app_settings.EMAIL_RENDER_METHOD)

@lru_cache
def get_context_processors():
    return [from_dotted_path(x) for x in app_settings.EMAIL_CONTEXT_PROCESSORS]

def from_dotted_path(fullpath):
    """
    Returns the specified attribute of a module, specified by a string.

    ``from_dotted_path('a.b.c.d')`` is roughly equivalent to::

        from a.b.c import d

    except that ``d`` is returned and not entered into the current namespace.
    """

    module, attr = fullpath.rsplit('.', 1)

    return getattr(__import__(module, {}, {}, (attr,)), attr)
Use @lru_cache now that memoize is gone.
Use @lru_cache now that memoize is gone.
Python
bsd-3-clause
lamby/django-email-from-template
- from django.utils.functional import memoize
+ from django.utils.lru_cache import lru_cache

from . import app_settings

+ @lru_cache
def get_render_method():
    return from_dotted_path(app_settings.EMAIL_RENDER_METHOD)
- get_render_method = memoize(get_render_method, {}, 0)

+ @lru_cache
def get_context_processors():
    return [from_dotted_path(x) for x in app_settings.EMAIL_CONTEXT_PROCESSORS]
- get_context_processors = memoize(get_context_processors, {}, 0)

def from_dotted_path(fullpath):
    """
    Returns the specified attribute of a module, specified by a string.

    ``from_dotted_path('a.b.c.d')`` is roughly equivalent to::

        from a.b.c import d

    except that ``d`` is returned and not entered into the current namespace.
    """

    module, attr = fullpath.rsplit('.', 1)

    return getattr(__import__(module, {}, {}, (attr,)), attr)
Use @lru_cache now that memoize is gone.
## Code Before:
from django.utils.functional import memoize

from . import app_settings

def get_render_method():
    return from_dotted_path(app_settings.EMAIL_RENDER_METHOD)
get_render_method = memoize(get_render_method, {}, 0)

def get_context_processors():
    return [from_dotted_path(x) for x in app_settings.EMAIL_CONTEXT_PROCESSORS]
get_context_processors = memoize(get_context_processors, {}, 0)

def from_dotted_path(fullpath):
    """
    Returns the specified attribute of a module, specified by a string.

    ``from_dotted_path('a.b.c.d')`` is roughly equivalent to::

        from a.b.c import d

    except that ``d`` is returned and not entered into the current namespace.
    """

    module, attr = fullpath.rsplit('.', 1)

    return getattr(__import__(module, {}, {}, (attr,)), attr)
## Instruction:
Use @lru_cache now that memoize is gone.
## Code After:
from django.utils.lru_cache import lru_cache

from . import app_settings

@lru_cache
def get_render_method():
    return from_dotted_path(app_settings.EMAIL_RENDER_METHOD)

@lru_cache
def get_context_processors():
    return [from_dotted_path(x) for x in app_settings.EMAIL_CONTEXT_PROCESSORS]

def from_dotted_path(fullpath):
    """
    Returns the specified attribute of a module, specified by a string.

    ``from_dotted_path('a.b.c.d')`` is roughly equivalent to::

        from a.b.c import d

    except that ``d`` is returned and not entered into the current namespace.
    """

    module, attr = fullpath.rsplit('.', 1)

    return getattr(__import__(module, {}, {}, (attr,)), attr)
- from django.utils.functional import memoize
+ from django.utils.lru_cache import lru_cache

from . import app_settings

+ @lru_cache
def get_render_method():
    return from_dotted_path(app_settings.EMAIL_RENDER_METHOD)
- get_render_method = memoize(get_render_method, {}, 0)

+ @lru_cache
def get_context_processors():
    return [from_dotted_path(x) for x in app_settings.EMAIL_CONTEXT_PROCESSORS]
- get_context_processors = memoize(get_context_processors, {}, 0)

def from_dotted_path(fullpath):
    """
    Returns the specified attribute of a module, specified by a string.

    ``from_dotted_path('a.b.c.d')`` is roughly equivalent to::

        from a.b.c import d

    except that ``d`` is returned and not entered into the current namespace.
    """

    module, attr = fullpath.rsplit('.', 1)

    return getattr(__import__(module, {}, {}, (attr,)), attr)
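On current Python the standard-library counterpart of the Django backport used above is functools.lru_cache; note that before Python 3.8 it had to be applied with parentheses, since a bare @lru_cache would pass the function itself as the maxsize argument. A small sketch:

import functools

@functools.lru_cache(maxsize=None)   # parentheses required before 3.8
def resolve(path):
    # Stand-in for an expensive, deterministic lookup.
    print('computing', path)
    return tuple(path.rsplit('.', 1))

resolve('a.b.c.d')   # prints 'computing a.b.c.d'
resolve('a.b.c.d')   # served from the cache, prints nothing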
f8290954b27e655562878d16df7e4793262f50d7
wafer/tickets/management/commands/import_quicket_guest_list.py
wafer/tickets/management/commands/import_quicket_guest_list.py
import csv

from django.core.management.base import BaseCommand, CommandError

from wafer.tickets.views import import_ticket

class Command(BaseCommand):
    args = '<csv file>'
    help = "Import a guest list CSV from Quicket"

    def handle(self, *args, **options):
        if len(args) != 1:
            raise CommandError('1 CSV File required')

        with open(args[0], 'r') as f:
            reader = csv.reader(f)
            header = next(reader)
            if len(header) != 11:
                raise CommandError('CSV format has changed. Update wafer')
            for ticket in reader:
                self.import_ticket(*ticket)

    def import_ticket(self, ticket_number, ticket_barcode, purchase_date,
            ticket_type, ticket_holder, email, cellphone, checked_in,
            checked_in_date, checked_in_by, complimentary):
        import_ticket(ticket_barcode, ticket_type, email)
import csv

from django.core.management.base import BaseCommand, CommandError

from wafer.tickets.views import import_ticket

class Command(BaseCommand):
    args = '<csv file>'
    help = "Import a guest list CSV from Quicket"

    def handle(self, *args, **options):
        if len(args) != 1:
            raise CommandError('1 CSV File required')

        columns = ('Ticket Number', 'Ticket Barcode', 'Purchase Date',
                   'Ticket Type', 'Ticket Holder', 'Email', 'Cellphone',
                   'Checked in', 'Checked in date', 'Checked in by',
                   'Complimentary')
        keys = [column.lower().replace(' ', '_') for column in columns]

        with open(args[0], 'r') as f:
            reader = csv.reader(f)
            header = tuple(next(reader))
            if header != columns:
                raise CommandError('CSV format has changed. Update wafer')
            for row in reader:
                ticket = dict(zip(keys, row))
                import_ticket(ticket['ticket_barcode'],
                              ticket['ticket_type'],
                              ticket['email'])
Check CSV header, not column count (and refactor)
Check CSV header, not column count (and refactor)
Python
isc
CarlFK/wafer,CarlFK/wafer,CarlFK/wafer,CarlFK/wafer,CTPUG/wafer,CTPUG/wafer,CTPUG/wafer,CTPUG/wafer
import csv

from django.core.management.base import BaseCommand, CommandError

from wafer.tickets.views import import_ticket

class Command(BaseCommand):
    args = '<csv file>'
    help = "Import a guest list CSV from Quicket"

    def handle(self, *args, **options):
        if len(args) != 1:
            raise CommandError('1 CSV File required')

+         columns = ('Ticket Number', 'Ticket Barcode', 'Purchase Date',
+                    'Ticket Type', 'Ticket Holder', 'Email', 'Cellphone',
+                    'Checked in', 'Checked in date', 'Checked in by',
+                    'Complimentary')
+         keys = [column.lower().replace(' ', '_') for column in columns]
+
        with open(args[0], 'r') as f:
            reader = csv.reader(f)
-             header = next(reader)
+             header = tuple(next(reader))
-             if len(header) != 11:
+             if header != columns:
                raise CommandError('CSV format has changed. Update wafer')
-             for ticket in reader:
+             for row in reader:
+                 ticket = dict(zip(keys, row))
+                 import_ticket(ticket['ticket_barcode'],
+                               ticket['ticket_type'],
+                               ticket['email'])
-                 self.import_ticket(*ticket)
-
-     def import_ticket(self, ticket_number, ticket_barcode, purchase_date,
-             ticket_type, ticket_holder, email, cellphone, checked_in,
-             checked_in_date, checked_in_by, complimentary):
-         import_ticket(ticket_barcode, ticket_type, email)
Check CSV header, not column count (and refactor)
## Code Before:
import csv

from django.core.management.base import BaseCommand, CommandError

from wafer.tickets.views import import_ticket

class Command(BaseCommand):
    args = '<csv file>'
    help = "Import a guest list CSV from Quicket"

    def handle(self, *args, **options):
        if len(args) != 1:
            raise CommandError('1 CSV File required')

        with open(args[0], 'r') as f:
            reader = csv.reader(f)
            header = next(reader)
            if len(header) != 11:
                raise CommandError('CSV format has changed. Update wafer')
            for ticket in reader:
                self.import_ticket(*ticket)

    def import_ticket(self, ticket_number, ticket_barcode, purchase_date,
            ticket_type, ticket_holder, email, cellphone, checked_in,
            checked_in_date, checked_in_by, complimentary):
        import_ticket(ticket_barcode, ticket_type, email)
## Instruction:
Check CSV header, not column count (and refactor)
## Code After:
import csv

from django.core.management.base import BaseCommand, CommandError

from wafer.tickets.views import import_ticket

class Command(BaseCommand):
    args = '<csv file>'
    help = "Import a guest list CSV from Quicket"

    def handle(self, *args, **options):
        if len(args) != 1:
            raise CommandError('1 CSV File required')

        columns = ('Ticket Number', 'Ticket Barcode', 'Purchase Date',
                   'Ticket Type', 'Ticket Holder', 'Email', 'Cellphone',
                   'Checked in', 'Checked in date', 'Checked in by',
                   'Complimentary')
        keys = [column.lower().replace(' ', '_') for column in columns]

        with open(args[0], 'r') as f:
            reader = csv.reader(f)
            header = tuple(next(reader))
            if header != columns:
                raise CommandError('CSV format has changed. Update wafer')
            for row in reader:
                ticket = dict(zip(keys, row))
                import_ticket(ticket['ticket_barcode'],
                              ticket['ticket_type'],
                              ticket['email'])
import csv

from django.core.management.base import BaseCommand, CommandError

from wafer.tickets.views import import_ticket

class Command(BaseCommand):
    args = '<csv file>'
    help = "Import a guest list CSV from Quicket"

    def handle(self, *args, **options):
        if len(args) != 1:
            raise CommandError('1 CSV File required')

+         columns = ('Ticket Number', 'Ticket Barcode', 'Purchase Date',
+                    'Ticket Type', 'Ticket Holder', 'Email', 'Cellphone',
+                    'Checked in', 'Checked in date', 'Checked in by',
+                    'Complimentary')
+         keys = [column.lower().replace(' ', '_') for column in columns]
+
        with open(args[0], 'r') as f:
            reader = csv.reader(f)
-             header = next(reader)
+             header = tuple(next(reader))
?                      ++++++             +
-             if len(header) != 11:
?                ----        -  ^^
+             if header != columns:
?                          ^^^^^^^
                raise CommandError('CSV format has changed. Update wafer')
-             for ticket in reader:
?                 ^^^^^^
+             for row in reader:
?                 ^^^
+                 ticket = dict(zip(keys, row))
+                 import_ticket(ticket['ticket_barcode'],
+                               ticket['ticket_type'],
+                               ticket['email'])
-                 self.import_ticket(*ticket)
-
-     def import_ticket(self, ticket_number, ticket_barcode, purchase_date,
-             ticket_type, ticket_holder, email, cellphone, checked_in,
-             checked_in_date, checked_in_by, complimentary):
-         import_ticket(ticket_barcode, ticket_type, email)
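The dict(zip(keys, row)) idiom introduced by this refactor is also available ready-made as csv.DictReader, which maps every row onto the header line for you. A sketch with invented column names and data:

import csv
import io

data = io.StringIO(
    'Ticket Barcode,Ticket Type,Email\n'
    '12345,Standard,guest@example.com\n'
)

reader = csv.DictReader(data)   # the first row becomes the keys
for row in reader:
    # Keys keep the original header spelling, spaces included.
    print(row['Ticket Barcode'], row['Ticket Type'], row['Email'])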
32126085f361489bb5c9c18972479b0c313c7d10
bash_runner/tasks.py
bash_runner/tasks.py
from celery import task
from cosmo.events import send_event as send_riemann_event
from cloudify.utils import get_local_ip

get_ip = get_local_ip
send_event = send_riemann_event

@task
def start(__cloudify_id, port=8080, **kwargs):
    print 'HELLO BASH! %s' % port
    send_event(__cloudify_id, get_ip(), "hello_bash status", "state", "running")
from celery import task
from cosmo.events import send_event as send_riemann_event
from cloudify.utils import get_local_ip

get_ip = get_local_ip
send_event = send_riemann_event

@task
def start(__cloudify_id, port=8080, **kwargs):
    with open('/tmp/HELLO', 'w') as f:
        print >> f, 'HELLO BASH! %s' % port
    send_event(__cloudify_id, get_ip(), "hello_bash status", "state", "running")
Send the output to a tmp file
Send the output to a tmp file
Python
apache-2.0
rantav/cosmo-plugin-bash-runner
from celery import task
from cosmo.events import send_event as send_riemann_event
from cloudify.utils import get_local_ip

get_ip = get_local_ip
send_event = send_riemann_event

@task
def start(__cloudify_id, port=8080, **kwargs):
+     with open('/tmp/HELLO', 'w') as f:
-     print 'HELLO BASH! %s' % port
+         print >> f, 'HELLO BASH! %s' % port
    send_event(__cloudify_id, get_ip(), "hello_bash status", "state", "running")
Send the output to a tmp file
## Code Before:
from celery import task
from cosmo.events import send_event as send_riemann_event
from cloudify.utils import get_local_ip

get_ip = get_local_ip
send_event = send_riemann_event

@task
def start(__cloudify_id, port=8080, **kwargs):
    print 'HELLO BASH! %s' % port
    send_event(__cloudify_id, get_ip(), "hello_bash status", "state", "running")
## Instruction:
Send the output to a tmp file
## Code After:
from celery import task
from cosmo.events import send_event as send_riemann_event
from cloudify.utils import get_local_ip

get_ip = get_local_ip
send_event = send_riemann_event

@task
def start(__cloudify_id, port=8080, **kwargs):
    with open('/tmp/HELLO', 'w') as f:
        print >> f, 'HELLO BASH! %s' % port
    send_event(__cloudify_id, get_ip(), "hello_bash status", "state", "running")
from celery import task
from cosmo.events import send_event as send_riemann_event
from cloudify.utils import get_local_ip

get_ip = get_local_ip
send_event = send_riemann_event

@task
def start(__cloudify_id, port=8080, **kwargs):
+     with open('/tmp/HELLO', 'w') as f:
-     print 'HELLO BASH! %s' % port
+         print >> f, 'HELLO BASH! %s' % port
?     ++++      ++++++
    send_event(__cloudify_id, get_ip(), "hello_bash status", "state", "running")
9546faddab321eb508f358883faf45cbc7d48dd8
calexicon/internal/tests/test_julian.py
calexicon/internal/tests/test_julian.py
import unittest

from calexicon.internal.julian import distant_julian_to_gregorian, julian_to_gregorian

class TestJulian(unittest.TestCase):
    def test_distant_julian_to_gregorian(self):
        self.assertEqual(distant_julian_to_gregorian(9999, 12, 1), (10000, 2, 12))

    def test_julian_to_gregorian(self):
        self.assertEqual(julian_to_gregorian(1984, 2, 29), (1984, 3, 13))
import unittest

from datetime import date as vanilla_date

from calexicon.internal.julian import distant_julian_to_gregorian, julian_to_gregorian

class TestJulian(unittest.TestCase):
    def test_distant_julian_to_gregorian(self):
        self.assertEqual(distant_julian_to_gregorian(9999, 12, 1), (10000, 2, 12))

    def test_julian_to_gregorian(self):
        self.assertEqual(julian_to_gregorian(1984, 2, 29), vanilla_date(1984, 3, 13))
Correct test - vanilla_date not tuple.
Correct test - vanilla_date not tuple.
Python
apache-2.0
jwg4/calexicon,jwg4/qual
import unittest
+
+ from datetime import date as vanilla_date

from calexicon.internal.julian import distant_julian_to_gregorian, julian_to_gregorian

class TestJulian(unittest.TestCase):
    def test_distant_julian_to_gregorian(self):
        self.assertEqual(distant_julian_to_gregorian(9999, 12, 1), (10000, 2, 12))

    def test_julian_to_gregorian(self):
-         self.assertEqual(julian_to_gregorian(1984, 2, 29), (1984, 3, 13))
+         self.assertEqual(julian_to_gregorian(1984, 2, 29), vanilla_date(1984, 3, 13))
Correct test - vanilla_date not tuple.
## Code Before:
import unittest

from calexicon.internal.julian import distant_julian_to_gregorian, julian_to_gregorian

class TestJulian(unittest.TestCase):
    def test_distant_julian_to_gregorian(self):
        self.assertEqual(distant_julian_to_gregorian(9999, 12, 1), (10000, 2, 12))

    def test_julian_to_gregorian(self):
        self.assertEqual(julian_to_gregorian(1984, 2, 29), (1984, 3, 13))
## Instruction:
Correct test - vanilla_date not tuple.
## Code After:
import unittest

from datetime import date as vanilla_date

from calexicon.internal.julian import distant_julian_to_gregorian, julian_to_gregorian

class TestJulian(unittest.TestCase):
    def test_distant_julian_to_gregorian(self):
        self.assertEqual(distant_julian_to_gregorian(9999, 12, 1), (10000, 2, 12))

    def test_julian_to_gregorian(self):
        self.assertEqual(julian_to_gregorian(1984, 2, 29), vanilla_date(1984, 3, 13))
import unittest
+
+ from datetime import date as vanilla_date

from calexicon.internal.julian import distant_julian_to_gregorian, julian_to_gregorian

class TestJulian(unittest.TestCase):
    def test_distant_julian_to_gregorian(self):
        self.assertEqual(distant_julian_to_gregorian(9999, 12, 1), (10000, 2, 12))

    def test_julian_to_gregorian(self):
-         self.assertEqual(julian_to_gregorian(1984, 2, 29), (1984, 3, 13))
+         self.assertEqual(julian_to_gregorian(1984, 2, 29), vanilla_date(1984, 3, 13))
?                                                            ++++++++++++
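The fix in this record rests on the fact that a datetime.date never compares equal to a tuple of the same numbers, so the old assertion could only have passed while the function still returned a tuple. A two-line illustration:

from datetime import date

print(date(1984, 3, 13) == (1984, 3, 13))        # False: different types
print(date(1984, 3, 13) == date(1984, 3, 13))    # True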
715098531f823c3b2932e6a03d2e4b113bd53ed9
tests/test_grammar.py
tests/test_grammar.py
import viper.grammar as vg
import viper.grammar.languages as vgl
import viper.lexer as vl

import pytest

@pytest.mark.parametrize('line,sppf', [
    ('foo',
     vgl.SPPF(vgl.ParseTreeChar(vl.Name('foo')))),
    ('2',
     vgl.SPPF(vgl.ParseTreeChar(vl.Number('2')))),
    ('...',
     vgl.SPPF(vgl.ParseTreeChar(vl.Operator('...')))),
    ('Zilch',
     vgl.SPPF(vgl.ParseTreeChar(vl.Class('Zilch')))),
    ('True',
     vgl.SPPF(vgl.ParseTreeChar(vl.Class('True')))),
    ('False',
     vgl.SPPF(vgl.ParseTreeChar(vl.Class('False')))),
])
def test_atom(line: str, sppf: vgl.SPPF):
    atom = vg.GRAMMAR.get_rule('atom')
    lexemes = vl.lex_line(line)
    assert sppf == vg.make_sppf(atom, lexemes)
import viper.grammar as vg
import viper.lexer as vl

from viper.grammar.languages import (
    SPPF,
    ParseTreeEmpty as PTE, ParseTreeChar as PTC, ParseTreePair as PTP, ParseTreeRep as PTR
)

import pytest

@pytest.mark.parametrize('line,sppf', [
    ('foo',
     SPPF(PTC(vl.Name('foo')))),
    ('42',
     SPPF(PTC(vl.Number('42')))),
    ('...',
     SPPF(PTC(vl.Operator('...')))),
    ('Zilch',
     SPPF(PTC(vl.Class('Zilch')))),
    ('True',
     SPPF(PTC(vl.Class('True')))),
    ('False',
     SPPF(PTC(vl.Class('False')))),
    ('()',
     SPPF(PTP(SPPF(PTC(vl.OpenParen())),
              SPPF(PTC(vl.CloseParen()))))),
    ('(foo)',
     SPPF(PTP(SPPF(PTC(vl.OpenParen())),
              SPPF(PTP(SPPF(PTP(SPPF(PTC(vl.Name('foo'))),
                                SPPF(PTR(SPPF())))),
                       SPPF(PTC(vl.CloseParen()))))))),
])
def test_atom(line: str, sppf: SPPF):
    atom = vg.GRAMMAR.get_rule('atom')
    lexemes = vl.lex_line(line)
    assert sppf == vg.make_sppf(atom, lexemes)
Revise grammar tests for atom
Revise grammar tests for atom
Python
apache-2.0
pdarragh/Viper
import viper.grammar as vg
- import viper.grammar.languages as vgl
import viper.lexer as vl
+
+ from viper.grammar.languages import (
+     SPPF,
+     ParseTreeEmpty as PTE, ParseTreeChar as PTC, ParseTreePair as PTP, ParseTreeRep as PTR
+ )

import pytest

@pytest.mark.parametrize('line,sppf', [
    ('foo',
-      vgl.SPPF(vgl.ParseTreeChar(vl.Name('foo')))),
+      SPPF(PTC(vl.Name('foo')))),
-     ('2',
+     ('42',
-      vgl.SPPF(vgl.ParseTreeChar(vl.Number('2')))),
+      SPPF(PTC(vl.Number('42')))),
    ('...',
-      vgl.SPPF(vgl.ParseTreeChar(vl.Operator('...')))),
+      SPPF(PTC(vl.Operator('...')))),
    ('Zilch',
-      vgl.SPPF(vgl.ParseTreeChar(vl.Class('Zilch')))),
+      SPPF(PTC(vl.Class('Zilch')))),
    ('True',
-      vgl.SPPF(vgl.ParseTreeChar(vl.Class('True')))),
+      SPPF(PTC(vl.Class('True')))),
    ('False',
-      vgl.SPPF(vgl.ParseTreeChar(vl.Class('False')))),
+      SPPF(PTC(vl.Class('False')))),
+     ('()',
+      SPPF(PTP(SPPF(PTC(vl.OpenParen())),
+               SPPF(PTC(vl.CloseParen()))))),
+     ('(foo)',
+      SPPF(PTP(SPPF(PTC(vl.OpenParen())),
+               SPPF(PTP(SPPF(PTP(SPPF(PTC(vl.Name('foo'))),
+                                 SPPF(PTR(SPPF())))),
+                        SPPF(PTC(vl.CloseParen()))))))),
])
- def test_atom(line: str, sppf: vgl.SPPF):
+ def test_atom(line: str, sppf: SPPF):
    atom = vg.GRAMMAR.get_rule('atom')
    lexemes = vl.lex_line(line)
    assert sppf == vg.make_sppf(atom, lexemes)
Revise grammar tests for atom
## Code Before:
import viper.grammar as vg
import viper.grammar.languages as vgl
import viper.lexer as vl

import pytest

@pytest.mark.parametrize('line,sppf', [
    ('foo',
     vgl.SPPF(vgl.ParseTreeChar(vl.Name('foo')))),
    ('2',
     vgl.SPPF(vgl.ParseTreeChar(vl.Number('2')))),
    ('...',
     vgl.SPPF(vgl.ParseTreeChar(vl.Operator('...')))),
    ('Zilch',
     vgl.SPPF(vgl.ParseTreeChar(vl.Class('Zilch')))),
    ('True',
     vgl.SPPF(vgl.ParseTreeChar(vl.Class('True')))),
    ('False',
     vgl.SPPF(vgl.ParseTreeChar(vl.Class('False')))),
])
def test_atom(line: str, sppf: vgl.SPPF):
    atom = vg.GRAMMAR.get_rule('atom')
    lexemes = vl.lex_line(line)
    assert sppf == vg.make_sppf(atom, lexemes)
## Instruction:
Revise grammar tests for atom
## Code After:
import viper.grammar as vg
import viper.lexer as vl

from viper.grammar.languages import (
    SPPF,
    ParseTreeEmpty as PTE, ParseTreeChar as PTC, ParseTreePair as PTP, ParseTreeRep as PTR
)

import pytest

@pytest.mark.parametrize('line,sppf', [
    ('foo',
     SPPF(PTC(vl.Name('foo')))),
    ('42',
     SPPF(PTC(vl.Number('42')))),
    ('...',
     SPPF(PTC(vl.Operator('...')))),
    ('Zilch',
     SPPF(PTC(vl.Class('Zilch')))),
    ('True',
     SPPF(PTC(vl.Class('True')))),
    ('False',
     SPPF(PTC(vl.Class('False')))),
    ('()',
     SPPF(PTP(SPPF(PTC(vl.OpenParen())),
              SPPF(PTC(vl.CloseParen()))))),
    ('(foo)',
     SPPF(PTP(SPPF(PTC(vl.OpenParen())),
              SPPF(PTP(SPPF(PTP(SPPF(PTC(vl.Name('foo'))),
                                SPPF(PTR(SPPF())))),
                       SPPF(PTC(vl.CloseParen()))))))),
])
def test_atom(line: str, sppf: SPPF):
    atom = vg.GRAMMAR.get_rule('atom')
    lexemes = vl.lex_line(line)
    assert sppf == vg.make_sppf(atom, lexemes)
import viper.grammar as vg
- import viper.grammar.languages as vgl
import viper.lexer as vl
+
+ from viper.grammar.languages import (
+     SPPF,
+     ParseTreeEmpty as PTE, ParseTreeChar as PTC, ParseTreePair as PTP, ParseTreeRep as PTR
+ )

import pytest

@pytest.mark.parametrize('line,sppf', [
    ('foo',
-      vgl.SPPF(vgl.ParseTreeChar(vl.Name('foo')))),
?      ----     ----    ---- ---          ---
+      SPPF(PTC(vl.Name('foo')))),
-     ('2',
+     ('42',
?       +
-      vgl.SPPF(vgl.ParseTreeChar(vl.Number('2')))),
?      ----     ----    ---- ---          ---
+      SPPF(PTC(vl.Number('42')))),
?                           +
    ('...',
-      vgl.SPPF(vgl.ParseTreeChar(vl.Operator('...')))),
?      ----     ----    ---- ---          ---
+      SPPF(PTC(vl.Operator('...')))),
    ('Zilch',
-      vgl.SPPF(vgl.ParseTreeChar(vl.Class('Zilch')))),
?      ----     ----    ---- ---          ---
+      SPPF(PTC(vl.Class('Zilch')))),
    ('True',
-      vgl.SPPF(vgl.ParseTreeChar(vl.Class('True')))),
?      ----     ----    ---- ---          ---
+      SPPF(PTC(vl.Class('True')))),
    ('False',
-      vgl.SPPF(vgl.ParseTreeChar(vl.Class('False')))),
?      ----     ----    ---- ---          ---
+      SPPF(PTC(vl.Class('False')))),
+     ('()',
+      SPPF(PTP(SPPF(PTC(vl.OpenParen())),
+               SPPF(PTC(vl.CloseParen()))))),
+     ('(foo)',
+      SPPF(PTP(SPPF(PTC(vl.OpenParen())),
+               SPPF(PTP(SPPF(PTP(SPPF(PTC(vl.Name('foo'))),
+                                 SPPF(PTR(SPPF())))),
+                        SPPF(PTC(vl.CloseParen()))))))),
])
- def test_atom(line: str, sppf: vgl.SPPF):
?                                ----
+ def test_atom(line: str, sppf: SPPF):
    atom = vg.GRAMMAR.get_rule('atom')
    lexemes = vl.lex_line(line)
    assert sppf == vg.make_sppf(atom, lexemes)
b03bc28da7476ca27e64b8cc01b685e11eb6d505
menpodetect/pico/conversion.py
menpodetect/pico/conversion.py
from menpo.shape import PointDirectedGraph
import numpy as np

def pointgraph_from_circle(fitting):
    y, x = fitting.center
    radius = fitting.diameter / 2.0
    return PointDirectedGraph(np.array(((y, x),
                                        (y + radius, x),
                                        (y + radius, x + radius),
                                        (y, x + radius))),
                              np.array([[0, 1], [1, 2], [2, 3], [3, 0]]))
from menpo.shape import PointDirectedGraph
import numpy as np

def pointgraph_from_circle(fitting):
    diameter = fitting.diameter
    radius = diameter / 2.0
    y, x = fitting.center
    y -= radius
    x -= radius
    return PointDirectedGraph(np.array(((y, x),
                                        (y + diameter, x),
                                        (y + diameter, x + diameter),
                                        (y, x + diameter))),
                              np.array([[0, 1], [1, 2], [2, 3], [3, 0]]))
Fix the circle to rectangle code
Fix the circle to rectangle code

Was totally incorrect previously
Python
bsd-3-clause
jabooth/menpodetect,yuxiang-zhou/menpodetect,yuxiang-zhou/menpodetect,jabooth/menpodetect
from menpo.shape import PointDirectedGraph
import numpy as np

def pointgraph_from_circle(fitting):
+     diameter = fitting.diameter
+     radius = diameter / 2.0
    y, x = fitting.center
-     radius = fitting.diameter / 2.0
+     y -= radius
+     x -= radius
    return PointDirectedGraph(np.array(((y, x),
-                                         (y + radius, x),
+                                         (y + diameter, x),
-                                         (y + radius, x + radius),
+                                         (y + diameter, x + diameter),
-                                         (y, x + radius))),
+                                         (y, x + diameter))),
                              np.array([[0, 1], [1, 2], [2, 3], [3, 0]]))
Fix the circle to rectangle code
## Code Before:
from menpo.shape import PointDirectedGraph
import numpy as np

def pointgraph_from_circle(fitting):
    y, x = fitting.center
    radius = fitting.diameter / 2.0
    return PointDirectedGraph(np.array(((y, x),
                                        (y + radius, x),
                                        (y + radius, x + radius),
                                        (y, x + radius))),
                              np.array([[0, 1], [1, 2], [2, 3], [3, 0]]))
## Instruction:
Fix the circle to rectangle code
## Code After:
from menpo.shape import PointDirectedGraph
import numpy as np

def pointgraph_from_circle(fitting):
    diameter = fitting.diameter
    radius = diameter / 2.0
    y, x = fitting.center
    y -= radius
    x -= radius
    return PointDirectedGraph(np.array(((y, x),
                                        (y + diameter, x),
                                        (y + diameter, x + diameter),
                                        (y, x + diameter))),
                              np.array([[0, 1], [1, 2], [2, 3], [3, 0]]))
from menpo.shape import PointDirectedGraph
import numpy as np

def pointgraph_from_circle(fitting):
+     diameter = fitting.diameter
+     radius = diameter / 2.0
    y, x = fitting.center
-     radius = fitting.diameter / 2.0
+     y -= radius
+     x -= radius
    return PointDirectedGraph(np.array(((y, x),
-                                         (y + radius, x),
?                                              --  ^^
+                                         (y + diameter, x),
?                                              ^^^^^^
-                                         (y + radius, x + radius),
?                                              --  ^^     --  ^^
+                                         (y + diameter, x + diameter),
?                                              ^^^^^^        ^^^^^^
-                                         (y, x + radius))),
?                                                 --  ^^
+                                         (y, x + diameter))),
?                                                 ^^^^^^
                              np.array([[0, 1], [1, 2], [2, 3], [3, 0]]))
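A quick worked check of the corrected geometry: for a detection centred at (y, x) = (50, 50) with diameter 40, the radius is 20, so the box should span 30 to 70 on both axes. A sketch using a tiny stand-in for the detector's fitting object:

import numpy as np

class Fitting:
    # Minimal stand-in; the real object comes from the detector.
    center = (50.0, 50.0)   # (y, x)
    diameter = 40.0

f = Fitting()
radius = f.diameter / 2.0
y, x = f.center
y -= radius   # shift from the centre to the top-left corner
x -= radius
corners = np.array(((y, x),
                    (y + f.diameter, x),
                    (y + f.diameter, x + f.diameter),
                    (y, x + f.diameter)))
print(corners)   # corners (30, 30), (70, 30), (70, 70), (30, 70)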
db0aa94de30d73217f9091635c92f59b8af98ef7
alg_sum_list.py
alg_sum_list.py
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division

def sum_list_for(num_ls):
    """Sum number list by for loop."""
    _sum = 0
    for num in num_ls:
        _sum += num
    return _sum

def sum_list_recur(num_ls):
    """Sum number list by recursion."""
    if len(num_ls) == 1:
        return num_ls[0]
    else:
        return num_ls[0] + sum_list_recur(num_ls[1:])

def main():
    import time

    num_ls = [0, 1, 2, 3, 4, 5]

    start_time = time.time()
    print('By for loop: {}'.format(sum_list_for(num_ls)))
    print('Time: {}'.format(time.time() - start_time))

    start_time = time.time()
    print('By recursion: {}'.format(sum_list_recur(num_ls)))
    print('Time: {}'.format(time.time() - start_time))

if __name__ == '__main__':
    main()
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division

def sum_list_iter(num_ls):
    """Sum number list by for loop."""
    _sum = 0
    for num in num_ls:
        _sum += num
    return _sum

def sum_list_recur(num_ls):
    """Sum number list by recursion."""
    if len(num_ls) == 1:
        return num_ls[0]
    else:
        return num_ls[0] + sum_list_recur(num_ls[1:])

def main():
    import time

    num_ls = range(100)

    start_time = time.time()
    print('By iteration: {}'.format(sum_list_iter(num_ls)))
    print('Time: {}'.format(time.time() - start_time))

    start_time = time.time()
    print('By recursion: {}'.format(sum_list_recur(num_ls)))
    print('Time: {}'.format(time.time() - start_time))

if __name__ == '__main__':
    main()
Rename to sum_list_iter() and revise main()'s num_ls
Rename to sum_list_iter() and revise main()'s num_ls
Python
bsd-2-clause
bowen0701/algorithms_data_structures
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division

- def sum_list_for(num_ls):
+ def sum_list_iter(num_ls):
    """Sum number list by for loop."""
    _sum = 0
    for num in num_ls:
        _sum += num
    return _sum

def sum_list_recur(num_ls):
    """Sum number list by recursion."""
    if len(num_ls) == 1:
        return num_ls[0]
    else:
        return num_ls[0] + sum_list_recur(num_ls[1:])

def main():
    import time

-     num_ls = [0, 1, 2, 3, 4, 5]
+     num_ls = range(100)

    start_time = time.time()
-     print('By for loop: {}'.format(sum_list_for(num_ls)))
+     print('By iteration: {}'.format(sum_list_iter(num_ls)))
    print('Time: {}'.format(time.time() - start_time))

    start_time = time.time()
    print('By recursion: {}'.format(sum_list_recur(num_ls)))
    print('Time: {}'.format(time.time() - start_time))

if __name__ == '__main__':
    main()
Rename to sum_list_iter() and revise main()'s num_ls
## Code Before:
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division

def sum_list_for(num_ls):
    """Sum number list by for loop."""
    _sum = 0
    for num in num_ls:
        _sum += num
    return _sum

def sum_list_recur(num_ls):
    """Sum number list by recursion."""
    if len(num_ls) == 1:
        return num_ls[0]
    else:
        return num_ls[0] + sum_list_recur(num_ls[1:])

def main():
    import time

    num_ls = [0, 1, 2, 3, 4, 5]

    start_time = time.time()
    print('By for loop: {}'.format(sum_list_for(num_ls)))
    print('Time: {}'.format(time.time() - start_time))

    start_time = time.time()
    print('By recursion: {}'.format(sum_list_recur(num_ls)))
    print('Time: {}'.format(time.time() - start_time))

if __name__ == '__main__':
    main()
## Instruction:
Rename to sum_list_iter() and revise main()'s num_ls
## Code After:
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division

def sum_list_iter(num_ls):
    """Sum number list by for loop."""
    _sum = 0
    for num in num_ls:
        _sum += num
    return _sum

def sum_list_recur(num_ls):
    """Sum number list by recursion."""
    if len(num_ls) == 1:
        return num_ls[0]
    else:
        return num_ls[0] + sum_list_recur(num_ls[1:])

def main():
    import time

    num_ls = range(100)

    start_time = time.time()
    print('By iteration: {}'.format(sum_list_iter(num_ls)))
    print('Time: {}'.format(time.time() - start_time))

    start_time = time.time()
    print('By recursion: {}'.format(sum_list_recur(num_ls)))
    print('Time: {}'.format(time.time() - start_time))

if __name__ == '__main__':
    main()
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division

- def sum_list_for(num_ls):
?              ^^
+ def sum_list_iter(num_ls):
?              ^^^
    """Sum number list by for loop."""
    _sum = 0
    for num in num_ls:
        _sum += num
    return _sum

def sum_list_recur(num_ls):
    """Sum number list by recursion."""
    if len(num_ls) == 1:
        return num_ls[0]
    else:
        return num_ls[0] + sum_list_recur(num_ls[1:])

def main():
    import time

-     num_ls = [0, 1, 2, 3, 4, 5]
+     num_ls = range(100)

    start_time = time.time()
-     print('By for loop: {}'.format(sum_list_for(num_ls)))
?               ^ ^^^^^^                       ^^
+     print('By iteration: {}'.format(sum_list_iter(num_ls)))
?               ^^^^^^^ ^                      ^^^
    print('Time: {}'.format(time.time() - start_time))

    start_time = time.time()
    print('By recursion: {}'.format(sum_list_recur(num_ls)))
    print('Time: {}'.format(time.time() - start_time))

if __name__ == '__main__':
    main()
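One caveat worth attaching to the recursive variant kept in this record: slicing copies the tail on every call, making it O(n^2), and CPython's default recursion limit (about 1000 frames) caps the input size, whereas the built-in sum has neither problem. A short sketch:

import sys

def sum_list_recur(num_ls):
    # Each call copies num_ls[1:], so this is quadratic and frame-bound.
    if len(num_ls) == 1:
        return num_ls[0]
    return num_ls[0] + sum_list_recur(num_ls[1:])

nums = list(range(100))
print(sum_list_recur(nums) == sum(nums))   # True
print(sys.getrecursionlimit())             # typically 1000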
eb5dc3ef7e7904549f50a4255477ed50d3ee53ab
twinsies/clock.py
twinsies/clock.py
from apscheduler.schedulers.blocking import BlockingScheduler
from twinsies.twitter import (random_trend_query, fetch_tweets,
                              dig_for_twins, update_status)
from memory_profiler import profile

sched = BlockingScheduler()

@sched.scheduled_job('interval', minutes=16)
@profile
def twinsy_finder(fetch_size=10000):
    print("Running twinsy finder...")
    fetched_tweets = fetch_tweets('Kanye', fetch_size=fetch_size)
    tweets = dig_for_twins(fetched_tweets)
    if tweets:
        print("Twins found, updating status.")
        update_status(tweets)
    else:
        print("No twins found.")

if __name__ == '__main__':
    twinsy_finder()
    print("Starting scheduler")
    sched.start()
from apscheduler.schedulers.blocking import BlockingScheduler
from twinsies.twitter import (random_trend_query, fetch_tweets,
                              dig_for_twins, update_status)
from memory_profiler import profile

sched = BlockingScheduler()

@sched.scheduled_job('interval', minutes=16)
@profile
def twinsy_finder(fetch_size=5000):
    print("Running twinsy finder...")
    fetched_tweets = fetch_tweets('Kanye', fetch_size=fetch_size)
    tweets = dig_for_twins(fetched_tweets)
    if tweets:
        print("Twins found, updating status.")
        update_status(tweets)
    else:
        print("No twins found.")

if __name__ == '__main__':
    print("Starting scheduler")
    sched.start()
Reduce fetch size to 5000. Don't run job on startup.
Reduce fetch size to 5000. Don't run job on startup.
Python
mit
kkwteh/twinyewest
from apscheduler.schedulers.blocking import BlockingScheduler
from twinsies.twitter import (random_trend_query, fetch_tweets,
                              dig_for_twins, update_status)
from memory_profiler import profile

sched = BlockingScheduler()

@sched.scheduled_job('interval', minutes=16)
@profile
- def twinsy_finder(fetch_size=10000):
+ def twinsy_finder(fetch_size=5000):
    print("Running twinsy finder...")
    fetched_tweets = fetch_tweets('Kanye', fetch_size=fetch_size)
    tweets = dig_for_twins(fetched_tweets)
    if tweets:
        print("Twins found, updating status.")
        update_status(tweets)
    else:
        print("No twins found.")

if __name__ == '__main__':
-     twinsy_finder()
    print("Starting scheduler")
    sched.start()
Reduce fetch size to 5000. Don't run job on startup.
## Code Before:
from apscheduler.schedulers.blocking import BlockingScheduler
from twinsies.twitter import (random_trend_query, fetch_tweets,
                              dig_for_twins, update_status)
from memory_profiler import profile

sched = BlockingScheduler()

@sched.scheduled_job('interval', minutes=16)
@profile
def twinsy_finder(fetch_size=10000):
    print("Running twinsy finder...")
    fetched_tweets = fetch_tweets('Kanye', fetch_size=fetch_size)
    tweets = dig_for_twins(fetched_tweets)
    if tweets:
        print("Twins found, updating status.")
        update_status(tweets)
    else:
        print("No twins found.")

if __name__ == '__main__':
    twinsy_finder()
    print("Starting scheduler")
    sched.start()
## Instruction:
Reduce fetch size to 5000. Don't run job on startup.
## Code After:
from apscheduler.schedulers.blocking import BlockingScheduler
from twinsies.twitter import (random_trend_query, fetch_tweets,
                              dig_for_twins, update_status)
from memory_profiler import profile

sched = BlockingScheduler()

@sched.scheduled_job('interval', minutes=16)
@profile
def twinsy_finder(fetch_size=5000):
    print("Running twinsy finder...")
    fetched_tweets = fetch_tweets('Kanye', fetch_size=fetch_size)
    tweets = dig_for_twins(fetched_tweets)
    if tweets:
        print("Twins found, updating status.")
        update_status(tweets)
    else:
        print("No twins found.")

if __name__ == '__main__':
    print("Starting scheduler")
    sched.start()
from apscheduler.schedulers.blocking import BlockingScheduler
from twinsies.twitter import (random_trend_query, fetch_tweets,
                              dig_for_twins, update_status)
from memory_profiler import profile

sched = BlockingScheduler()

@sched.scheduled_job('interval', minutes=16)
@profile
- def twinsy_finder(fetch_size=10000):
?                              ^^
+ def twinsy_finder(fetch_size=5000):
?                              ^
    print("Running twinsy finder...")
    fetched_tweets = fetch_tweets('Kanye', fetch_size=fetch_size)
    tweets = dig_for_twins(fetched_tweets)
    if tweets:
        print("Twins found, updating status.")
        update_status(tweets)
    else:
        print("No twins found.")

if __name__ == '__main__':
-     twinsy_finder()
    print("Starting scheduler")
    sched.start()
710a2a6d9c462041bae6c41f0578d99262c6a861
tests/test_execute.py
tests/test_execute.py
import asyncpg
from asyncpg import _testbase as tb

class TestExecuteScript(tb.ConnectedTestCase):

    async def test_execute_script_1(self):
        r = await self.con.execute('''
            SELECT 1;

            SELECT true FROM pg_type WHERE false = true;

            SELECT 2;
        ''')
        self.assertIsNone(r)

    async def test_execute_script_check_transactionality(self):
        with self.assertRaises(asyncpg.Error):
            await self.con.execute('''
                CREATE TABLE mytab (a int);
                SELECT * FROM mytab WHERE 1 / 0 = 1;
            ''')

        with self.assertRaisesRegex(asyncpg.Error,
                                    '"mytab" does not exist'):
            await self.con.prepare('''
                SELECT * FROM mytab
            ''')
import asyncpg
from asyncpg import _testbase as tb

class TestExecuteScript(tb.ConnectedTestCase):

    async def test_execute_script_1(self):
        r = await self.con.execute('''
            SELECT 1;

            SELECT true FROM pg_type WHERE false = true;

            SELECT 2;
        ''')
        self.assertIsNone(r)

    async def test_execute_script_check_transactionality(self):
        with self.assertRaises(asyncpg.Error):
            await self.con.execute('''
                CREATE TABLE mytab (a int);
                SELECT * FROM mytab WHERE 1 / 0 = 1;
            ''')

        with self.assertRaisesRegex(asyncpg.Error,
                                    '"mytab" does not exist'):
            await self.con.prepare('''
                SELECT * FROM mytab
            ''')

    async def test_execute_exceptions_1(self):
        with self.assertRaisesRegex(asyncpg.Error,
                                    'relation "__dne__" does not exist'):

            await self.con.execute('select * from __dne__')
Test that con.execute() propagates Postgres exceptions
Test that con.execute() propagates Postgres exceptions
Python
apache-2.0
MagicStack/asyncpg,MagicStack/asyncpg
import asyncpg from asyncpg import _testbase as tb class TestExecuteScript(tb.ConnectedTestCase): async def test_execute_script_1(self): r = await self.con.execute(''' SELECT 1; SELECT true FROM pg_type WHERE false = true; SELECT 2; ''') self.assertIsNone(r) async def test_execute_script_check_transactionality(self): with self.assertRaises(asyncpg.Error): await self.con.execute(''' CREATE TABLE mytab (a int); SELECT * FROM mytab WHERE 1 / 0 = 1; ''') with self.assertRaisesRegex(asyncpg.Error, '"mytab" does not exist'): await self.con.prepare(''' SELECT * FROM mytab ''') + async def test_execute_exceptions_1(self): + with self.assertRaisesRegex(asyncpg.Error, + 'relation "__dne__" does not exist'): + + await self.con.execute('select * from __dne__') +
Test that con.execute() propagates Postgres exceptions
## Code Before: import asyncpg from asyncpg import _testbase as tb class TestExecuteScript(tb.ConnectedTestCase): async def test_execute_script_1(self): r = await self.con.execute(''' SELECT 1; SELECT true FROM pg_type WHERE false = true; SELECT 2; ''') self.assertIsNone(r) async def test_execute_script_check_transactionality(self): with self.assertRaises(asyncpg.Error): await self.con.execute(''' CREATE TABLE mytab (a int); SELECT * FROM mytab WHERE 1 / 0 = 1; ''') with self.assertRaisesRegex(asyncpg.Error, '"mytab" does not exist'): await self.con.prepare(''' SELECT * FROM mytab ''') ## Instruction: Test that con.execute() propagate Postgres exceptions ## Code After: import asyncpg from asyncpg import _testbase as tb class TestExecuteScript(tb.ConnectedTestCase): async def test_execute_script_1(self): r = await self.con.execute(''' SELECT 1; SELECT true FROM pg_type WHERE false = true; SELECT 2; ''') self.assertIsNone(r) async def test_execute_script_check_transactionality(self): with self.assertRaises(asyncpg.Error): await self.con.execute(''' CREATE TABLE mytab (a int); SELECT * FROM mytab WHERE 1 / 0 = 1; ''') with self.assertRaisesRegex(asyncpg.Error, '"mytab" does not exist'): await self.con.prepare(''' SELECT * FROM mytab ''') async def test_execute_exceptions_1(self): with self.assertRaisesRegex(asyncpg.Error, 'relation "__dne__" does not exist'): await self.con.execute('select * from __dne__')
import asyncpg from asyncpg import _testbase as tb class TestExecuteScript(tb.ConnectedTestCase): async def test_execute_script_1(self): r = await self.con.execute(''' SELECT 1; SELECT true FROM pg_type WHERE false = true; SELECT 2; ''') self.assertIsNone(r) async def test_execute_script_check_transactionality(self): with self.assertRaises(asyncpg.Error): await self.con.execute(''' CREATE TABLE mytab (a int); SELECT * FROM mytab WHERE 1 / 0 = 1; ''') with self.assertRaisesRegex(asyncpg.Error, '"mytab" does not exist'): await self.con.prepare(''' SELECT * FROM mytab ''') + + async def test_execute_exceptions_1(self): + with self.assertRaisesRegex(asyncpg.Error, + 'relation "__dne__" does not exist'): + + await self.con.execute('select * from __dne__')
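The added test checks that a server-side error surfaces as a Python exception, using `assertRaisesRegex` as a context manager around an awaited call. A self-contained sketch of that testing pattern that needs no database; `FakePostgresError` and `fake_execute` are illustrative stand-ins, not asyncpg APIs, and `asyncio.run` assumes Python 3.7+:

```python
import asyncio
import unittest

class FakePostgresError(Exception):
    """Stand-in for asyncpg's exception hierarchy."""

async def fake_execute(query):
    # Simulate the server rejecting a query against a missing relation.
    raise FakePostgresError('relation "__dne__" does not exist')

class ExceptionPropagationTest(unittest.TestCase):
    def test_error_propagates(self):
        async def scenario():
            with self.assertRaisesRegex(FakePostgresError,
                                        'relation "__dne__" does not exist'):
                await fake_execute('select * from __dne__')
        asyncio.run(scenario())

if __name__ == "__main__":
    unittest.main()
```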
b52f0e9fe2c9e41205a8d703985ac39ab3524a8a
tests/blueprints/test_entity.py
tests/blueprints/test_entity.py
from json import loads from tests import AppTestCase, main from tentd import db from tentd.models.entity import Entity class EntityBlueprintTest (AppTestCase): def setUp (self): super(EntityBlueprintTest, self).setUp() self.user = Entity(name="testuser") db.session.add(self.user) db.session.commit() def test_entity_link (self): r = self.client.head("/testuser/") self.assertIn("/testuser/profile", r.headers['Link']) def test_entity_profile_json (self): r = self.client.get("/testuser/profile") self.assertEquals(r.mimetype, 'application/json') self.assertIn('https://tent.io/types/info/core/v0.1.0', r.json) if __name__ == "__main__": main()
from json import loads from tests import AppTestCase, main from tentd import db from tentd.models.entity import Entity class EntityBlueprintTest (AppTestCase): def setUp (self): super(EntityBlueprintTest, self).setUp() self.name = 'testuser' self.user = Entity(name=self.name) db.session.add(self.user) db.session.commit() def test_entity_link (self): r = self.client.head('/' + self.name) self.assertIn('/' + self.name + 'profile', r.headers['Link']) def test_entity_link_404 (self): self.assertStatus(self.client.head('/non-existent-user'), 404) def test_entity_profile_404 (self): self.assertStatus(self.client.head('/non-existent-user/profile'), 404) def test_entity_profile_json (self): r = self.client.get('/testuser/profile') self.assertEquals(r.mimetype, 'application/json') self.assertIn('https://tent.io/types/info/core/v0.1.0', r.json) if __name__ == "__main__": main()
Test that the API 404s when a user does not exist
Test that the API 404s when a user does not exist
Python
apache-2.0
pytent/pytentd
from json import loads from tests import AppTestCase, main from tentd import db from tentd.models.entity import Entity class EntityBlueprintTest (AppTestCase): def setUp (self): super(EntityBlueprintTest, self).setUp() + self.name = 'testuser' - self.user = Entity(name="testuser") + self.user = Entity(name=self.name) db.session.add(self.user) db.session.commit() def test_entity_link (self): - r = self.client.head("/testuser/") + r = self.client.head('/' + self.name) - self.assertIn("/testuser/profile", r.headers['Link']) + self.assertIn('/' + self.name + 'profile', r.headers['Link']) - + + def test_entity_link_404 (self): + self.assertStatus(self.client.head('/non-existent-user'), 404) + + def test_entity_profile_404 (self): + self.assertStatus(self.client.head('/non-existent-user/profile'), 404) + def test_entity_profile_json (self): - r = self.client.get("/testuser/profile") + r = self.client.get('/testuser/profile') self.assertEquals(r.mimetype, 'application/json') self.assertIn('https://tent.io/types/info/core/v0.1.0', r.json) if __name__ == "__main__": main()
Test that the API 404s when a user does not exist
## Code Before: from json import loads from tests import AppTestCase, main from tentd import db from tentd.models.entity import Entity class EntityBlueprintTest (AppTestCase): def setUp (self): super(EntityBlueprintTest, self).setUp() self.user = Entity(name="testuser") db.session.add(self.user) db.session.commit() def test_entity_link (self): r = self.client.head("/testuser/") self.assertIn("/testuser/profile", r.headers['Link']) def test_entity_profile_json (self): r = self.client.get("/testuser/profile") self.assertEquals(r.mimetype, 'application/json') self.assertIn('https://tent.io/types/info/core/v0.1.0', r.json) if __name__ == "__main__": main() ## Instruction: Test that the api 404's when a user does not exist ## Code After: from json import loads from tests import AppTestCase, main from tentd import db from tentd.models.entity import Entity class EntityBlueprintTest (AppTestCase): def setUp (self): super(EntityBlueprintTest, self).setUp() self.name = 'testuser' self.user = Entity(name=self.name) db.session.add(self.user) db.session.commit() def test_entity_link (self): r = self.client.head('/' + self.name) self.assertIn('/' + self.name + 'profile', r.headers['Link']) def test_entity_link_404 (self): self.assertStatus(self.client.head('/non-existent-user'), 404) def test_entity_profile_404 (self): self.assertStatus(self.client.head('/non-existent-user/profile'), 404) def test_entity_profile_json (self): r = self.client.get('/testuser/profile') self.assertEquals(r.mimetype, 'application/json') self.assertIn('https://tent.io/types/info/core/v0.1.0', r.json) if __name__ == "__main__": main()
from json import loads from tests import AppTestCase, main from tentd import db from tentd.models.entity import Entity class EntityBlueprintTest (AppTestCase): def setUp (self): super(EntityBlueprintTest, self).setUp() + self.name = 'testuser' - self.user = Entity(name="testuser") ? ------ ^^ + self.user = Entity(name=self.name) ? ^^^^^^^ db.session.add(self.user) db.session.commit() def test_entity_link (self): - r = self.client.head("/testuser/") + r = self.client.head('/' + self.name) - self.assertIn("/testuser/profile", r.headers['Link']) ? ^ ^^^^^ ^^ ^ + self.assertIn('/' + self.name + 'profile', r.headers['Link']) ? ^ ^^^^ ^^^^^^^^^^^ ^ - + + def test_entity_link_404 (self): + self.assertStatus(self.client.head('/non-existent-user'), 404) + + def test_entity_profile_404 (self): + self.assertStatus(self.client.head('/non-existent-user/profile'), 404) + def test_entity_profile_json (self): - r = self.client.get("/testuser/profile") ? ^ ^ + r = self.client.get('/testuser/profile') ? ^ ^ self.assertEquals(r.mimetype, 'application/json') self.assertIn('https://tent.io/types/info/core/v0.1.0', r.json) if __name__ == "__main__": main()
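The two new assertions pin down that requests for unknown entities return 404. A standalone sketch of the same behaviour with a bare Flask app and its built-in test client; the route and the user set are illustrative, not pytentd's actual wiring:

```python
from flask import Flask, abort, jsonify

app = Flask(__name__)
KNOWN_USERS = {"testuser"}  # hypothetical registry of entities

@app.route("/<name>/profile")
def profile(name):
    if name not in KNOWN_USERS:
        abort(404)  # unknown entities must 404, as the tests require
    return jsonify({"https://tent.io/types/info/core/v0.1.0": {}})

if __name__ == "__main__":
    client = app.test_client()
    assert client.get("/testuser/profile").status_code == 200
    assert client.get("/non-existent-user/profile").status_code == 404
    print("404 checks passed")
```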
5863cbf81156074df4e0a9abb7a823a7701933da
tlsenum/__init__.py
tlsenum/__init__.py
import click CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"]) @click.command(context_settings=CONTEXT_SETTINGS) @click.argument("host", type=click.STRING) @click.argument("port", type=click.INT) @click.option("--verify-cert", is_flag=True) def cli(host, port, verify_cert): """ A command line tool to enumerate TLS cipher-suites supported by a server. """ pass
import socket import click from construct import UBInt16 from tlsenum.parse_hello import ( ClientHello, Extensions, HandshakeFailure, ServerHello ) from tlsenum.mappings import CipherSuites, ECCurves, ECPointFormat CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"]) def send_client_hello(host, port, data): """ Sends a ClientHello message in bytes. Returns a ServerHello message in bytes """ s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.connect((host, port)) s.send(data) server_hello = s.recv(5) server_hello += s.recv(UBInt16("length").parse(server_hello[3:5])) return server_hello @click.command(context_settings=CONTEXT_SETTINGS) @click.argument("host", type=click.STRING) @click.argument("port", type=click.INT) @click.option("--verify-cert", is_flag=True) def cli(host, port, verify_cert): """ A command line tool to enumerate TLS cipher-suites supported by a server. """ cipher_suites_list = [i.name for i in CipherSuites] extension = Extensions() extension.sni = host extension.ec_curves = [i.name for i in ECCurves] extension.ec_point_format = [i.name for i in ECPointFormat] client_hello = ClientHello() client_hello.protocol_version = "1.2" client_hello.deflate = False client_hello.extensions = extension.build() supported_cipher_suites = [] while True: client_hello.cipher_suites = cipher_suites_list server_hello = send_client_hello(host, port, client_hello.build()) try: server_hello = ServerHello.parse_server_hello(server_hello) except HandshakeFailure: break supported_cipher_suites.append(server_hello.cipher_suite) cipher_suites_list.remove(server_hello.cipher_suite) for i in supported_cipher_suites: print(i)
Add very basic logic to figure out supported cipher suites.
Add very basic logic to figure out supported cipher suites.
Python
mit
Ayrx/tlsenum,Ayrx/tlsenum
+ import socket + import click + from construct import UBInt16 + + from tlsenum.parse_hello import ( + ClientHello, Extensions, HandshakeFailure, ServerHello + ) + from tlsenum.mappings import CipherSuites, ECCurves, ECPointFormat CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"]) + + + def send_client_hello(host, port, data): + """ + Sends a ClientHello message in bytes. + + Returns a ServerHello message in bytes + + """ + s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + s.connect((host, port)) + s.send(data) + + server_hello = s.recv(5) + server_hello += s.recv(UBInt16("length").parse(server_hello[3:5])) + + return server_hello @click.command(context_settings=CONTEXT_SETTINGS) @click.argument("host", type=click.STRING) @click.argument("port", type=click.INT) @click.option("--verify-cert", is_flag=True) def cli(host, port, verify_cert): """ A command line tool to enumerate TLS cipher-suites supported by a server. """ - pass + cipher_suites_list = [i.name for i in CipherSuites] + extension = Extensions() + extension.sni = host + extension.ec_curves = [i.name for i in ECCurves] + extension.ec_point_format = [i.name for i in ECPointFormat] + + client_hello = ClientHello() + client_hello.protocol_version = "1.2" + client_hello.deflate = False + client_hello.extensions = extension.build() + + supported_cipher_suites = [] + + while True: + client_hello.cipher_suites = cipher_suites_list + server_hello = send_client_hello(host, port, client_hello.build()) + try: + server_hello = ServerHello.parse_server_hello(server_hello) + except HandshakeFailure: + break + + supported_cipher_suites.append(server_hello.cipher_suite) + cipher_suites_list.remove(server_hello.cipher_suite) + + for i in supported_cipher_suites: + print(i) +
Add very basic logic to figure out supported cipher suites.
## Code Before: import click CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"]) @click.command(context_settings=CONTEXT_SETTINGS) @click.argument("host", type=click.STRING) @click.argument("port", type=click.INT) @click.option("--verify-cert", is_flag=True) def cli(host, port, verify_cert): """ A command line tool to enumerate TLS cipher-suites supported by a server. """ pass ## Instruction: Add very basic logic to figure out supported cipher suites. ## Code After: import socket import click from construct import UBInt16 from tlsenum.parse_hello import ( ClientHello, Extensions, HandshakeFailure, ServerHello ) from tlsenum.mappings import CipherSuites, ECCurves, ECPointFormat CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"]) def send_client_hello(host, port, data): """ Sends a ClientHello message in bytes. Returns a ServerHello message in bytes """ s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.connect((host, port)) s.send(data) server_hello = s.recv(5) server_hello += s.recv(UBInt16("length").parse(server_hello[3:5])) return server_hello @click.command(context_settings=CONTEXT_SETTINGS) @click.argument("host", type=click.STRING) @click.argument("port", type=click.INT) @click.option("--verify-cert", is_flag=True) def cli(host, port, verify_cert): """ A command line tool to enumerate TLS cipher-suites supported by a server. """ cipher_suites_list = [i.name for i in CipherSuites] extension = Extensions() extension.sni = host extension.ec_curves = [i.name for i in ECCurves] extension.ec_point_format = [i.name for i in ECPointFormat] client_hello = ClientHello() client_hello.protocol_version = "1.2" client_hello.deflate = False client_hello.extensions = extension.build() supported_cipher_suites = [] while True: client_hello.cipher_suites = cipher_suites_list server_hello = send_client_hello(host, port, client_hello.build()) try: server_hello = ServerHello.parse_server_hello(server_hello) except HandshakeFailure: break supported_cipher_suites.append(server_hello.cipher_suite) cipher_suites_list.remove(server_hello.cipher_suite) for i in supported_cipher_suites: print(i)
+ import socket + import click + from construct import UBInt16 + + from tlsenum.parse_hello import ( + ClientHello, Extensions, HandshakeFailure, ServerHello + ) + from tlsenum.mappings import CipherSuites, ECCurves, ECPointFormat CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"]) + + + def send_client_hello(host, port, data): + """ + Sends a ClientHello message in bytes. + + Returns a ServerHello message in bytes + + """ + s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + s.connect((host, port)) + s.send(data) + + server_hello = s.recv(5) + server_hello += s.recv(UBInt16("length").parse(server_hello[3:5])) + + return server_hello @click.command(context_settings=CONTEXT_SETTINGS) @click.argument("host", type=click.STRING) @click.argument("port", type=click.INT) @click.option("--verify-cert", is_flag=True) def cli(host, port, verify_cert): """ A command line tool to enumerate TLS cipher-suites supported by a server. """ - pass + cipher_suites_list = [i.name for i in CipherSuites] + + extension = Extensions() + extension.sni = host + extension.ec_curves = [i.name for i in ECCurves] + extension.ec_point_format = [i.name for i in ECPointFormat] + + client_hello = ClientHello() + client_hello.protocol_version = "1.2" + client_hello.deflate = False + client_hello.extensions = extension.build() + + supported_cipher_suites = [] + + while True: + client_hello.cipher_suites = cipher_suites_list + server_hello = send_client_hello(host, port, client_hello.build()) + try: + server_hello = ServerHello.parse_server_hello(server_hello) + except HandshakeFailure: + break + + supported_cipher_suites.append(server_hello.cipher_suite) + cipher_suites_list.remove(server_hello.cipher_suite) + + for i in supported_cipher_suites: + print(i)
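The enumeration works by elimination: offer every remaining suite, record whichever one the server selects, drop it from the offer, and stop at the first handshake failure. A pure-Python sketch of that loop; `server_pick` is a hypothetical stand-in for the ClientHello/ServerHello round trip (the real code additionally reads the 5-byte TLS record header and its 2-byte length field off the socket):

```python
class HandshakeFailure(Exception):
    pass

def server_pick(offered, server_prefs):
    # A server chooses by its own preference order among offered suites.
    for suite in server_prefs:
        if suite in offered:
            return suite
    raise HandshakeFailure()  # no overlap -> handshake failure alert

def enumerate_suites(offered, server_prefs):
    offered = list(offered)
    supported = []
    while True:
        try:
            choice = server_pick(offered, server_prefs)
        except HandshakeFailure:
            return supported
        supported.append(choice)
        offered.remove(choice)  # never offer an accepted suite again

print(enumerate_suites(["A", "B", "C", "D"], ["C", "A"]))  # ['C', 'A']
```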
d142bed6916d8b34509c12623b4802eca9206695
tests/test_ab_testing.py
tests/test_ab_testing.py
from . import TheInternetTestCase from helium.api import go_to, S, get_driver class AbTestingTest(TheInternetTestCase): def get_page(self): return "http://the-internet.herokuapp.com/abtest" def test_ab_variates(self): variation = S("h3") first_variation = variation.web_element.text self.assertIn( first_variation, [u"A/B Test Variation 1", u"A/B Test Control"] ) get_driver().delete_all_cookies() go_to("http://the-internet.herokuapp.com/abtest") variation = S("h3") second_variation = variation.web_element.text self.assertIn( second_variation, [u"A/B Test Variation 1", u"A/B Test Control"] ) self.assertNotEqual(first_variation, second_variation)
from . import TheInternetTestCase from helium.api import go_to, S, get_driver class AbTestingTest(TheInternetTestCase): def get_page(self): return "http://the-internet.herokuapp.com/abtest" def test_ab_variates(self): header = S("h3") first_variation = header.web_element.text self.assertIn( first_variation, [u"A/B Test Variation 1", u"A/B Test Control"] ) second_variation = first_variation while second_variation == first_variation: get_driver().delete_all_cookies() go_to("http://the-internet.herokuapp.com/abtest") header = S("h3") second_variation = header.web_element.text self.assertIn( second_variation, [u"A/B Test Variation 1", u"A/B Test Control"] ) self.assertNotEqual(first_variation, second_variation)
Make the AB test case more stable.
Make the AB test case more stable.
Python
mit
bugfree-software/the-internet-solution-python
from . import TheInternetTestCase from helium.api import go_to, S, get_driver class AbTestingTest(TheInternetTestCase): def get_page(self): return "http://the-internet.herokuapp.com/abtest" def test_ab_variates(self): - variation = S("h3") + header = S("h3") - first_variation = variation.web_element.text + first_variation = header.web_element.text self.assertIn( first_variation, [u"A/B Test Variation 1", u"A/B Test Control"] ) + second_variation = first_variation + while second_variation == first_variation: - get_driver().delete_all_cookies() + get_driver().delete_all_cookies() - go_to("http://the-internet.herokuapp.com/abtest") + go_to("http://the-internet.herokuapp.com/abtest") - variation = S("h3") + header = S("h3") - second_variation = variation.web_element.text + second_variation = header.web_element.text - self.assertIn( + self.assertIn( - second_variation, [u"A/B Test Variation 1", u"A/B Test Control"] + second_variation, [u"A/B Test Variation 1", u"A/B Test Control"] - ) + ) self.assertNotEqual(first_variation, second_variation)
Make the AB test case more stable.
## Code Before: from . import TheInternetTestCase from helium.api import go_to, S, get_driver class AbTestingTest(TheInternetTestCase): def get_page(self): return "http://the-internet.herokuapp.com/abtest" def test_ab_variates(self): variation = S("h3") first_variation = variation.web_element.text self.assertIn( first_variation, [u"A/B Test Variation 1", u"A/B Test Control"] ) get_driver().delete_all_cookies() go_to("http://the-internet.herokuapp.com/abtest") variation = S("h3") second_variation = variation.web_element.text self.assertIn( second_variation, [u"A/B Test Variation 1", u"A/B Test Control"] ) self.assertNotEqual(first_variation, second_variation) ## Instruction: Make the AB test case more stable. ## Code After: from . import TheInternetTestCase from helium.api import go_to, S, get_driver class AbTestingTest(TheInternetTestCase): def get_page(self): return "http://the-internet.herokuapp.com/abtest" def test_ab_variates(self): header = S("h3") first_variation = header.web_element.text self.assertIn( first_variation, [u"A/B Test Variation 1", u"A/B Test Control"] ) second_variation = first_variation while second_variation == first_variation: get_driver().delete_all_cookies() go_to("http://the-internet.herokuapp.com/abtest") header = S("h3") second_variation = header.web_element.text self.assertIn( second_variation, [u"A/B Test Variation 1", u"A/B Test Control"] ) self.assertNotEqual(first_variation, second_variation)
from . import TheInternetTestCase from helium.api import go_to, S, get_driver class AbTestingTest(TheInternetTestCase): def get_page(self): return "http://the-internet.herokuapp.com/abtest" def test_ab_variates(self): - variation = S("h3") + header = S("h3") - first_variation = variation.web_element.text ? ^ ------ + first_variation = header.web_element.text ? ^^ ++ self.assertIn( first_variation, [u"A/B Test Variation 1", u"A/B Test Control"] ) + second_variation = first_variation + while second_variation == first_variation: - get_driver().delete_all_cookies() + get_driver().delete_all_cookies() ? + - go_to("http://the-internet.herokuapp.com/abtest") + go_to("http://the-internet.herokuapp.com/abtest") ? + - variation = S("h3") + header = S("h3") - second_variation = variation.web_element.text ? ^ ------ + second_variation = header.web_element.text ? + ^^ ++ - self.assertIn( + self.assertIn( ? + - second_variation, [u"A/B Test Variation 1", u"A/B Test Control"] + second_variation, [u"A/B Test Variation 1", u"A/B Test Control"] ? + - ) + ) ? + self.assertNotEqual(first_variation, second_variation)
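The stabilised test reloads the page with cleared cookies until it observes a second variation. The commit's loop is unbounded; a defensive variant of the same retry pattern caps the attempts, sketched below with a random stand-in for the page fetch (the cap is a generic hardening idea, not something the repo does):

```python
import random

def fetch_variation():
    # Hypothetical stand-in for clearing cookies and reloading the page.
    return random.choice(["A/B Test Variation 1", "A/B Test Control"])

def observe_both(max_attempts=50):
    first = fetch_variation()
    for _ in range(max_attempts):
        second = fetch_variation()
        if second != first:
            return first, second
    raise AssertionError("only ever saw %r in %d attempts"
                         % (first, max_attempts))

print(observe_both())
```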
e8a0e7c3714445577851c5a84ecf7a036937725a
clang_corpus/__init__.py
clang_corpus/__init__.py
from os import listdir from os.path import abspath, isfile, join, splitext # C, C++, Obj-C, & Obj-C++ SOURCE_EXTENSIONS = ('.h', '.hh', '.hpp', '.c', '.cpp', '.cxx', '.m', '.mm') class SourceFile(object): """ A simple object which wraps a text file. """ def __init__(self, path): self._path = abspath(path) @property def path(self): return self._path @property def bytes(self): with open(self._path, "rb") as fp: bytes = fp.read() return bytes def create_package_modules(package_path, module_dict): """ Populate a module dictionary with `SourceFile` objects for each source file in a directory. """ package_path = abspath(package_path) for filename in listdir(package_path): file_path = join(package_path, filename) key, ext = splitext(filename) if isfile(file_path) and ext in SOURCE_EXTENSIONS: key, ext = splitext(filename) module_dict[key] = SourceFile(file_path)
from os import listdir from os.path import abspath, isfile, join, split, splitext # C, C++, Obj-C, & Obj-C++ SOURCE_EXTENSIONS = ('.h', '.hh', '.hpp', '.c', '.cpp', '.cxx', '.m', '.mm') class SourceFile(object): """ A simple object which wraps a text file. """ def __init__(self, path): self._path = abspath(path) @property def path(self): return self._path @property def include_paths(self): return [split(self._path)[0]] @property def bytes(self): with open(self._path, "rb") as fp: bytes = fp.read() return bytes def create_package_modules(package_path, module_dict): """ Populate a module dictionary with `SourceFile` objects for each source file in a directory. """ package_path = abspath(package_path) for filename in listdir(package_path): file_path = join(package_path, filename) key, ext = splitext(filename) if isfile(file_path) and ext in SOURCE_EXTENSIONS: key, ext = splitext(filename) module_dict[key] = SourceFile(file_path)
Add an include_paths property to the SourceFile class.
Add an include_paths property to the SourceFile class.
Python
unlicense
jwiggins/clang-corpus,jwiggins/clang-corpus,jwiggins/clang-corpus
from os import listdir - from os.path import abspath, isfile, join, splitext + from os.path import abspath, isfile, join, split, splitext # C, C++, Obj-C, & Obj-C++ SOURCE_EXTENSIONS = ('.h', '.hh', '.hpp', '.c', '.cpp', '.cxx', '.m', '.mm') class SourceFile(object): """ A simple object which wraps a text file. """ def __init__(self, path): self._path = abspath(path) @property def path(self): return self._path + + @property + def include_paths(self): + return [split(self._path)[0]] @property def bytes(self): with open(self._path, "rb") as fp: bytes = fp.read() return bytes def create_package_modules(package_path, module_dict): """ Populate a module dictionary with `SourceFile` objects for each source file in a directory. """ package_path = abspath(package_path) for filename in listdir(package_path): file_path = join(package_path, filename) key, ext = splitext(filename) if isfile(file_path) and ext in SOURCE_EXTENSIONS: key, ext = splitext(filename) module_dict[key] = SourceFile(file_path)
Add an include_paths property to the SourceFile class.
## Code Before: from os import listdir from os.path import abspath, isfile, join, splitext # C, C++, Obj-C, & Obj-C++ SOURCE_EXTENSIONS = ('.h', '.hh', '.hpp', '.c', '.cpp', '.cxx', '.m', '.mm') class SourceFile(object): """ A simple object which wraps a text file. """ def __init__(self, path): self._path = abspath(path) @property def path(self): return self._path @property def bytes(self): with open(self._path, "rb") as fp: bytes = fp.read() return bytes def create_package_modules(package_path, module_dict): """ Populate a module dictionary with `SourceFile` objects for each source file in a directory. """ package_path = abspath(package_path) for filename in listdir(package_path): file_path = join(package_path, filename) key, ext = splitext(filename) if isfile(file_path) and ext in SOURCE_EXTENSIONS: key, ext = splitext(filename) module_dict[key] = SourceFile(file_path) ## Instruction: Add an include_paths property to the SourceFile class. ## Code After: from os import listdir from os.path import abspath, isfile, join, split, splitext # C, C++, Obj-C, & Obj-C++ SOURCE_EXTENSIONS = ('.h', '.hh', '.hpp', '.c', '.cpp', '.cxx', '.m', '.mm') class SourceFile(object): """ A simple object which wraps a text file. """ def __init__(self, path): self._path = abspath(path) @property def path(self): return self._path @property def include_paths(self): return [split(self._path)[0]] @property def bytes(self): with open(self._path, "rb") as fp: bytes = fp.read() return bytes def create_package_modules(package_path, module_dict): """ Populate a module dictionary with `SourceFile` objects for each source file in a directory. """ package_path = abspath(package_path) for filename in listdir(package_path): file_path = join(package_path, filename) key, ext = splitext(filename) if isfile(file_path) and ext in SOURCE_EXTENSIONS: key, ext = splitext(filename) module_dict[key] = SourceFile(file_path)
from os import listdir - from os.path import abspath, isfile, join, splitext + from os.path import abspath, isfile, join, split, splitext ? +++++++ # C, C++, Obj-C, & Obj-C++ SOURCE_EXTENSIONS = ('.h', '.hh', '.hpp', '.c', '.cpp', '.cxx', '.m', '.mm') class SourceFile(object): """ A simple object which wraps a text file. """ def __init__(self, path): self._path = abspath(path) @property def path(self): return self._path + + @property + def include_paths(self): + return [split(self._path)[0]] @property def bytes(self): with open(self._path, "rb") as fp: bytes = fp.read() return bytes def create_package_modules(package_path, module_dict): """ Populate a module dictionary with `SourceFile` objects for each source file in a directory. """ package_path = abspath(package_path) for filename in listdir(package_path): file_path = join(package_path, filename) key, ext = splitext(filename) if isfile(file_path) and ext in SOURCE_EXTENSIONS: key, ext = splitext(filename) module_dict[key] = SourceFile(file_path)
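The new property derives an include path from the file's own location: `os.path.split(path)[0]` is the containing directory, equivalent to `os.path.dirname(path)`. A small sketch of the property in isolation; the class and file names are illustrative:

```python
from os.path import abspath, dirname, split

class SourceFileSketch(object):
    """Illustrative re-creation of the property added above."""

    def __init__(self, path):
        self._path = abspath(path)

    @property
    def include_paths(self):
        # split() returns (head, tail); head is the directory a compiler
        # would need on its -I search path for sibling headers.
        return [split(self._path)[0]]

src = SourceFileSketch("corpus/widget.c")  # hypothetical source file
assert src.include_paths == [dirname(abspath("corpus/widget.c"))]
print(src.include_paths)
```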
1ff19fcd0bcbb396b7cb676c5dddf8d3c8652419
live/components/misc.py
live/components/misc.py
from live.helpers import Timer def timed(fun, time, next_fun=None): """A component that runs another component for a fixed length of time. Can optionally be given a follow-up component for chaining. :param callable fun: The component to be run: :param number time: The amount of time to run the component :keyword callable next_fun: A component to run after the timed component is finished """ timer = Timer(time) def timed_callback(self, id): nonlocal timer if timer > 0.0: fun(self, id) else: if next_fun: self.logic_components.set(next_fun, id=id) else: self.logic_components.remove(id) return timed_callback def suspend(time, next_fun): """A component that suspends a component currently in the component list for a fixed length of time. Can optionally be given a different component to be run after the suspension is lifted. :param number time: The amount of time to run the component :keyword callable next_fun: A component to run after the suspension is lifted """ def suspend_callback(self, id): pass return timed(suspend_callback, time, next_fun )
from live.helpers import Timer def timed(fun, time, next_fun=None): """A component that runs another component for a fixed length of time. Can optionally be given a follow-up component for chaining. :param callable fun: The component to be run: :param number time: The amount of time to run the component :keyword callable next_fun: A component to run after the timed component is finished """ timer = Timer(time) def timed_callback(self, id, *args): nonlocal timer if timer > 0.0: fun(self, id) else: if len(args) == 0: correct_queue = self.logic_components else: correct_queue = self.collision_components if next_fun: correct_queue.set(next_fun, id=id) else: correct_queue.remove(id) return timed_callback def suspend(time, next_fun): """A component that suspends a component currently in the component list for a fixed length of time. Can optionally be given a different component to be run after the suspension is lifted. :param number time: The amount of time to run the component :keyword callable next_fun: A component to run after the suspension is lifted """ def suspend_callback(self, id): pass return timed(suspend_callback, time, next_fun )
Update timed_callback to support collision callbacks.
Update timed_callback to support collision callbacks.
Python
lgpl-2.1
GalanCM/BGELive
from live.helpers import Timer def timed(fun, time, next_fun=None): """A component that runs another component for a fixed length of time. Can optionally be given a follow-up component for chaining. :param callable fun: The component to be run: :param number time: The amount of time to run the component :keyword callable next_fun: A component to run after the timed component is finished """ timer = Timer(time) - def timed_callback(self, id): + def timed_callback(self, id, *args): nonlocal timer if timer > 0.0: fun(self, id) else: + if len(args) == 0: + correct_queue = self.logic_components + else: + correct_queue = self.collision_components + if next_fun: - self.logic_components.set(next_fun, id=id) + correct_queue.set(next_fun, id=id) else: - self.logic_components.remove(id) + correct_queue.remove(id) return timed_callback def suspend(time, next_fun): """A component that suspends a component currently in the component list for a fixed length of time. Can optionally be given a different component to be run after the suspension is lifted. :param number time: The amount of time to run the component :keyword callable next_fun: A component to run after the suspension is lifted """ def suspend_callback(self, id): pass return timed(suspend_callback, time, next_fun )
Update timed_callback to support collision callbacks.
## Code Before: from live.helpers import Timer def timed(fun, time, next_fun=None): """A component that runs another component for a fixed length of time. Can optionally be given a follow-up component for chaining. :param callable fun: The component to be run: :param number time: The amount of time to run the component :keyword callable next_fun: A component to run after the timed component is finished """ timer = Timer(time) def timed_callback(self, id): nonlocal timer if timer > 0.0: fun(self, id) else: if next_fun: self.logic_components.set(next_fun, id=id) else: self.logic_components.remove(id) return timed_callback def suspend(time, next_fun): """A component that suspends a component currently in the component list for a fixed length of time. Can optionally be given a different component to be run after the suspension is lifted. :param number time: The amount of time to run the component :keyword callable next_fun: A component to run after the suspension is lifted """ def suspend_callback(self, id): pass return timed(suspend_callback, time, next_fun ) ## Instruction: Update timed_callback to support collision callbacks. ## Code After: from live.helpers import Timer def timed(fun, time, next_fun=None): """A component that runs another component for a fixed length of time. Can optionally be given a follow-up component for chaining. :param callable fun: The component to be run: :param number time: The amount of time to run the component :keyword callable next_fun: A component to run after the timed component is finished """ timer = Timer(time) def timed_callback(self, id, *args): nonlocal timer if timer > 0.0: fun(self, id) else: if len(args) == 0: correct_queue = self.logic_components else: correct_queue = self.collision_components if next_fun: correct_queue.set(next_fun, id=id) else: correct_queue.remove(id) return timed_callback def suspend(time, next_fun): """A component that suspends a component currently in the component list for a fixed length of time. Can optionally be given a different component to be run after the suspension is lifted. :param number time: The amount of time to run the component :keyword callable next_fun: A component to run after the suspension is lifted """ def suspend_callback(self, id): pass return timed(suspend_callback, time, next_fun )
from live.helpers import Timer def timed(fun, time, next_fun=None): """A component that runs another component for a fixed length of time. Can optionally be given a follow-up component for chaining. :param callable fun: The component to be run: :param number time: The amount of time to run the component :keyword callable next_fun: A component to run after the timed component is finished """ timer = Timer(time) - def timed_callback(self, id): + def timed_callback(self, id, *args): ? +++++++ nonlocal timer if timer > 0.0: fun(self, id) else: + if len(args) == 0: + correct_queue = self.logic_components + else: + correct_queue = self.collision_components + if next_fun: - self.logic_components.set(next_fun, id=id) + correct_queue.set(next_fun, id=id) else: - self.logic_components.remove(id) + correct_queue.remove(id) return timed_callback def suspend(time, next_fun): """A component that suspends a component currently in the component list for a fixed length of time. Can optionally be given a different component to be run after the suspension is lifted. :param number time: The amount of time to run the component :keyword callable next_fun: A component to run after the suspension is lifted """ def suspend_callback(self, id): pass return timed(suspend_callback, time, next_fun )
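The updated callback uses `*args` to tell its two call sites apart: logic components are invoked as `(self, id)` while collision components receive extra arguments, so the presence of extra arguments selects which queue to clean up. A minimal sketch of that arity-based dispatch; plain lists stand in for the engine's component queues:

```python
def make_timed(fun, ticks, logic_queue, collision_queue):
    """Run fun for a fixed number of ticks, then remove the callback id
    from whichever queue invoked it."""
    remaining = ticks

    def timed_callback(self, id, *args):
        nonlocal remaining
        if remaining > 0:
            remaining -= 1
            fun(self, id)
        else:
            # Collision calls pass extra arguments; logic calls do not.
            queue = collision_queue if args else logic_queue
            queue.remove(id)

    return timed_callback

logic, collision = ["cb1"], ["cb2"]
cb1 = make_timed(lambda s, i: None, 1, logic, collision)
cb1(None, "cb1")                  # tick 1: still running
cb1(None, "cb1")                  # expired, no extra args -> logic queue
cb2 = make_timed(lambda s, i: None, 0, logic, collision)
cb2(None, "cb2", "hit_object")    # expired, extra arg -> collision queue
print(logic, collision)           # [] []
```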
8de9c51eedaadc68cc64ca7f5763b48a6448dca3
main.py
main.py
from dogbot.bot import bot from dogbot.web import app from mongoengine import connect from config import config import getopt import sys import threading from importlib import reload reload(threading) def bot_main(): print("QQBot is running...") try: bot.start() except KeyboardInterrupt: pass finally: print('Bye') def web_main(): app.run(threaded=True) if __name__ == '__main__': connect(config.get('db')) try: opts, args = getopt.gnu_getopt(sys.argv[1:], 'bw') except getopt.GetoptError as err: print(err) sys.exit(2) for o, a in opts: if o == '-b': bot_main() elif o in '-w': reload(threading) web_main() else: assert False, "unhandled option"
from dogbot.bot import bot from dogbot.web import app from mongoengine import connect from config import config import getopt import sys import threading from importlib import reload reload(threading) def bot_main(): print("QQBot is running...") try: bot.start() except KeyboardInterrupt: pass finally: print('Bye') def web_main(): app.run(host='0.0.0.0', threaded=True) if __name__ == '__main__': connect(config.get('db')) try: opts, args = getopt.gnu_getopt(sys.argv[1:], 'bw') except getopt.GetoptError as err: print(err) sys.exit(2) for o, a in opts: if o == '-b': bot_main() elif o in '-w': reload(threading) web_main() else: assert False, "unhandled option"
Change host from 127.0.0.1 to 0.0.0.0
Change host from 127.0.0.1 to 0.0.0.0
Python
apache-2.0
moondropx/dogbot,moondropx/dogbot
from dogbot.bot import bot from dogbot.web import app from mongoengine import connect from config import config import getopt import sys import threading from importlib import reload reload(threading) def bot_main(): print("QQBot is running...") try: bot.start() except KeyboardInterrupt: pass finally: print('Bye') def web_main(): - app.run(threaded=True) + app.run(host='0.0.0.0', threaded=True) if __name__ == '__main__': connect(config.get('db')) try: opts, args = getopt.gnu_getopt(sys.argv[1:], 'bw') except getopt.GetoptError as err: print(err) sys.exit(2) for o, a in opts: if o == '-b': bot_main() elif o in '-w': reload(threading) web_main() else: assert False, "unhandled option"
Change host from 127.0.0.1 to 0.0.0.0
## Code Before: from dogbot.bot import bot from dogbot.web import app from mongoengine import connect from config import config import getopt import sys import threading from importlib import reload reload(threading) def bot_main(): print("QQBot is running...") try: bot.start() except KeyboardInterrupt: pass finally: print('Bye') def web_main(): app.run(threaded=True) if __name__ == '__main__': connect(config.get('db')) try: opts, args = getopt.gnu_getopt(sys.argv[1:], 'bw') except getopt.GetoptError as err: print(err) sys.exit(2) for o, a in opts: if o == '-b': bot_main() elif o in '-w': reload(threading) web_main() else: assert False, "unhandled option" ## Instruction: Edit host from 127.0.0.1 -> 0.0.0.0 ## Code After: from dogbot.bot import bot from dogbot.web import app from mongoengine import connect from config import config import getopt import sys import threading from importlib import reload reload(threading) def bot_main(): print("QQBot is running...") try: bot.start() except KeyboardInterrupt: pass finally: print('Bye') def web_main(): app.run(host='0.0.0.0', threaded=True) if __name__ == '__main__': connect(config.get('db')) try: opts, args = getopt.gnu_getopt(sys.argv[1:], 'bw') except getopt.GetoptError as err: print(err) sys.exit(2) for o, a in opts: if o == '-b': bot_main() elif o in '-w': reload(threading) web_main() else: assert False, "unhandled option"
from dogbot.bot import bot from dogbot.web import app from mongoengine import connect from config import config import getopt import sys import threading from importlib import reload reload(threading) def bot_main(): print("QQBot is running...") try: bot.start() except KeyboardInterrupt: pass finally: print('Bye') def web_main(): - app.run(threaded=True) + app.run(host='0.0.0.0', threaded=True) ? ++++++++++++++++ if __name__ == '__main__': connect(config.get('db')) try: opts, args = getopt.gnu_getopt(sys.argv[1:], 'bw') except getopt.GetoptError as err: print(err) sys.exit(2) for o, a in opts: if o == '-b': bot_main() elif o in '-w': reload(threading) web_main() else: assert False, "unhandled option"
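Werkzeug's development server binds 127.0.0.1 by default, so only the local machine can connect; passing `host='0.0.0.0'` listens on all interfaces, which is what the change above enables. A minimal sketch (the security caveat is general advice, not from the repo):

```python
from flask import Flask

app = Flask(__name__)

@app.route("/")
def index():
    return "reachable from other machines once bound to 0.0.0.0"

if __name__ == "__main__":
    # threaded=True lets the dev server serve concurrent requests.
    # Binding 0.0.0.0 is convenient on a trusted LAN but exposes the
    # debug-grade server to anything that can reach the host.
    app.run(host="0.0.0.0", threaded=True)
```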
2acb5a2eb7ae0a0f8ea8423a7da5a7a8b9f07151
fore/mailer.py
fore/mailer.py
import smtplib from email.mime.text import MIMEText import apikeys def AlertMessage(message, subject='Glitch System Message', me=apikeys.system_email, you=apikeys.admin_email): msg = MIMEText(message) msg['Subject'] = subject msg['From'] = me msg['To'] = you # Send the message via our own SMTP server, but don't include the # envelope header. s = smtplib.SMTP('localhost') s.sendmail(me, [you], msg.as_string()) s.quit() a_message = 'There is someting I need to tell you.' AlertMessage(a_message)
import smtplib from email.mime.text import MIMEText import apikeys def AlertMessage(message, subject='Glitch System Message', me=apikeys.system_email, you=apikeys.admin_email): msg = MIMEText(message) msg['Subject'] = subject msg['From'] = me msg['To'] = you # Send the message via our own SMTP server, but don't include the # envelope header. s = smtplib.SMTP('localhost') s.sendmail(me, [you], msg.as_string()) s.quit() def test(): a_message = 'There is someting I need to tell you.' AlertMessage(a_message)
Move test message into function.
Move test message into function.
Python
artistic-2.0
Rosuav/appension,MikeiLL/appension,Rosuav/appension,MikeiLL/appension,Rosuav/appension,MikeiLL/appension,Rosuav/appension,MikeiLL/appension
import smtplib from email.mime.text import MIMEText import apikeys def AlertMessage(message, subject='Glitch System Message', me=apikeys.system_email, you=apikeys.admin_email): msg = MIMEText(message) msg['Subject'] = subject msg['From'] = me msg['To'] = you # Send the message via our own SMTP server, but don't include the # envelope header. s = smtplib.SMTP('localhost') s.sendmail(me, [you], msg.as_string()) s.quit() + def test(): - a_message = 'There is someting I need to tell you.' + a_message = 'There is someting I need to tell you.' - AlertMessage(a_message) + AlertMessage(a_message)
Move test message into function.
## Code Before: import smtplib from email.mime.text import MIMEText import apikeys def AlertMessage(message, subject='Glitch System Message', me=apikeys.system_email, you=apikeys.admin_email): msg = MIMEText(message) msg['Subject'] = subject msg['From'] = me msg['To'] = you # Send the message via our own SMTP server, but don't include the # envelope header. s = smtplib.SMTP('localhost') s.sendmail(me, [you], msg.as_string()) s.quit() a_message = 'There is someting I need to tell you.' AlertMessage(a_message) ## Instruction: Move test message into function. ## Code After: import smtplib from email.mime.text import MIMEText import apikeys def AlertMessage(message, subject='Glitch System Message', me=apikeys.system_email, you=apikeys.admin_email): msg = MIMEText(message) msg['Subject'] = subject msg['From'] = me msg['To'] = you # Send the message via our own SMTP server, but don't include the # envelope header. s = smtplib.SMTP('localhost') s.sendmail(me, [you], msg.as_string()) s.quit() def test(): a_message = 'There is someting I need to tell you.' AlertMessage(a_message)
import smtplib from email.mime.text import MIMEText import apikeys def AlertMessage(message, subject='Glitch System Message', me=apikeys.system_email, you=apikeys.admin_email): msg = MIMEText(message) msg['Subject'] = subject msg['From'] = me msg['To'] = you # Send the message via our own SMTP server, but don't include the # envelope header. s = smtplib.SMTP('localhost') s.sendmail(me, [you], msg.as_string()) s.quit() + def test(): - a_message = 'There is someting I need to tell you.' + a_message = 'There is someting I need to tell you.' ? + - AlertMessage(a_message) + AlertMessage(a_message) ? +
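The change wraps the module-level send in a `test()` function, so importing the module no longer fires an email as a side effect. A generic sketch of that import-time-side-effect fix; `send_alert` is a stand-in for the SMTP call, and the `__main__` guard is one common follow-up, not part of the commit:

```python
def send_alert(message):
    # Hypothetical stand-in for building a MIMEText and calling smtplib.
    print("would send: %r" % message)

def test():
    send_alert("There is something I need to tell you.")

if __name__ == "__main__":
    # Side effects now run only on direct execution, never on import.
    test()
```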
2b3df42f77c7277369631c1b31266a41526bf90c
src/rotest/management/migrations/0002_auto_20150224_1427.py
src/rotest/management/migrations/0002_auto_20150224_1427.py
from __future__ import unicode_literals from django.db import migrations from django.contrib.auth import models as auth_models def create_users(apps, schema_editor): qa_group, _ = auth_models.Group.objects.get_or_create(name="QA") localhost, _ = auth_models.User.objects.get_or_create(username="localhost", password="localhost", email="[email protected]") qa_group.user_set.add(localhost) class Migration(migrations.Migration): dependencies = [ ('management', '0001_initial'), ] operations = [migrations.RunPython(create_users) ]
from __future__ import unicode_literals from django.db import migrations from django.contrib.auth import models as auth_models ADMIN_USERNAME = "rotest" ADMIN_PASSWORD = "rotest" def create_users(apps, schema_editor): qa_group, _ = auth_models.Group.objects.get_or_create(name="QA") localhost, _ = auth_models.User.objects.get_or_create(username="localhost", password="localhost", email="[email protected]") qa_group.user_set.add(localhost) try: auth_models.User.objects.get(username=ADMIN_USERNAME) except auth_models.User.DoesNotExist: auth_models.User.objects.create_superuser(ADMIN_USERNAME, "[email protected]", ADMIN_PASSWORD) class Migration(migrations.Migration): dependencies = [ ('management', '0001_initial'), ] operations = [migrations.RunPython(create_users) ]
Revert the superuser creation in a migration
Revert the superuser creation in a migration
Python
mit
gregoil/rotest
from __future__ import unicode_literals from django.db import migrations from django.contrib.auth import models as auth_models + + ADMIN_USERNAME = "rotest" + ADMIN_PASSWORD = "rotest" def create_users(apps, schema_editor): qa_group, _ = auth_models.Group.objects.get_or_create(name="QA") localhost, _ = auth_models.User.objects.get_or_create(username="localhost", password="localhost", email="[email protected]") qa_group.user_set.add(localhost) + + try: + auth_models.User.objects.get(username=ADMIN_USERNAME) + except auth_models.User.DoesNotExist: + auth_models.User.objects.create_superuser(ADMIN_USERNAME, + "[email protected]", + ADMIN_PASSWORD) class Migration(migrations.Migration): dependencies = [ ('management', '0001_initial'), ] operations = [migrations.RunPython(create_users) ]
Revert the superuser creation in a migration
## Code Before: from __future__ import unicode_literals from django.db import migrations from django.contrib.auth import models as auth_models def create_users(apps, schema_editor): qa_group, _ = auth_models.Group.objects.get_or_create(name="QA") localhost, _ = auth_models.User.objects.get_or_create(username="localhost", password="localhost", email="[email protected]") qa_group.user_set.add(localhost) class Migration(migrations.Migration): dependencies = [ ('management', '0001_initial'), ] operations = [migrations.RunPython(create_users) ] ## Instruction: Revert the superuser creation in a migration ## Code After: from __future__ import unicode_literals from django.db import migrations from django.contrib.auth import models as auth_models ADMIN_USERNAME = "rotest" ADMIN_PASSWORD = "rotest" def create_users(apps, schema_editor): qa_group, _ = auth_models.Group.objects.get_or_create(name="QA") localhost, _ = auth_models.User.objects.get_or_create(username="localhost", password="localhost", email="[email protected]") qa_group.user_set.add(localhost) try: auth_models.User.objects.get(username=ADMIN_USERNAME) except auth_models.User.DoesNotExist: auth_models.User.objects.create_superuser(ADMIN_USERNAME, "[email protected]", ADMIN_PASSWORD) class Migration(migrations.Migration): dependencies = [ ('management', '0001_initial'), ] operations = [migrations.RunPython(create_users) ]
from __future__ import unicode_literals from django.db import migrations from django.contrib.auth import models as auth_models + + ADMIN_USERNAME = "rotest" + ADMIN_PASSWORD = "rotest" def create_users(apps, schema_editor): qa_group, _ = auth_models.Group.objects.get_or_create(name="QA") localhost, _ = auth_models.User.objects.get_or_create(username="localhost", password="localhost", email="[email protected]") qa_group.user_set.add(localhost) + + try: + auth_models.User.objects.get(username=ADMIN_USERNAME) + except auth_models.User.DoesNotExist: + auth_models.User.objects.create_superuser(ADMIN_USERNAME, + "[email protected]", + ADMIN_PASSWORD) class Migration(migrations.Migration): dependencies = [ ('management', '0001_initial'), ] operations = [migrations.RunPython(create_users) ]
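The migration guards superuser creation with a `try`/`except DoesNotExist`, so re-running it is idempotent. A framework-free sketch of that guard; the dict "database" and helper names only mimic Django's shape:

```python
class DoesNotExist(Exception):
    pass

USERS = {}

def get_user(username):
    try:
        return USERS[username]
    except KeyError:
        raise DoesNotExist(username)

def create_superuser_once(username, email, password):
    try:
        get_user(username)          # already present: nothing to do
    except DoesNotExist:
        USERS[username] = {"email": email, "password": password,
                           "is_superuser": True}

create_superuser_once("rotest", "[email protected]", "rotest")
create_superuser_once("rotest", "[email protected]", "rotest")  # safe rerun
assert len(USERS) == 1
print(sorted(USERS))
```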
7c425075280fea87b1c8dd61b43f51e19e84b770
astropy/utils/exceptions.py
astropy/utils/exceptions.py
from __future__ import (absolute_import, division, print_function, unicode_literals) class AstropyWarning(Warning): """ The base warning class from which all Astropy warnings should inherit. Any warning inheriting from this class is handled by the Astropy logger. """ class AstropyUserWarning(UserWarning, AstropyWarning): """ The primary warning class for Astropy. Use this if you do not need a specific sub-class. """ class AstropyDeprecationWarning(DeprecationWarning, AstropyWarning): """ A warning class to indicate a deprecated feature. """ class AstropyPendingDeprecationWarning(PendingDeprecationWarning, AstropyWarning): """ A warning class to indicate a soon-to-be deprecated feature. """ class AstropyBackwardsIncompatibleChangeWarning(AstropyWarning): """ A warning class indicating a change in astropy that is incompatible with previous versions. The suggested procedure is to issue this warning for the version in which the change occurs, and remove it for all following versions. """
from __future__ import (absolute_import, division, print_function, unicode_literals) class AstropyWarning(Warning): """ The base warning class from which all Astropy warnings should inherit. Any warning inheriting from this class is handled by the Astropy logger. """ class AstropyUserWarning(UserWarning, AstropyWarning): """ The primary warning class for Astropy. Use this if you do not need a specific sub-class. """ class AstropyDeprecationWarning(AstropyWarning): """ A warning class to indicate a deprecated feature. """ class AstropyPendingDeprecationWarning(PendingDeprecationWarning, AstropyWarning): """ A warning class to indicate a soon-to-be deprecated feature. """ class AstropyBackwardsIncompatibleChangeWarning(AstropyWarning): """ A warning class indicating a change in astropy that is incompatible with previous versions. The suggested procedure is to issue this warning for the version in which the change occurs, and remove it for all following versions. """
Remove DeprecationWarning superclass for AstropyDeprecationWarning
Remove DeprecationWarning superclass for AstropyDeprecationWarning

We do this because in Python 2.7, DeprecationWarning and its subclasses are hidden by default, but we want astropy's deprecations to be shown by default.
Python
bsd-3-clause
bsipocz/astropy,astropy/astropy,funbaker/astropy,StuartLittlefair/astropy,StuartLittlefair/astropy,larrybradley/astropy,lpsinger/astropy,dhomeier/astropy,funbaker/astropy,MSeifert04/astropy,stargaser/astropy,dhomeier/astropy,mhvk/astropy,AustereCuriosity/astropy,saimn/astropy,pllim/astropy,larrybradley/astropy,joergdietrich/astropy,stargaser/astropy,tbabej/astropy,aleksandr-bakanov/astropy,DougBurke/astropy,kelle/astropy,funbaker/astropy,bsipocz/astropy,AustereCuriosity/astropy,pllim/astropy,saimn/astropy,kelle/astropy,lpsinger/astropy,joergdietrich/astropy,StuartLittlefair/astropy,bsipocz/astropy,AustereCuriosity/astropy,kelle/astropy,AustereCuriosity/astropy,stargaser/astropy,StuartLittlefair/astropy,tbabej/astropy,saimn/astropy,MSeifert04/astropy,saimn/astropy,tbabej/astropy,larrybradley/astropy,larrybradley/astropy,joergdietrich/astropy,tbabej/astropy,pllim/astropy,kelle/astropy,aleksandr-bakanov/astropy,astropy/astropy,dhomeier/astropy,aleksandr-bakanov/astropy,mhvk/astropy,kelle/astropy,AustereCuriosity/astropy,aleksandr-bakanov/astropy,dhomeier/astropy,lpsinger/astropy,DougBurke/astropy,mhvk/astropy,pllim/astropy,astropy/astropy,dhomeier/astropy,astropy/astropy,DougBurke/astropy,lpsinger/astropy,bsipocz/astropy,tbabej/astropy,lpsinger/astropy,astropy/astropy,larrybradley/astropy,MSeifert04/astropy,joergdietrich/astropy,funbaker/astropy,saimn/astropy,pllim/astropy,mhvk/astropy,stargaser/astropy,joergdietrich/astropy,mhvk/astropy,MSeifert04/astropy,StuartLittlefair/astropy,DougBurke/astropy
from __future__ import (absolute_import, division, print_function, unicode_literals) class AstropyWarning(Warning): """ The base warning class from which all Astropy warnings should inherit. Any warning inheriting from this class is handled by the Astropy logger. """ class AstropyUserWarning(UserWarning, AstropyWarning): """ The primary warning class for Astropy. Use this if you do not need a specific sub-class. """ - class AstropyDeprecationWarning(DeprecationWarning, AstropyWarning): + class AstropyDeprecationWarning(AstropyWarning): """ A warning class to indicate a deprecated feature. """ class AstropyPendingDeprecationWarning(PendingDeprecationWarning, AstropyWarning): """ A warning class to indicate a soon-to-be deprecated feature. """ class AstropyBackwardsIncompatibleChangeWarning(AstropyWarning): """ A warning class indicating a change in astropy that is incompatible with previous versions. The suggested procedure is to issue this warning for the version in which the change occurs, and remove it for all following versions. """
Remove DeprecationWarning superclass for AstropyDeprecationWarning
## Code Before: from __future__ import (absolute_import, division, print_function, unicode_literals) class AstropyWarning(Warning): """ The base warning class from which all Astropy warnings should inherit. Any warning inheriting from this class is handled by the Astropy logger. """ class AstropyUserWarning(UserWarning, AstropyWarning): """ The primary warning class for Astropy. Use this if you do not need a specific sub-class. """ class AstropyDeprecationWarning(DeprecationWarning, AstropyWarning): """ A warning class to indicate a deprecated feature. """ class AstropyPendingDeprecationWarning(PendingDeprecationWarning, AstropyWarning): """ A warning class to indicate a soon-to-be deprecated feature. """ class AstropyBackwardsIncompatibleChangeWarning(AstropyWarning): """ A warning class indicating a change in astropy that is incompatible with previous versions. The suggested procedure is to issue this warning for the version in which the change occurs, and remove it for all following versions. """ ## Instruction: Remove DeprecationWarning superclass for AstropyDeprecationWarning ## Code After: from __future__ import (absolute_import, division, print_function, unicode_literals) class AstropyWarning(Warning): """ The base warning class from which all Astropy warnings should inherit. Any warning inheriting from this class is handled by the Astropy logger. """ class AstropyUserWarning(UserWarning, AstropyWarning): """ The primary warning class for Astropy. Use this if you do not need a specific sub-class. """ class AstropyDeprecationWarning(AstropyWarning): """ A warning class to indicate a deprecated feature. """ class AstropyPendingDeprecationWarning(PendingDeprecationWarning, AstropyWarning): """ A warning class to indicate a soon-to-be deprecated feature. """ class AstropyBackwardsIncompatibleChangeWarning(AstropyWarning): """ A warning class indicating a change in astropy that is incompatible with previous versions. The suggested procedure is to issue this warning for the version in which the change occurs, and remove it for all following versions. """
from __future__ import (absolute_import, division, print_function, unicode_literals) class AstropyWarning(Warning): """ The base warning class from which all Astropy warnings should inherit. Any warning inheriting from this class is handled by the Astropy logger. """ class AstropyUserWarning(UserWarning, AstropyWarning): """ The primary warning class for Astropy. Use this if you do not need a specific sub-class. """ - class AstropyDeprecationWarning(DeprecationWarning, AstropyWarning): ? -------------------- + class AstropyDeprecationWarning(AstropyWarning): """ A warning class to indicate a deprecated feature. """ class AstropyPendingDeprecationWarning(PendingDeprecationWarning, AstropyWarning): """ A warning class to indicate a soon-to-be deprecated feature. """ class AstropyBackwardsIncompatibleChangeWarning(AstropyWarning): """ A warning class indicating a change in astropy that is incompatible with previous versions. The suggested procedure is to issue this warning for the version in which the change occurs, and remove it for all following versions. """
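The commit body explains the motivation: Python's default warning filters ignore `DeprecationWarning` and every subclass, so a warning class inheriting from it stays silent unless the filters are changed, which is why the record above drops that base class. A stdlib-only demonstration; the filter installed below mirrors the stock `ignore::DeprecationWarning` default rule:

```python
import warnings

class LoudWarning(Warning):
    """Plain Warning subclasses are shown by default."""

class QuietWarning(DeprecationWarning):
    """Caught by the default ignore rule for DeprecationWarning."""

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")                      # clean slate
    warnings.simplefilter("ignore", DeprecationWarning)  # stock default rule
    warnings.warn("shown", LoudWarning)
    warnings.warn("hidden", QuietWarning)  # matches the ignore rule

print([str(w.message) for w in caught])  # ['shown']
```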
93c6e5d39b1779f0eca9b28f5111d7c402ebc1ba
geotagging/views.py
geotagging/views.py
from django.http import HttpResponseRedirect from django.shortcuts import render_to_response from django.template import RequestContext from django.contrib.contenttypes.models import ContentType from geotagging.models import Point def add_edit_point(request, content_type_id, object_id, template=None, form_class=None): model_class = ContentType.objects.get(id=content_type_id).model_class() object = model_class.objects.get(id=object_id) object_content_type = ContentType.objects.get_for_model(object) geotag = Point.objects.get(content_type__pk=object_content_type.id, object_id=object.id) if request.method == "POST": form = form_class(request.POST, instance=geotag) if form.is_valid(): new_object = form.save(commit=False) new_object.object = object new_object.save() return HttpResponseRedirect("/admin/%s/%s/%s/" %(object_content_type.app_label, object_content_type.model, object.id)) form = form_class(instance=geotag) #import ipdb; ipdb.set_trace() context = RequestContext(request, { 'form': form, 'object' : object, 'object_content_type' : object_content_type, 'geotag' : geotag, }) return render_to_response(template, context_instance=context )
from django.http import HttpResponseRedirect from django.shortcuts import render_to_response from django.template import RequestContext from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ObjectDoesNotExist from geotagging.models import Point def add_edit_point(request, content_type_id, object_id, template=None, form_class=None): model_class = ContentType.objects.get(id=content_type_id).model_class() object = model_class.objects.get(id=object_id) object_content_type = ContentType.objects.get_for_model(object) try: geotag = Point.objects.get(content_type__pk=object_content_type.id, object_id=object.id) except ObjectDoesNotExist: geotag = None if request.method == "POST": form = form_class(request.POST, instance=geotag) if form.is_valid(): new_object = form.save(commit=False) new_object.object = object new_object.save() return HttpResponseRedirect("/admin/%s/%s/%s/" %(object_content_type.app_label, object_content_type.model, object.id)) form = form_class(instance=geotag) #import ipdb; ipdb.set_trace() context = RequestContext(request, { 'form': form, 'object' : object, 'object_content_type' : object_content_type, 'geotag' : geotag, }) return render_to_response(template, context_instance=context )
Fix a bug when you try to add a geo tag to an object that does not already have one
Fix a bug when you try to add a geo tag to an object that does not already have one
Python
bsd-3-clause
lincolnloop/django-geotagging,lincolnloop/django-geotagging
from django.http import HttpResponseRedirect from django.shortcuts import render_to_response from django.template import RequestContext from django.contrib.contenttypes.models import ContentType + from django.core.exceptions import ObjectDoesNotExist from geotagging.models import Point def add_edit_point(request, content_type_id, object_id, template=None, form_class=None): model_class = ContentType.objects.get(id=content_type_id).model_class() object = model_class.objects.get(id=object_id) object_content_type = ContentType.objects.get_for_model(object) + try: - geotag = Point.objects.get(content_type__pk=object_content_type.id, + geotag = Point.objects.get(content_type__pk=object_content_type.id, object_id=object.id) + except ObjectDoesNotExist: + geotag = None if request.method == "POST": form = form_class(request.POST, instance=geotag) if form.is_valid(): new_object = form.save(commit=False) new_object.object = object new_object.save() return HttpResponseRedirect("/admin/%s/%s/%s/" %(object_content_type.app_label, object_content_type.model, object.id)) form = form_class(instance=geotag) #import ipdb; ipdb.set_trace() context = RequestContext(request, { 'form': form, 'object' : object, 'object_content_type' : object_content_type, 'geotag' : geotag, }) return render_to_response(template, context_instance=context )
Fix a bug when you try to add a geo tag to an object that does not already have one
## Code Before: from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.contrib.contenttypes.models import ContentType

from geotagging.models import Point

def add_edit_point(request, content_type_id, object_id,
                   template=None, form_class=None):
    model_class = ContentType.objects.get(id=content_type_id).model_class()
    object = model_class.objects.get(id=object_id)
    object_content_type = ContentType.objects.get_for_model(object)
    geotag = Point.objects.get(content_type__pk=object_content_type.id,
                               object_id=object.id)
    if request.method == "POST":
        form = form_class(request.POST, instance=geotag)
        if form.is_valid():
            new_object = form.save(commit=False)
            new_object.object = object
            new_object.save()
            return HttpResponseRedirect("/admin/%s/%s/%s/"
                                        %(object_content_type.app_label,
                                          object_content_type.model,
                                          object.id))
    form = form_class(instance=geotag)
    #import ipdb; ipdb.set_trace()
    context = RequestContext(request, {
        'form': form,
        'object' : object,
        'object_content_type' : object_content_type,
        'geotag' : geotag,
    })
    return render_to_response(template, context_instance=context
                              )

## Instruction: Fix a bug when you try to add a geo tag to an object that does not already have one

## Code After: from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ObjectDoesNotExist

from geotagging.models import Point

def add_edit_point(request, content_type_id, object_id,
                   template=None, form_class=None):
    model_class = ContentType.objects.get(id=content_type_id).model_class()
    object = model_class.objects.get(id=object_id)
    object_content_type = ContentType.objects.get_for_model(object)
    try:
        geotag = Point.objects.get(content_type__pk=object_content_type.id,
                                   object_id=object.id)
    except ObjectDoesNotExist:
        geotag = None
    if request.method == "POST":
        form = form_class(request.POST, instance=geotag)
        if form.is_valid():
            new_object = form.save(commit=False)
            new_object.object = object
            new_object.save()
            return HttpResponseRedirect("/admin/%s/%s/%s/"
                                        %(object_content_type.app_label,
                                          object_content_type.model,
                                          object.id))
    form = form_class(instance=geotag)
    #import ipdb; ipdb.set_trace()
    context = RequestContext(request, {
        'form': form,
        'object' : object,
        'object_content_type' : object_content_type,
        'geotag' : geotag,
    })
    return render_to_response(template, context_instance=context
                              )
from django.http import HttpResponseRedirect from django.shortcuts import render_to_response from django.template import RequestContext from django.contrib.contenttypes.models import ContentType + from django.core.exceptions import ObjectDoesNotExist from geotagging.models import Point def add_edit_point(request, content_type_id, object_id, template=None, form_class=None): model_class = ContentType.objects.get(id=content_type_id).model_class() object = model_class.objects.get(id=object_id) object_content_type = ContentType.objects.get_for_model(object) + try: - geotag = Point.objects.get(content_type__pk=object_content_type.id, + geotag = Point.objects.get(content_type__pk=object_content_type.id, ? ++++ object_id=object.id) + except ObjectDoesNotExist: + geotag = None if request.method == "POST": form = form_class(request.POST, instance=geotag) if form.is_valid(): new_object = form.save(commit=False) new_object.object = object new_object.save() return HttpResponseRedirect("/admin/%s/%s/%s/" %(object_content_type.app_label, object_content_type.model, object.id)) form = form_class(instance=geotag) #import ipdb; ipdb.set_trace() context = RequestContext(request, { 'form': form, 'object' : object, 'object_content_type' : object_content_type, 'geotag' : geotag, }) return render_to_response(template, context_instance=context )
ac7477803739d303df8374f916748173da32cb07
test_elasticsearch/test_server/__init__.py
test_elasticsearch/test_server/__init__.py
from elasticsearch.helpers.test import get_test_client, ElasticsearchTestCase as BaseTestCase client = None def get_client(): global client if client is not None: return client # try and locate manual override in the local environment try: from test_elasticsearch.local import get_client as local_get_client client = local_get_client() except ImportError: # fallback to using vanilla client client = get_test_client() return client def setup(): get_client() class ElasticsearchTestCase(BaseTestCase): @staticmethod def _get_client(): return get_client()
from elasticsearch.helpers.test import get_test_client, ElasticsearchTestCase as BaseTestCase client = None def get_client(**kwargs): global client if client is not None and not kwargs: return client # try and locate manual override in the local environment try: from test_elasticsearch.local import get_client as local_get_client new_client = local_get_client(**kwargs) except ImportError: # fallback to using vanilla client new_client = get_test_client(**kwargs) if not kwargs: client = new_client return new_client def setup(): get_client() class ElasticsearchTestCase(BaseTestCase): @staticmethod def _get_client(**kwargs): return get_client(**kwargs)
Allow test client to be created with kwargs
Allow test client to be created with kwargs
Python
apache-2.0
brunobell/elasticsearch-py,elastic/elasticsearch-py,brunobell/elasticsearch-py,elastic/elasticsearch-py
from elasticsearch.helpers.test import get_test_client, ElasticsearchTestCase as BaseTestCase client = None - def get_client(): + def get_client(**kwargs): global client - if client is not None: + if client is not None and not kwargs: return client # try and locate manual override in the local environment try: from test_elasticsearch.local import get_client as local_get_client - client = local_get_client() + new_client = local_get_client(**kwargs) except ImportError: # fallback to using vanilla client - client = get_test_client() + new_client = get_test_client(**kwargs) + if not kwargs: + client = new_client + - return client + return new_client def setup(): get_client() class ElasticsearchTestCase(BaseTestCase): @staticmethod - def _get_client(): + def _get_client(**kwargs): - return get_client() + return get_client(**kwargs)
Allow test client to be created with kwargs
## Code Before: from elasticsearch.helpers.test import get_test_client, ElasticsearchTestCase as BaseTestCase client = None def get_client(): global client if client is not None: return client # try and locate manual override in the local environment try: from test_elasticsearch.local import get_client as local_get_client client = local_get_client() except ImportError: # fallback to using vanilla client client = get_test_client() return client def setup(): get_client() class ElasticsearchTestCase(BaseTestCase): @staticmethod def _get_client(): return get_client() ## Instruction: Allow test client to be created with kwargs ## Code After: from elasticsearch.helpers.test import get_test_client, ElasticsearchTestCase as BaseTestCase client = None def get_client(**kwargs): global client if client is not None and not kwargs: return client # try and locate manual override in the local environment try: from test_elasticsearch.local import get_client as local_get_client new_client = local_get_client(**kwargs) except ImportError: # fallback to using vanilla client new_client = get_test_client(**kwargs) if not kwargs: client = new_client return new_client def setup(): get_client() class ElasticsearchTestCase(BaseTestCase): @staticmethod def _get_client(**kwargs): return get_client(**kwargs)
from elasticsearch.helpers.test import get_test_client, ElasticsearchTestCase as BaseTestCase client = None - def get_client(): + def get_client(**kwargs): ? ++++++++ global client - if client is not None: + if client is not None and not kwargs: ? +++++++++++++++ return client # try and locate manual override in the local environment try: from test_elasticsearch.local import get_client as local_get_client - client = local_get_client() + new_client = local_get_client(**kwargs) ? ++++ ++++++++ except ImportError: # fallback to using vanilla client - client = get_test_client() + new_client = get_test_client(**kwargs) ? ++++ ++++++++ + if not kwargs: + client = new_client + - return client + return new_client ? ++++ def setup(): get_client() class ElasticsearchTestCase(BaseTestCase): @staticmethod - def _get_client(): + def _get_client(**kwargs): ? ++++++++ - return get_client() + return get_client(**kwargs) ? ++++++++
71ce7f3e745b9cee357f867f126dce65f6e210ac
main.py
main.py
import os import sys PROJECT_ROOT = os.path.dirname(__file__) sys.path.insert(0, os.path.join(PROJECT_ROOT, 'vroom')) import pygame import math from world import Universe # Initialize pygame pygame.init() size = width, height = 800, 600 black = 0, 0, 0 screen = pygame.display.set_mode(size) clock = pygame.time.Clock() clock.tick(30) # Initialize the universe universe = Universe(screen) universe.add_nest(100, 100, math.pi / 2, 30.0) universe.add_hole(600, 100) while 1: for event in pygame.event.get(): if event.type == pygame.QUIT: sys.exit() screen.fill(black) universe.update(clock.get_time()) universe.draw() pygame.display.flip() clock.tick(30)
import os import sys PROJECT_ROOT = os.path.dirname(__file__) sys.path.insert(0, os.path.join(PROJECT_ROOT, 'vroom')) import pygame import math from world import Universe # Initialize pygame pygame.init() size = width, height = 800, 600 black = 0, 0, 0 screen = pygame.display.set_mode(size) clock = pygame.time.Clock() clock.tick(30) # Initialize the universe universe = Universe(screen) universe.add_road(( (100, 100), (150, 300), (250, 500), (400, 500), (700, 200), )) universe.add_nest(100, 100, math.pi / 2, 30.0) universe.add_hole(600, 100) while 1: for event in pygame.event.get(): if event.type == pygame.QUIT: sys.exit() screen.fill(black) universe.update(clock.get_time()) universe.draw() pygame.display.flip() clock.tick(30)
Add more roads on map
Add more roads on map
Python
mit
thibault/vroom
import os import sys PROJECT_ROOT = os.path.dirname(__file__) sys.path.insert(0, os.path.join(PROJECT_ROOT, 'vroom')) import pygame import math from world import Universe # Initialize pygame pygame.init() size = width, height = 800, 600 black = 0, 0, 0 screen = pygame.display.set_mode(size) clock = pygame.time.Clock() clock.tick(30) # Initialize the universe universe = Universe(screen) + universe.add_road(( + (100, 100), + (150, 300), + (250, 500), + (400, 500), + (700, 200), + )) universe.add_nest(100, 100, math.pi / 2, 30.0) universe.add_hole(600, 100) + while 1: for event in pygame.event.get(): if event.type == pygame.QUIT: sys.exit() screen.fill(black) universe.update(clock.get_time()) universe.draw() pygame.display.flip() clock.tick(30)
Add more roads on map
## Code Before: import os import sys PROJECT_ROOT = os.path.dirname(__file__) sys.path.insert(0, os.path.join(PROJECT_ROOT, 'vroom')) import pygame import math from world import Universe # Initialize pygame pygame.init() size = width, height = 800, 600 black = 0, 0, 0 screen = pygame.display.set_mode(size) clock = pygame.time.Clock() clock.tick(30) # Initialize the universe universe = Universe(screen) universe.add_nest(100, 100, math.pi / 2, 30.0) universe.add_hole(600, 100) while 1: for event in pygame.event.get(): if event.type == pygame.QUIT: sys.exit() screen.fill(black) universe.update(clock.get_time()) universe.draw() pygame.display.flip() clock.tick(30) ## Instruction: Add more roads on map ## Code After: import os import sys PROJECT_ROOT = os.path.dirname(__file__) sys.path.insert(0, os.path.join(PROJECT_ROOT, 'vroom')) import pygame import math from world import Universe # Initialize pygame pygame.init() size = width, height = 800, 600 black = 0, 0, 0 screen = pygame.display.set_mode(size) clock = pygame.time.Clock() clock.tick(30) # Initialize the universe universe = Universe(screen) universe.add_road(( (100, 100), (150, 300), (250, 500), (400, 500), (700, 200), )) universe.add_nest(100, 100, math.pi / 2, 30.0) universe.add_hole(600, 100) while 1: for event in pygame.event.get(): if event.type == pygame.QUIT: sys.exit() screen.fill(black) universe.update(clock.get_time()) universe.draw() pygame.display.flip() clock.tick(30)
import os import sys PROJECT_ROOT = os.path.dirname(__file__) sys.path.insert(0, os.path.join(PROJECT_ROOT, 'vroom')) import pygame import math from world import Universe # Initialize pygame pygame.init() size = width, height = 800, 600 black = 0, 0, 0 screen = pygame.display.set_mode(size) clock = pygame.time.Clock() clock.tick(30) # Initialize the universe universe = Universe(screen) + universe.add_road(( + (100, 100), + (150, 300), + (250, 500), + (400, 500), + (700, 200), + )) universe.add_nest(100, 100, math.pi / 2, 30.0) universe.add_hole(600, 100) + while 1: for event in pygame.event.get(): if event.type == pygame.QUIT: sys.exit() screen.fill(black) universe.update(clock.get_time()) universe.draw() pygame.display.flip() clock.tick(30)
9db16e16db9131806d76a1f6875dfba33a7d452b
smile_model_graph/__openerp__.py
smile_model_graph/__openerp__.py
{ "name": "Objects Graph", "version": "0.1", "depends": ["base"], "author": "Smile", "license": 'AGPL-3', "description": """ Generate Objects Graph Suggestions & Feedback to: [email protected] """, "website": "http://www.smile.fr", "category": "Hidden", "sequence": 32, "data": [ "wizard/ir_model_graph_wizard_view.xml", ], "demo": [], 'test': [], "auto_install": True, "installable": True, "application": False, }
{ "name": "Models Graph", "version": "0.1", "depends": ["base"], "author": "Smile", "license": 'AGPL-3', "description": """ Generate Models Graph Suggestions & Feedback to: [email protected] """, "website": "http://www.smile.fr", "category": "Hidden", "sequence": 32, "data": [ "wizard/ir_model_graph_wizard_view.xml", ], "demo": [], 'test': [], "auto_install": True, "installable": True, "application": False, }
Rename Objects to Models in module description
[IMP] Rename Objects to Models in module description
Python
agpl-3.0
tiexinliu/odoo_addons,chadyred/odoo_addons,chadyred/odoo_addons,ovnicraft/odoo_addons,bmya/odoo_addons,odoocn/odoo_addons,odoocn/odoo_addons,tiexinliu/odoo_addons,chadyred/odoo_addons,odoocn/odoo_addons,ovnicraft/odoo_addons,bmya/odoo_addons,ovnicraft/odoo_addons,bmya/odoo_addons,tiexinliu/odoo_addons
{ - "name": "Objects Graph", + "name": "Models Graph", "version": "0.1", "depends": ["base"], "author": "Smile", "license": 'AGPL-3', "description": """ - Generate Objects Graph + Generate Models Graph Suggestions & Feedback to: [email protected] """, "website": "http://www.smile.fr", "category": "Hidden", "sequence": 32, "data": [ "wizard/ir_model_graph_wizard_view.xml", ], "demo": [], 'test': [], "auto_install": True, "installable": True, "application": False, }
Rename Objects to Models in module description
## Code Before: { "name": "Objects Graph", "version": "0.1", "depends": ["base"], "author": "Smile", "license": 'AGPL-3', "description": """ Generate Objects Graph Suggestions & Feedback to: [email protected] """, "website": "http://www.smile.fr", "category": "Hidden", "sequence": 32, "data": [ "wizard/ir_model_graph_wizard_view.xml", ], "demo": [], 'test': [], "auto_install": True, "installable": True, "application": False, } ## Instruction: Rename Objects to Models in module description ## Code After: { "name": "Models Graph", "version": "0.1", "depends": ["base"], "author": "Smile", "license": 'AGPL-3', "description": """ Generate Models Graph Suggestions & Feedback to: [email protected] """, "website": "http://www.smile.fr", "category": "Hidden", "sequence": 32, "data": [ "wizard/ir_model_graph_wizard_view.xml", ], "demo": [], 'test': [], "auto_install": True, "installable": True, "application": False, }
{ - "name": "Objects Graph", ? ^^^ ^^ + "name": "Models Graph", ? ^^^ ^ "version": "0.1", "depends": ["base"], "author": "Smile", "license": 'AGPL-3', "description": """ - Generate Objects Graph ? ^^^ ^^ + Generate Models Graph ? ^^^ ^ Suggestions & Feedback to: [email protected] """, "website": "http://www.smile.fr", "category": "Hidden", "sequence": 32, "data": [ "wizard/ir_model_graph_wizard_view.xml", ], "demo": [], 'test': [], "auto_install": True, "installable": True, "application": False, }
4d547ffa4112412e340abd6231cd406d14b8ff35
l10n_lu_ecdf/__openerp__.py
l10n_lu_ecdf/__openerp__.py
{ "name": "eCDF annual reports", "version": "8.0.1.0.0", "author": "ACSONE SA/NV", "license": "AGPL-3", "category": "Accounting & Finance", "website": "http://acsone.eu", "depends": ["l10n_lu_mis_reports", "mis_builder"], "module": "", "summary": "Generates XML eCDF annual financial reports", "data": [ "views/res_company.xml", "wizard/ecdf_report_view.xml", ], "installable": True, }
{ "name": "eCDF annual reports", "version": "8.0.1.0.0", "author": "ACSONE SA/NV", "license": "AGPL-3", "category": "Accounting & Finance", "website": "http://acsone.eu", "depends": ["l10n_lu_ext", "l10n_lu_mis_reports", "mis_builder"], "module": "", "summary": "Generates XML eCDF annual financial reports", "data": [ "views/res_company.xml", "wizard/ecdf_report_view.xml", ], "installable": True, }
Add dependency on l10n_lu_ext, for the field l10n_lu_matricule
[FIX] Add dependency on l10n_lu_ext, for the field l10n_lu_matricule
Python
agpl-3.0
acsone/l10n-luxemburg
{ "name": "eCDF annual reports", "version": "8.0.1.0.0", "author": "ACSONE SA/NV", "license": "AGPL-3", "category": "Accounting & Finance", "website": "http://acsone.eu", - "depends": ["l10n_lu_mis_reports", + "depends": ["l10n_lu_ext", + "l10n_lu_mis_reports", "mis_builder"], "module": "", "summary": "Generates XML eCDF annual financial reports", "data": [ "views/res_company.xml", "wizard/ecdf_report_view.xml", ], "installable": True, }
Add dependency on l10n_lu_ext, for the field l10n_lu_matricule
## Code Before: { "name": "eCDF annual reports", "version": "8.0.1.0.0", "author": "ACSONE SA/NV", "license": "AGPL-3", "category": "Accounting & Finance", "website": "http://acsone.eu", "depends": ["l10n_lu_mis_reports", "mis_builder"], "module": "", "summary": "Generates XML eCDF annual financial reports", "data": [ "views/res_company.xml", "wizard/ecdf_report_view.xml", ], "installable": True, } ## Instruction: Add dependency on l10n_lu_ext, for the field l10n_lu_matricule ## Code After: { "name": "eCDF annual reports", "version": "8.0.1.0.0", "author": "ACSONE SA/NV", "license": "AGPL-3", "category": "Accounting & Finance", "website": "http://acsone.eu", "depends": ["l10n_lu_ext", "l10n_lu_mis_reports", "mis_builder"], "module": "", "summary": "Generates XML eCDF annual financial reports", "data": [ "views/res_company.xml", "wizard/ecdf_report_view.xml", ], "installable": True, }
{ "name": "eCDF annual reports", "version": "8.0.1.0.0", "author": "ACSONE SA/NV", "license": "AGPL-3", "category": "Accounting & Finance", "website": "http://acsone.eu", - "depends": ["l10n_lu_mis_reports", ? ----- ^^^ - + "depends": ["l10n_lu_ext", ? ^ + "l10n_lu_mis_reports", "mis_builder"], "module": "", "summary": "Generates XML eCDF annual financial reports", "data": [ "views/res_company.xml", "wizard/ecdf_report_view.xml", ], "installable": True, }
ac0f0780beb61cab95809b2e0d02e5dab481e225
py/valid-parenthesis-string.py
py/valid-parenthesis-string.py
from collections import Counter class Solution(object): def dfs(self, s, pos, stack): if stack + self.min_possible_opening[-1] - self.min_possible_opening[pos] > self.max_possible_closing[-1] - self.max_possible_closing[pos]: return False if stack + self.max_possible_opening[-1] - self.max_possible_opening[pos] < self.min_possible_closing[-1] - self.min_possible_closing[pos]: return False if pos == len(s): return not stack if s[pos] == '(': stack += 1 if self.dfs(s, pos + 1, stack): return True stack -= 1 elif s[pos] == ')': if not stack: return False else: stack -= 1 if self.dfs(s, pos + 1, stack): return True stack += 1 else: if stack: # treat as ')' stack -= 1 if self.dfs(s, pos + 1, stack): return True stack += 1 # treat as '(' stack += 1 if self.dfs(s, pos + 1, stack): return True stack -= 1 # treat as '' if self.dfs(s, pos + 1, stack): return True return False def checkValidString(self, s): """ :type s: str :rtype: bool """ c = Counter(s) mpo, mpc = c['('] + c['*'], c[')'] + c['*'] self.max_possible_opening = [0] self.min_possible_opening = [0] self.max_possible_closing = [0] self.min_possible_closing = [0] for c in s: self.min_possible_opening.append(self.min_possible_opening[-1] + (c == '(')) self.max_possible_opening.append(self.max_possible_opening[-1] + (c != ')')) self.min_possible_closing.append(self.min_possible_closing[-1] + (c == ')')) self.max_possible_closing.append(self.max_possible_closing[-1] + (c != '(')) return self.dfs(s, 0, 0)
class Solution(object): def checkValidString(self, s): """ :type s: str :rtype: bool """ lowest, highest = 0, 0 for c in s: if c == '(': lowest += 1 highest += 1 elif c == ')': if lowest > 0: lowest -= 1 highest -= 1 if highest < 0: return False else: if lowest > 0: lowest -= 1 highest += 1 return lowest == 0
Add py solution for 678. Valid Parenthesis String
Add py solution for 678. Valid Parenthesis String

678. Valid Parenthesis String: https://leetcode.com/problems/valid-parenthesis-string/

Approach 2: Maintain the lowest/highest possible stack sizes and check
whether either of them becomes invalid.
O(n) time, O(1) space
Python
apache-2.0
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
- from collections import Counter class Solution(object): - def dfs(self, s, pos, stack): - if stack + self.min_possible_opening[-1] - self.min_possible_opening[pos] > self.max_possible_closing[-1] - self.max_possible_closing[pos]: - return False - if stack + self.max_possible_opening[-1] - self.max_possible_opening[pos] < self.min_possible_closing[-1] - self.min_possible_closing[pos]: - return False - if pos == len(s): - return not stack - if s[pos] == '(': - stack += 1 - if self.dfs(s, pos + 1, stack): - return True - stack -= 1 - elif s[pos] == ')': - if not stack: - return False - else: - stack -= 1 - if self.dfs(s, pos + 1, stack): - return True - stack += 1 - else: - if stack: # treat as ')' - stack -= 1 - if self.dfs(s, pos + 1, stack): - return True - stack += 1 - # treat as '(' - stack += 1 - if self.dfs(s, pos + 1, stack): - return True - stack -= 1 - - # treat as '' - if self.dfs(s, pos + 1, stack): - return True - return False def checkValidString(self, s): """ :type s: str :rtype: bool """ - c = Counter(s) - mpo, mpc = c['('] + c['*'], c[')'] + c['*'] + lowest, highest = 0, 0 + for c in s: + if c == '(': + lowest += 1 + highest += 1 + elif c == ')': + if lowest > 0: + lowest -= 1 + highest -= 1 + if highest < 0: + return False + else: + if lowest > 0: + lowest -= 1 + highest += 1 + return lowest == 0 - self.max_possible_opening = [0] - self.min_possible_opening = [0] - self.max_possible_closing = [0] - self.min_possible_closing = [0] - for c in s: - self.min_possible_opening.append(self.min_possible_opening[-1] + (c == '(')) - self.max_possible_opening.append(self.max_possible_opening[-1] + (c != ')')) - self.min_possible_closing.append(self.min_possible_closing[-1] + (c == ')')) - self.max_possible_closing.append(self.max_possible_closing[-1] + (c != '(')) - - return self.dfs(s, 0, 0) -
Add py solution for 678. Valid Parenthesis String
## Code Before: from collections import Counter class Solution(object): def dfs(self, s, pos, stack): if stack + self.min_possible_opening[-1] - self.min_possible_opening[pos] > self.max_possible_closing[-1] - self.max_possible_closing[pos]: return False if stack + self.max_possible_opening[-1] - self.max_possible_opening[pos] < self.min_possible_closing[-1] - self.min_possible_closing[pos]: return False if pos == len(s): return not stack if s[pos] == '(': stack += 1 if self.dfs(s, pos + 1, stack): return True stack -= 1 elif s[pos] == ')': if not stack: return False else: stack -= 1 if self.dfs(s, pos + 1, stack): return True stack += 1 else: if stack: # treat as ')' stack -= 1 if self.dfs(s, pos + 1, stack): return True stack += 1 # treat as '(' stack += 1 if self.dfs(s, pos + 1, stack): return True stack -= 1 # treat as '' if self.dfs(s, pos + 1, stack): return True return False def checkValidString(self, s): """ :type s: str :rtype: bool """ c = Counter(s) mpo, mpc = c['('] + c['*'], c[')'] + c['*'] self.max_possible_opening = [0] self.min_possible_opening = [0] self.max_possible_closing = [0] self.min_possible_closing = [0] for c in s: self.min_possible_opening.append(self.min_possible_opening[-1] + (c == '(')) self.max_possible_opening.append(self.max_possible_opening[-1] + (c != ')')) self.min_possible_closing.append(self.min_possible_closing[-1] + (c == ')')) self.max_possible_closing.append(self.max_possible_closing[-1] + (c != '(')) return self.dfs(s, 0, 0) ## Instruction: Add py solution for 678. Valid Parenthesis String ## Code After: class Solution(object): def checkValidString(self, s): """ :type s: str :rtype: bool """ lowest, highest = 0, 0 for c in s: if c == '(': lowest += 1 highest += 1 elif c == ')': if lowest > 0: lowest -= 1 highest -= 1 if highest < 0: return False else: if lowest > 0: lowest -= 1 highest += 1 return lowest == 0
- from collections import Counter class Solution(object): - def dfs(self, s, pos, stack): - if stack + self.min_possible_opening[-1] - self.min_possible_opening[pos] > self.max_possible_closing[-1] - self.max_possible_closing[pos]: - return False - if stack + self.max_possible_opening[-1] - self.max_possible_opening[pos] < self.min_possible_closing[-1] - self.min_possible_closing[pos]: - return False - if pos == len(s): - return not stack - if s[pos] == '(': - stack += 1 - if self.dfs(s, pos + 1, stack): - return True - stack -= 1 - elif s[pos] == ')': - if not stack: - return False - else: - stack -= 1 - if self.dfs(s, pos + 1, stack): - return True - stack += 1 - else: - if stack: # treat as ')' - stack -= 1 - if self.dfs(s, pos + 1, stack): - return True - stack += 1 - # treat as '(' - stack += 1 - if self.dfs(s, pos + 1, stack): - return True - stack -= 1 - - # treat as '' - if self.dfs(s, pos + 1, stack): - return True - return False def checkValidString(self, s): """ :type s: str :rtype: bool """ + lowest, highest = 0, 0 - c = Counter(s) - mpo, mpc = c['('] + c['*'], c[')'] + c['*'] - - self.max_possible_opening = [0] - self.min_possible_opening = [0] - self.max_possible_closing = [0] - self.min_possible_closing = [0] for c in s: - self.min_possible_opening.append(self.min_possible_opening[-1] + (c == '(')) - self.max_possible_opening.append(self.max_possible_opening[-1] + (c != ')')) - self.min_possible_closing.append(self.min_possible_closing[-1] + (c == ')')) - self.max_possible_closing.append(self.max_possible_closing[-1] + (c != '(')) - - return self.dfs(s, 0, 0) + if c == '(': + lowest += 1 + highest += 1 + elif c == ')': + if lowest > 0: + lowest -= 1 + highest -= 1 + if highest < 0: + return False + else: + if lowest > 0: + lowest -= 1 + highest += 1 + return lowest == 0
8d313884a52b06e2fdf9a3c0d152b9e711ff02c2
kkbox/trac/secretticket.py
kkbox/trac/secretticket.py
from trac.core import Component, implements from trac.perm import IPermissionRequestor class KKBOXSecretTicketsPolicy(Component): implements(IPermissionRequestor) def get_permission_actions(self): return ['SECRET_VIEW']
from trac.ticket.model import Ticket from trac.core import Component, implements, TracError from trac.perm import IPermissionPolicy class KKBOXSecretTicketsPolicy(Component): implements(IPermissionPolicy) def __init__(self): config = self.env.config self.sensitive_keyword = config.get('kkbox', 'sensitive_keyword').strip() def check_permission(self, action, user, resource, perm): while resource: if 'ticket' == resource.realm: break resource = resource.parent if resource and 'ticket' == resource.realm and resource.id: return self.check_ticket_access(perm, resource) def check_ticket_access(self, perm, res): if not self.sensitive_keyword: return None try: ticket = Ticket(self.env, res.id) keywords = [k.strip() for k in ticket['keywords'].split(',')] if self.sensitive_keyword in keywords: cc_list = [cc.strip() for cc in ticket['cc'].split(',')] if perm.username == ticket['reporter'] or \ perm.username == ticket['owner'] or \ perm.username in cc_list: return None else: return False except TracError as e: self.log.error(e.message) return None
Mark ticket as sensitive by keyword
Mark ticket as sensitive by keyword

Set sensitive_keyword in trac.ini as in the following example.
Tickets that have the "secret" keyword are viewable only by the
reporter, owner, and cc.

[kkbox]
sensitive_keyword = secret
Python
bsd-3-clause
KKBOX/trac-keyword-secret-ticket-plugin
+ from trac.ticket.model import Ticket - from trac.core import Component, implements + from trac.core import Component, implements, TracError - from trac.perm import IPermissionRequestor + from trac.perm import IPermissionPolicy class KKBOXSecretTicketsPolicy(Component): - implements(IPermissionRequestor) + implements(IPermissionPolicy) - def get_permission_actions(self): - return ['SECRET_VIEW'] + def __init__(self): + config = self.env.config + self.sensitive_keyword = config.get('kkbox', 'sensitive_keyword').strip() + def check_permission(self, action, user, resource, perm): + while resource: + if 'ticket' == resource.realm: + break + resource = resource.parent + + if resource and 'ticket' == resource.realm and resource.id: + return self.check_ticket_access(perm, resource) + + def check_ticket_access(self, perm, res): + if not self.sensitive_keyword: + return None + + try: + ticket = Ticket(self.env, res.id) + keywords = [k.strip() for k in ticket['keywords'].split(',')] + if self.sensitive_keyword in keywords: + cc_list = [cc.strip() for cc in ticket['cc'].split(',')] + + if perm.username == ticket['reporter'] or \ + perm.username == ticket['owner'] or \ + perm.username in cc_list: + return None + else: + return False + except TracError as e: + self.log.error(e.message) + return None +
Mark ticket as sensitive by keyword
## Code Before: from trac.core import Component, implements from trac.perm import IPermissionRequestor class KKBOXSecretTicketsPolicy(Component): implements(IPermissionRequestor) def get_permission_actions(self): return ['SECRET_VIEW'] ## Instruction: Mark ticket as sensitive by keyword ## Code After: from trac.ticket.model import Ticket from trac.core import Component, implements, TracError from trac.perm import IPermissionPolicy class KKBOXSecretTicketsPolicy(Component): implements(IPermissionPolicy) def __init__(self): config = self.env.config self.sensitive_keyword = config.get('kkbox', 'sensitive_keyword').strip() def check_permission(self, action, user, resource, perm): while resource: if 'ticket' == resource.realm: break resource = resource.parent if resource and 'ticket' == resource.realm and resource.id: return self.check_ticket_access(perm, resource) def check_ticket_access(self, perm, res): if not self.sensitive_keyword: return None try: ticket = Ticket(self.env, res.id) keywords = [k.strip() for k in ticket['keywords'].split(',')] if self.sensitive_keyword in keywords: cc_list = [cc.strip() for cc in ticket['cc'].split(',')] if perm.username == ticket['reporter'] or \ perm.username == ticket['owner'] or \ perm.username in cc_list: return None else: return False except TracError as e: self.log.error(e.message) return None
+ from trac.ticket.model import Ticket - from trac.core import Component, implements + from trac.core import Component, implements, TracError ? +++++++++++ - from trac.perm import IPermissionRequestor ? ^^^^^^^ ^ + from trac.perm import IPermissionPolicy ? ^ ^^^^ class KKBOXSecretTicketsPolicy(Component): - implements(IPermissionRequestor) ? ^^^^^^^ ^ + implements(IPermissionPolicy) ? ^ ^^^^ - def get_permission_actions(self): - return ['SECRET_VIEW'] + def __init__(self): + config = self.env.config + self.sensitive_keyword = config.get('kkbox', 'sensitive_keyword').strip() + + def check_permission(self, action, user, resource, perm): + while resource: + if 'ticket' == resource.realm: + break + resource = resource.parent + + if resource and 'ticket' == resource.realm and resource.id: + return self.check_ticket_access(perm, resource) + + def check_ticket_access(self, perm, res): + if not self.sensitive_keyword: + return None + + try: + ticket = Ticket(self.env, res.id) + keywords = [k.strip() for k in ticket['keywords'].split(',')] + if self.sensitive_keyword in keywords: + cc_list = [cc.strip() for cc in ticket['cc'].split(',')] + + if perm.username == ticket['reporter'] or \ + perm.username == ticket['owner'] or \ + perm.username in cc_list: + return None + else: + return False + except TracError as e: + self.log.error(e.message) + return None
d2971af14f57e925e1500da9ede42adb34d0dc62
tastycrust/authentication.py
tastycrust/authentication.py
class AnonymousAuthentication(object): anonymous_allowed_methods = ['GET'] def __init__(self, allowed=None): if allowed is not None: self.anonymous_allowed_methods = allowed def is_authenticated(self, request, **kwargs): allowed_methods = [s.upper() for s in self.anonymous_allowed_methods] if request.method in allowed_methods: return True return False
class AnonymousAuthentication(object): allowed_methods = ['GET'] def __init__(self, allowed=None): if allowed is not None: self.allowed_methods = allowed def is_authenticated(self, request, **kwargs): return (request.method in [s.upper() for s in self.allowed_methods])
Change some naming in AnonymousAuthentication
Change some naming in AnonymousAuthentication
Python
bsd-3-clause
uranusjr/django-tastypie-crust
class AnonymousAuthentication(object): - anonymous_allowed_methods = ['GET'] + allowed_methods = ['GET'] def __init__(self, allowed=None): if allowed is not None: - self.anonymous_allowed_methods = allowed + self.allowed_methods = allowed def is_authenticated(self, request, **kwargs): + return (request.method in [s.upper() for s in self.allowed_methods]) - allowed_methods = [s.upper() for s in self.anonymous_allowed_methods] - if request.method in allowed_methods: - return True - return False
Change some naming in AnonymousAuthentication
## Code Before: class AnonymousAuthentication(object): anonymous_allowed_methods = ['GET'] def __init__(self, allowed=None): if allowed is not None: self.anonymous_allowed_methods = allowed def is_authenticated(self, request, **kwargs): allowed_methods = [s.upper() for s in self.anonymous_allowed_methods] if request.method in allowed_methods: return True return False ## Instruction: Change some naming in AnonymousAuthentication ## Code After: class AnonymousAuthentication(object): allowed_methods = ['GET'] def __init__(self, allowed=None): if allowed is not None: self.allowed_methods = allowed def is_authenticated(self, request, **kwargs): return (request.method in [s.upper() for s in self.allowed_methods])
class AnonymousAuthentication(object): - anonymous_allowed_methods = ['GET'] ? ---------- + allowed_methods = ['GET'] def __init__(self, allowed=None): if allowed is not None: - self.anonymous_allowed_methods = allowed ? ---------- + self.allowed_methods = allowed def is_authenticated(self, request, **kwargs): - allowed_methods = [s.upper() for s in self.anonymous_allowed_methods] ? ^^^^^ ^^ - ^ ---------- + return (request.method in [s.upper() for s in self.allowed_methods]) ? ^ ^^^^^^^^^^^^^^ ^^ + - if request.method in allowed_methods: - return True - return False
38833f68daabe845650250e3edf9cb4b3cc9cb62
events/templatetags/humantime.py
events/templatetags/humantime.py
from django.template.defaultfilters import stringfilter from datetime import datetime, timedelta from django import template register = template.Library() @register.filter def event_time(start, end): today = datetime.today () result = "" if start == today: result += "aujourd'hui " else: result += "le %s " % start.strftime ("%A %d %B %Y") if start.day == end.day and start.month == end.month and start.year == end.year: result += "de %s " % start.strftime ("%H:%M") result += "à %s " % end.strftime ("%H:%M") else: result += "à %s" % start.strftime ("%H:%M") result += "jusqu'au %s" % end.strftime ("%A %d %B %Y à %H:%M") return result
from django.template.defaultfilters import stringfilter from datetime import datetime, timedelta from django import template import locale register = template.Library() @register.filter def event_time(start, end): today = datetime.today () result = "" # Hack! get the correct user local from the request loc = locale.getlocale() locale.setlocale(locale.LC_ALL, 'fr_CA.UTF8') if start == today: result += "Aujourd'hui " else: result += "Le %s " % start.strftime ("%A %d %B %Y") if start.day == end.day and start.month == end.month and start.year == end.year: result += "de %s " % start.strftime ("%H:%M") result += "à %s " % end.strftime ("%H:%M") else: result += "à %s" % start.strftime ("%H:%M") result += "jusqu'au %s" % end.strftime ("%A %d %B %Y à %H:%M") locale.setlocale(locale.LC_ALL, loc) return result
Print date in fr_CA locale
hack: Print date in fr_CA locale
Python
agpl-3.0
mlhamel/agendadulibre,mlhamel/agendadulibre,vcorreze/agendaEteAccoord,mlhamel/agendadulibre,vcorreze/agendaEteAccoord,vcorreze/agendaEteAccoord
from django.template.defaultfilters import stringfilter from datetime import datetime, timedelta from django import template + import locale register = template.Library() @register.filter def event_time(start, end): today = datetime.today () result = "" + # Hack! get the correct user local from the request + loc = locale.getlocale() + locale.setlocale(locale.LC_ALL, 'fr_CA.UTF8') + if start == today: - result += "aujourd'hui " + result += "Aujourd'hui " else: - result += "le %s " % start.strftime ("%A %d %B %Y") + result += "Le %s " % start.strftime ("%A %d %B %Y") if start.day == end.day and start.month == end.month and start.year == end.year: result += "de %s " % start.strftime ("%H:%M") result += "à %s " % end.strftime ("%H:%M") else: result += "à %s" % start.strftime ("%H:%M") result += "jusqu'au %s" % end.strftime ("%A %d %B %Y à %H:%M") + locale.setlocale(locale.LC_ALL, loc) return result
Print date in fr_CA locale
## Code Before: from django.template.defaultfilters import stringfilter from datetime import datetime, timedelta from django import template register = template.Library() @register.filter def event_time(start, end): today = datetime.today () result = "" if start == today: result += "aujourd'hui " else: result += "le %s " % start.strftime ("%A %d %B %Y") if start.day == end.day and start.month == end.month and start.year == end.year: result += "de %s " % start.strftime ("%H:%M") result += "à %s " % end.strftime ("%H:%M") else: result += "à %s" % start.strftime ("%H:%M") result += "jusqu'au %s" % end.strftime ("%A %d %B %Y à %H:%M") return result ## Instruction: Print date in fr_CA locale ## Code After: from django.template.defaultfilters import stringfilter from datetime import datetime, timedelta from django import template import locale register = template.Library() @register.filter def event_time(start, end): today = datetime.today () result = "" # Hack! get the correct user local from the request loc = locale.getlocale() locale.setlocale(locale.LC_ALL, 'fr_CA.UTF8') if start == today: result += "Aujourd'hui " else: result += "Le %s " % start.strftime ("%A %d %B %Y") if start.day == end.day and start.month == end.month and start.year == end.year: result += "de %s " % start.strftime ("%H:%M") result += "à %s " % end.strftime ("%H:%M") else: result += "à %s" % start.strftime ("%H:%M") result += "jusqu'au %s" % end.strftime ("%A %d %B %Y à %H:%M") locale.setlocale(locale.LC_ALL, loc) return result
from django.template.defaultfilters import stringfilter from datetime import datetime, timedelta from django import template + import locale register = template.Library() @register.filter def event_time(start, end): today = datetime.today () result = "" + # Hack! get the correct user local from the request + loc = locale.getlocale() + locale.setlocale(locale.LC_ALL, 'fr_CA.UTF8') + if start == today: - result += "aujourd'hui " ? ^ + result += "Aujourd'hui " ? ^ else: - result += "le %s " % start.strftime ("%A %d %B %Y") ? ^ + result += "Le %s " % start.strftime ("%A %d %B %Y") ? ^ if start.day == end.day and start.month == end.month and start.year == end.year: result += "de %s " % start.strftime ("%H:%M") result += "à %s " % end.strftime ("%H:%M") else: result += "à %s" % start.strftime ("%H:%M") result += "jusqu'au %s" % end.strftime ("%A %d %B %Y à %H:%M") + locale.setlocale(locale.LC_ALL, loc) return result
9fb8b0a72740ba155c76a5812706612b656980f4
openprocurement/auctions/flash/constants.py
openprocurement/auctions/flash/constants.py
VIEW_LOCATIONS = [ "openprocurement.auctions.flash.views", "openprocurement.auctions.core.plugins", ]
VIEW_LOCATIONS = [ "openprocurement.auctions.flash.views", ]
Add view_locations for plugins in core
Add view_locations for plugins in core
Python
apache-2.0
openprocurement/openprocurement.auctions.flash
VIEW_LOCATIONS = [ "openprocurement.auctions.flash.views", - "openprocurement.auctions.core.plugins", ]
Add view_locations for plugins in core
## Code Before: VIEW_LOCATIONS = [ "openprocurement.auctions.flash.views", "openprocurement.auctions.core.plugins", ] ## Instruction: Add view_locations for plugins in core ## Code After: VIEW_LOCATIONS = [ "openprocurement.auctions.flash.views", ]
VIEW_LOCATIONS = [ "openprocurement.auctions.flash.views", - "openprocurement.auctions.core.plugins", ]
effbffd67d52561ca1ba09201782aafc6cfc52f7
blog/posts/models.py
blog/posts/models.py
from django.db import models # Create your models here.
from django.db import models class Author(models.Model): name = models.CharField(max_length=20) email = models.EmailField(max_length=254) def __unicode__(self): return self.name class Post(models.Model): body = models.TextField() title = models.CharField(max_length=50) author = models.ForeignKey(Author) publication_date = models.DateTimeField(auto_now_add=True) def __unicode__(self): return self.title
Set up the DB schema for posts.
Set up the DB schema for posts.
Python
mit
Lukasa/minimalog
from django.db import models - # Create your models here. + class Author(models.Model): + name = models.CharField(max_length=20) + email = models.EmailField(max_length=254) + def __unicode__(self): + return self.name + + class Post(models.Model): + body = models.TextField() + title = models.CharField(max_length=50) + author = models.ForeignKey(Author) + publication_date = models.DateTimeField(auto_now_add=True) + + def __unicode__(self): + return self.title + +
Set up the DB schema for posts.
## Code Before: from django.db import models # Create your models here. ## Instruction: Set up the DB schema for posts. ## Code After: from django.db import models class Author(models.Model): name = models.CharField(max_length=20) email = models.EmailField(max_length=254) def __unicode__(self): return self.name class Post(models.Model): body = models.TextField() title = models.CharField(max_length=50) author = models.ForeignKey(Author) publication_date = models.DateTimeField(auto_now_add=True) def __unicode__(self): return self.title
from django.db import models - # Create your models here. + class Author(models.Model): + name = models.CharField(max_length=20) + email = models.EmailField(max_length=254) + + def __unicode__(self): + return self.name + + class Post(models.Model): + body = models.TextField() + title = models.CharField(max_length=50) + author = models.ForeignKey(Author) + publication_date = models.DateTimeField(auto_now_add=True) + + def __unicode__(self): + return self.title +
ea2383175456257384e625bb1113d98536b78a92
tests/test_shutil.py
tests/test_shutil.py
__author__ = 'Shyue Ping Ong' __copyright__ = 'Copyright 2014, The Materials Virtual Lab' __version__ = '0.1' __maintainer__ = 'Shyue Ping Ong' __email__ = '[email protected]' __date__ = '1/24/14' import unittest import os import shutil from monty.shutil import copy_r class CopyRTest(unittest.TestCase): def setUp(self): os.mkdir("cpr_src") with open(os.path.join("cpr_src", "test"), "w") as f: f.write("what") def test_recursive_copy(self): copy_r(".", "cpr_dst") self.assertTrue(os.path.exists(os.path.join("cpr_dst", "cpr_src", "test"))) def tearDown(self): shutil.rmtree("cpr_src") shutil.rmtree("cpr_dst") if __name__ == "__main__": unittest.main()
__author__ = 'Shyue Ping Ong' __copyright__ = 'Copyright 2014, The Materials Virtual Lab' __version__ = '0.1' __maintainer__ = 'Shyue Ping Ong' __email__ = '[email protected]' __date__ = '1/24/14' import unittest import os import shutil from monty.shutil import copy_r # class CopyRTest(unittest.TestCase): # # def setUp(self): # os.mkdir("cpr_src") # with open(os.path.join("cpr_src", "test"), "w") as f: # f.write("what") # # def test_recursive_copy(self): # copy_r(".", "cpr_dst") # self.assertTrue(os.path.exists(os.path.join("cpr_dst", "cpr_src", # "test"))) # # def tearDown(self): # shutil.rmtree("cpr_src") # shutil.rmtree("cpr_dst") if __name__ == "__main__": unittest.main()
Comment out CopyR test for now.
Comment out CopyR test for now.
Python
mit
yanikou19/monty,davidwaroquiers/monty,materialsvirtuallab/monty,davidwaroquiers/monty,materialsvirtuallab/monty,gpetretto/monty,gmatteo/monty,gmatteo/monty
__author__ = 'Shyue Ping Ong' __copyright__ = 'Copyright 2014, The Materials Virtual Lab' __version__ = '0.1' __maintainer__ = 'Shyue Ping Ong' __email__ = '[email protected]' __date__ = '1/24/14' import unittest import os import shutil from monty.shutil import copy_r - class CopyRTest(unittest.TestCase): + # class CopyRTest(unittest.TestCase): - + # - def setUp(self): + # def setUp(self): - os.mkdir("cpr_src") + # os.mkdir("cpr_src") - with open(os.path.join("cpr_src", "test"), "w") as f: + # with open(os.path.join("cpr_src", "test"), "w") as f: - f.write("what") + # f.write("what") - + # - def test_recursive_copy(self): + # def test_recursive_copy(self): - copy_r(".", "cpr_dst") + # copy_r(".", "cpr_dst") - self.assertTrue(os.path.exists(os.path.join("cpr_dst", "cpr_src", + # self.assertTrue(os.path.exists(os.path.join("cpr_dst", "cpr_src", - "test"))) + # "test"))) - + # - def tearDown(self): + # def tearDown(self): - shutil.rmtree("cpr_src") + # shutil.rmtree("cpr_src") - shutil.rmtree("cpr_dst") + # shutil.rmtree("cpr_dst") if __name__ == "__main__": unittest.main()
Comment out CopyR test for now.
## Code Before: __author__ = 'Shyue Ping Ong' __copyright__ = 'Copyright 2014, The Materials Virtual Lab' __version__ = '0.1' __maintainer__ = 'Shyue Ping Ong' __email__ = '[email protected]' __date__ = '1/24/14' import unittest import os import shutil from monty.shutil import copy_r class CopyRTest(unittest.TestCase): def setUp(self): os.mkdir("cpr_src") with open(os.path.join("cpr_src", "test"), "w") as f: f.write("what") def test_recursive_copy(self): copy_r(".", "cpr_dst") self.assertTrue(os.path.exists(os.path.join("cpr_dst", "cpr_src", "test"))) def tearDown(self): shutil.rmtree("cpr_src") shutil.rmtree("cpr_dst") if __name__ == "__main__": unittest.main() ## Instruction: Comment out CopyR test for now. ## Code After: __author__ = 'Shyue Ping Ong' __copyright__ = 'Copyright 2014, The Materials Virtual Lab' __version__ = '0.1' __maintainer__ = 'Shyue Ping Ong' __email__ = '[email protected]' __date__ = '1/24/14' import unittest import os import shutil from monty.shutil import copy_r # class CopyRTest(unittest.TestCase): # # def setUp(self): # os.mkdir("cpr_src") # with open(os.path.join("cpr_src", "test"), "w") as f: # f.write("what") # # def test_recursive_copy(self): # copy_r(".", "cpr_dst") # self.assertTrue(os.path.exists(os.path.join("cpr_dst", "cpr_src", # "test"))) # # def tearDown(self): # shutil.rmtree("cpr_src") # shutil.rmtree("cpr_dst") if __name__ == "__main__": unittest.main()
__author__ = 'Shyue Ping Ong' __copyright__ = 'Copyright 2014, The Materials Virtual Lab' __version__ = '0.1' __maintainer__ = 'Shyue Ping Ong' __email__ = '[email protected]' __date__ = '1/24/14' import unittest import os import shutil from monty.shutil import copy_r - class CopyRTest(unittest.TestCase): + # class CopyRTest(unittest.TestCase): ? ++ - + # - def setUp(self): + # def setUp(self): ? ++ - os.mkdir("cpr_src") + # os.mkdir("cpr_src") ? ++ - with open(os.path.join("cpr_src", "test"), "w") as f: + # with open(os.path.join("cpr_src", "test"), "w") as f: ? ++ - f.write("what") + # f.write("what") ? ++ - + # - def test_recursive_copy(self): + # def test_recursive_copy(self): ? ++ - copy_r(".", "cpr_dst") + # copy_r(".", "cpr_dst") ? ++ - self.assertTrue(os.path.exists(os.path.join("cpr_dst", "cpr_src", + # self.assertTrue(os.path.exists(os.path.join("cpr_dst", "cpr_src", ? ++ - "test"))) + # "test"))) ? ++ - + # - def tearDown(self): + # def tearDown(self): ? ++ - shutil.rmtree("cpr_src") + # shutil.rmtree("cpr_src") ? ++ - shutil.rmtree("cpr_dst") + # shutil.rmtree("cpr_dst") ? ++ if __name__ == "__main__": unittest.main()
12254ea15b1f761ad63095ed7244f347d42e4c85
file_encryptor/__init__.py
file_encryptor/__init__.py
from file_encryptor import (convergence, key_generators)
from file_encryptor import (convergence, key_generators) __version__ = '0.2.0'
Add copyright, license and version information.
Add copyright, license and version information.
Python
mit
Storj/file-encryptor
from file_encryptor import (convergence, key_generators) + __version__ = '0.2.0' +
Add copyright, license and version information.
## Code Before: from file_encryptor import (convergence, key_generators) ## Instruction: Add copyright, license and version information. ## Code After: from file_encryptor import (convergence, key_generators) __version__ = '0.2.0'
from file_encryptor import (convergence, key_generators) + + __version__ = '0.2.0'
a72c494c5c0f010192f39d84c13c90c0f0f8941e
sympycore/calculus/__init__.py
sympycore/calculus/__init__.py
from .algebra import Calculus, I, integrate, oo, undefined from .functions import exp, log, sqrt, sin, cos, tan, cot, pi, E Symbol = Calculus.Symbol Number = Calculus.Number Add = lambda *args: Calculus.Add(*map(Calculus.convert, args)) Mul = lambda *args: Calculus.Mul(*map(Calculus.convert, args)) Pow = lambda *args: Calculus.Pow(*map(Calculus.convert, args))
from .algebra import Calculus, I, integrate, oo, undefined from .functions import exp, log, sqrt, sin, cos, tan, cot, pi, E Symbol = Calculus.Symbol def Number(num, denom=None): n = Calculus.Number(Calculus.convert_coefficient(num)) if denom is None: return n return n / denom Add = lambda *args: Calculus.Add(*map(Calculus.convert, args)) Mul = lambda *args: Calculus.Mul(*map(Calculus.convert, args)) Pow = lambda *args: Calculus.Pow(*map(Calculus.convert, args))
Fix calculus.Number to handle floats.
Fix calculus.Number to handle floats.
Python
bsd-3-clause
pearu/sympycore,pearu/sympycore
from .algebra import Calculus, I, integrate, oo, undefined from .functions import exp, log, sqrt, sin, cos, tan, cot, pi, E Symbol = Calculus.Symbol - Number = Calculus.Number + + def Number(num, denom=None): + n = Calculus.Number(Calculus.convert_coefficient(num)) + if denom is None: + return n + return n / denom Add = lambda *args: Calculus.Add(*map(Calculus.convert, args)) Mul = lambda *args: Calculus.Mul(*map(Calculus.convert, args)) Pow = lambda *args: Calculus.Pow(*map(Calculus.convert, args))
Fix calculus.Number to handle floats.
## Code Before: from .algebra import Calculus, I, integrate, oo, undefined from .functions import exp, log, sqrt, sin, cos, tan, cot, pi, E Symbol = Calculus.Symbol Number = Calculus.Number Add = lambda *args: Calculus.Add(*map(Calculus.convert, args)) Mul = lambda *args: Calculus.Mul(*map(Calculus.convert, args)) Pow = lambda *args: Calculus.Pow(*map(Calculus.convert, args)) ## Instruction: Fix calculus.Number to handle floats. ## Code After: from .algebra import Calculus, I, integrate, oo, undefined from .functions import exp, log, sqrt, sin, cos, tan, cot, pi, E Symbol = Calculus.Symbol def Number(num, denom=None): n = Calculus.Number(Calculus.convert_coefficient(num)) if denom is None: return n return n / denom Add = lambda *args: Calculus.Add(*map(Calculus.convert, args)) Mul = lambda *args: Calculus.Mul(*map(Calculus.convert, args)) Pow = lambda *args: Calculus.Pow(*map(Calculus.convert, args))
from .algebra import Calculus, I, integrate, oo, undefined from .functions import exp, log, sqrt, sin, cos, tan, cot, pi, E Symbol = Calculus.Symbol - Number = Calculus.Number + + def Number(num, denom=None): + n = Calculus.Number(Calculus.convert_coefficient(num)) + if denom is None: + return n + return n / denom Add = lambda *args: Calculus.Add(*map(Calculus.convert, args)) Mul = lambda *args: Calculus.Mul(*map(Calculus.convert, args)) Pow = lambda *args: Calculus.Pow(*map(Calculus.convert, args))
5b2f835f377481c6c217dd886f28c1bb400db553
linter.py
linter.py
"""This module exports the CFLint plugin class.""" from SublimeLinter.lint import Linter, util class CFLint(Linter): """Provides an interface to CFLint.""" syntax = ('coldfusioncfc', 'html+cfml') cmd = 'cflint -q -text -file' version_args = '-version' version_re = r'\b(?P<version>\d+\.\d+\.\d+)' version_requirement = '>= 0.1.8' regex = r'''(?xi) # The severity ^\s*Severity:(?:(?P<warning>(INFO|WARNING))|(?P<error>ERROR))\s*$\r?\n # The file name ^.*$\r?\n # The Message Code ^.*$\r?\n # The Column number ^\s*Column:(?P<col>\d+)\s*$\r?\n # The Line number ^\s*Line:(?P<line>\d+)\s*$\r?\n # The Error Message ^\s*Message:(?P<message>.+)$\r?\n ''' multiline = True error_stream = util.STREAM_STDOUT word_re = r'^<?(#?[-\w]+)' tempfile_suffix = '-'
"""This module exports the CFLint plugin class.""" from SublimeLinter.lint import Linter, util class CFLint(Linter): """Provides an interface to CFLint.""" syntax = ('coldfusioncfc', 'html+cfml') cmd = 'cflint -file @ -q -text' version_args = '-version' version_re = r'\b(?P<version>\d+\.\d+\.\d+)' version_requirement = '>= 0.1.8' regex = r'''(?xi) # The severity ^\s*Severity:(?:(?P<warning>(INFO|WARNING))|(?P<error>ERROR))\s*$\r?\n # The file name ^.*$\r?\n # The Message Code ^.*$\r?\n # The Column number ^\s*Column:(?P<col>\d+)\s*$\r?\n # The Line number ^\s*Line:(?P<line>\d+)\s*$\r?\n # The Error Message ^\s*Message:(?P<message>.+)$\r?\n ''' multiline = True error_stream = util.STREAM_STDOUT word_re = r'^<?(#?[-\w]+)' tempfile_suffix = '-'
Update cmd to allow args
Update cmd to allow args Change the cmd string so that the "args" argument can be used in linter settings. The way it was any args would be inserted between the '-file' and the filename which broke the '-file' argument. For this config, "cflint": { "@disable": false, "args": ['-configfile c:\cflintrc.xml'], "excludes": [] } The results are: old: cflint -q -text -file -configfile c:\cflintrc.xml index.cfm new: cflint -file index.cfm -q -text -configfile c:\cflintrc.xml
Python
mit
ckaznocha/SublimeLinter-contrib-CFLint
"""This module exports the CFLint plugin class.""" from SublimeLinter.lint import Linter, util class CFLint(Linter): """Provides an interface to CFLint.""" syntax = ('coldfusioncfc', 'html+cfml') - cmd = 'cflint -q -text -file' + cmd = 'cflint -file @ -q -text' version_args = '-version' version_re = r'\b(?P<version>\d+\.\d+\.\d+)' version_requirement = '>= 0.1.8' regex = r'''(?xi) # The severity ^\s*Severity:(?:(?P<warning>(INFO|WARNING))|(?P<error>ERROR))\s*$\r?\n # The file name ^.*$\r?\n # The Message Code ^.*$\r?\n # The Column number ^\s*Column:(?P<col>\d+)\s*$\r?\n # The Line number ^\s*Line:(?P<line>\d+)\s*$\r?\n # The Error Message ^\s*Message:(?P<message>.+)$\r?\n ''' multiline = True error_stream = util.STREAM_STDOUT word_re = r'^<?(#?[-\w]+)' tempfile_suffix = '-'
Update cmd to allow args
## Code Before: """This module exports the CFLint plugin class.""" from SublimeLinter.lint import Linter, util class CFLint(Linter): """Provides an interface to CFLint.""" syntax = ('coldfusioncfc', 'html+cfml') cmd = 'cflint -q -text -file' version_args = '-version' version_re = r'\b(?P<version>\d+\.\d+\.\d+)' version_requirement = '>= 0.1.8' regex = r'''(?xi) # The severity ^\s*Severity:(?:(?P<warning>(INFO|WARNING))|(?P<error>ERROR))\s*$\r?\n # The file name ^.*$\r?\n # The Message Code ^.*$\r?\n # The Column number ^\s*Column:(?P<col>\d+)\s*$\r?\n # The Line number ^\s*Line:(?P<line>\d+)\s*$\r?\n # The Error Message ^\s*Message:(?P<message>.+)$\r?\n ''' multiline = True error_stream = util.STREAM_STDOUT word_re = r'^<?(#?[-\w]+)' tempfile_suffix = '-' ## Instruction: Update cmd to allow args ## Code After: """This module exports the CFLint plugin class.""" from SublimeLinter.lint import Linter, util class CFLint(Linter): """Provides an interface to CFLint.""" syntax = ('coldfusioncfc', 'html+cfml') cmd = 'cflint -file @ -q -text' version_args = '-version' version_re = r'\b(?P<version>\d+\.\d+\.\d+)' version_requirement = '>= 0.1.8' regex = r'''(?xi) # The severity ^\s*Severity:(?:(?P<warning>(INFO|WARNING))|(?P<error>ERROR))\s*$\r?\n # The file name ^.*$\r?\n # The Message Code ^.*$\r?\n # The Column number ^\s*Column:(?P<col>\d+)\s*$\r?\n # The Line number ^\s*Line:(?P<line>\d+)\s*$\r?\n # The Error Message ^\s*Message:(?P<message>.+)$\r?\n ''' multiline = True error_stream = util.STREAM_STDOUT word_re = r'^<?(#?[-\w]+)' tempfile_suffix = '-'
"""This module exports the CFLint plugin class.""" from SublimeLinter.lint import Linter, util class CFLint(Linter): """Provides an interface to CFLint.""" syntax = ('coldfusioncfc', 'html+cfml') - cmd = 'cflint -q -text -file' ? ------ + cmd = 'cflint -file @ -q -text' ? ++++++++ version_args = '-version' version_re = r'\b(?P<version>\d+\.\d+\.\d+)' version_requirement = '>= 0.1.8' regex = r'''(?xi) # The severity ^\s*Severity:(?:(?P<warning>(INFO|WARNING))|(?P<error>ERROR))\s*$\r?\n # The file name ^.*$\r?\n # The Message Code ^.*$\r?\n # The Column number ^\s*Column:(?P<col>\d+)\s*$\r?\n # The Line number ^\s*Line:(?P<line>\d+)\s*$\r?\n # The Error Message ^\s*Message:(?P<message>.+)$\r?\n ''' multiline = True error_stream = util.STREAM_STDOUT word_re = r'^<?(#?[-\w]+)' tempfile_suffix = '-'
66212e51341562f156353a0ae195d15b0d22b21b
scripts/import.py
scripts/import.py
import sys import os import requests import multiprocessing def list_files(directory): for root, subdirs, files in os.walk(directory): print("ROOT: {}".format(root)) for file in files: yield os.path.abspath(os.path.join(root, file)) for subdir in subdirs: for subfile in list_files(subdir): yield subfile if __name__ == "__main__": _, base_url, access_token, listing_dir = sys.argv endpoint = "{}/services".format(base_url) print("Base URL: {}".format(base_url)) print("Access token: {}".format(access_token)) print("Listing dir: {}".format(listing_dir)) def post_file(file_path): with open(file_path) as f: response = requests.post( endpoint, data='{"services":%s}' % f.read(), headers={ "content-type": "application/json", "authorization": "Bearer {}".format(access_token), }) return response pool = multiprocessing.Pool(10) for result in pool.imap(post_file, list_files(listing_dir)): print(result)
import sys import json import os import requests import multiprocessing def list_files(directory): for root, subdirs, files in os.walk(directory): print("ROOT: {}".format(root)) for file in files: yield os.path.abspath(os.path.join(root, file)) for subdir in subdirs: for subfile in list_files(subdir): yield subfile if __name__ == "__main__": _, base_url, access_token, listing_dir = sys.argv endpoint = "{}/services".format(base_url) print("Base URL: {}".format(base_url)) print("Access token: {}".format(access_token)) print("Listing dir: {}".format(listing_dir)) def put_file(file_path): with open(file_path) as f: data = json.load(f) data = {'services': data} url = '{}/{}'.format(endpoint, data['services']['id']) response = requests.put( url, data=json.dumps(data), headers={ "content-type": "application/json", "authorization": "Bearer {}".format(access_token), }) return response pool = multiprocessing.Pool(10) for result in pool.imap(put_file, list_files(listing_dir)): print(result)
PUT services rather than POSTing them
PUT services rather than POSTing them
Python
mit
RichardKnop/digitalmarketplace-api,RichardKnop/digitalmarketplace-api,RichardKnop/digitalmarketplace-api,mtekel/digitalmarketplace-api,mtekel/digitalmarketplace-api,RichardKnop/digitalmarketplace-api,alphagov/digitalmarketplace-api,mtekel/digitalmarketplace-api,alphagov/digitalmarketplace-api,mtekel/digitalmarketplace-api,alphagov/digitalmarketplace-api
import sys + import json import os import requests import multiprocessing def list_files(directory): for root, subdirs, files in os.walk(directory): print("ROOT: {}".format(root)) for file in files: yield os.path.abspath(os.path.join(root, file)) for subdir in subdirs: for subfile in list_files(subdir): yield subfile if __name__ == "__main__": _, base_url, access_token, listing_dir = sys.argv endpoint = "{}/services".format(base_url) print("Base URL: {}".format(base_url)) print("Access token: {}".format(access_token)) print("Listing dir: {}".format(listing_dir)) - def post_file(file_path): + def put_file(file_path): with open(file_path) as f: + data = json.load(f) + data = {'services': data} + url = '{}/{}'.format(endpoint, data['services']['id']) - response = requests.post( + response = requests.put( - endpoint, + url, - data='{"services":%s}' % f.read(), + data=json.dumps(data), headers={ "content-type": "application/json", "authorization": "Bearer {}".format(access_token), }) return response pool = multiprocessing.Pool(10) - for result in pool.imap(post_file, list_files(listing_dir)): + for result in pool.imap(put_file, list_files(listing_dir)): print(result)
PUT services rather than POSTing them
## Code Before: import sys import os import requests import multiprocessing def list_files(directory): for root, subdirs, files in os.walk(directory): print("ROOT: {}".format(root)) for file in files: yield os.path.abspath(os.path.join(root, file)) for subdir in subdirs: for subfile in list_files(subdir): yield subfile if __name__ == "__main__": _, base_url, access_token, listing_dir = sys.argv endpoint = "{}/services".format(base_url) print("Base URL: {}".format(base_url)) print("Access token: {}".format(access_token)) print("Listing dir: {}".format(listing_dir)) def post_file(file_path): with open(file_path) as f: response = requests.post( endpoint, data='{"services":%s}' % f.read(), headers={ "content-type": "application/json", "authorization": "Bearer {}".format(access_token), }) return response pool = multiprocessing.Pool(10) for result in pool.imap(post_file, list_files(listing_dir)): print(result) ## Instruction: PUT services rather than POSTing them ## Code After: import sys import json import os import requests import multiprocessing def list_files(directory): for root, subdirs, files in os.walk(directory): print("ROOT: {}".format(root)) for file in files: yield os.path.abspath(os.path.join(root, file)) for subdir in subdirs: for subfile in list_files(subdir): yield subfile if __name__ == "__main__": _, base_url, access_token, listing_dir = sys.argv endpoint = "{}/services".format(base_url) print("Base URL: {}".format(base_url)) print("Access token: {}".format(access_token)) print("Listing dir: {}".format(listing_dir)) def put_file(file_path): with open(file_path) as f: data = json.load(f) data = {'services': data} url = '{}/{}'.format(endpoint, data['services']['id']) response = requests.put( url, data=json.dumps(data), headers={ "content-type": "application/json", "authorization": "Bearer {}".format(access_token), }) return response pool = multiprocessing.Pool(10) for result in pool.imap(put_file, list_files(listing_dir)): print(result)
import sys + import json import os import requests import multiprocessing def list_files(directory): for root, subdirs, files in os.walk(directory): print("ROOT: {}".format(root)) for file in files: yield os.path.abspath(os.path.join(root, file)) for subdir in subdirs: for subfile in list_files(subdir): yield subfile if __name__ == "__main__": _, base_url, access_token, listing_dir = sys.argv endpoint = "{}/services".format(base_url) print("Base URL: {}".format(base_url)) print("Access token: {}".format(access_token)) print("Listing dir: {}".format(listing_dir)) - def post_file(file_path): ? ^^ + def put_file(file_path): ? ^ with open(file_path) as f: + data = json.load(f) + data = {'services': data} + url = '{}/{}'.format(endpoint, data['services']['id']) - response = requests.post( ? ^^ + response = requests.put( ? ^ - endpoint, ? ^^^^^^^^ + url, ? ^^^ - data='{"services":%s}' % f.read(), + data=json.dumps(data), headers={ "content-type": "application/json", "authorization": "Bearer {}".format(access_token), }) return response pool = multiprocessing.Pool(10) - for result in pool.imap(post_file, list_files(listing_dir)): ? ^^ + for result in pool.imap(put_file, list_files(listing_dir)): ? ^ print(result)
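A minimal sketch of the URL and payload the reworked script builds for a single listing file; the endpoint and id values below are made up for illustration.

import json

endpoint = 'https://example.com/api/services'              # made-up base endpoint
data = {'services': {'id': '1234567890', 'title': 'Example service'}}
url = '{}/{}'.format(endpoint, data['services']['id'])     # .../services/1234567890
body = json.dumps(data)                                    # what requests.put() sends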
8ae5079a2963a356a6073a245305fff98fcc7461
dbaas/logical/tasks.py
dbaas/logical/tasks.py
from logical.models import Database from dbaas.celery import app from util.decorators import only_one @app.task @only_one(key="purgequarantinekey", timeout=20) def purge_quarantine(): Database.purge_quarantine() return
from logical.models import Database from system.models import Configuration from datetime import date, timedelta from dbaas.celery import app from util.decorators import only_one from util.providers import destroy_infra from simple_audit.models import AuditRequest from notification.models import TaskHistory from account.models import AccountUser import logging LOG = logging.getLogger(__name__) @app.task(bind=True) @only_one(key="purgequarantinekey", timeout=1000) def purge_quarantine(self,): user = AccountUser.objects.get(username='admin') AuditRequest.new_request("purge_quarantine", user, "localhost") try: task_history = TaskHistory.register(request=self.request, user=user) LOG.info("id: %s | task: %s | kwargs: %s | args: %s" % ( self.request.id, self.request.task, self.request.kwargs, str(self.request.args))) quarantine_time = Configuration.get_by_name_as_int( 'quarantine_retention_days') quarantine_time_dt = date.today() - timedelta(days=quarantine_time) databases = Database.objects.filter( is_in_quarantine=True, quarantine_dt__lte=quarantine_time_dt) for database in databases: if database.plan.provider == database.plan.CLOUDSTACK: databaseinfra = database.databaseinfra destroy_infra(databaseinfra=databaseinfra, task=task_history) else: database.delete() LOG.info("The database %s was deleted, because it was set to quarentine %d days ago" % ( database.name, quarantine_time)) task_history.update_status_for(TaskHistory.STATUS_SUCCESS, details='Databases destroyed successfully') return except Exception: task_history.update_status_for(TaskHistory.STATUS_ERROR, details="Error") return finally: AuditRequest.cleanup_request()
Change purge quarantine to deal with cloudstack databases
Change purge quarantine to deal with cloudstack databases
Python
bsd-3-clause
globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service
from logical.models import Database - + from system.models import Configuration + from datetime import date, timedelta from dbaas.celery import app from util.decorators import only_one + from util.providers import destroy_infra + from simple_audit.models import AuditRequest + from notification.models import TaskHistory + from account.models import AccountUser + import logging + + LOG = logging.getLogger(__name__) + + @app.task(bind=True) + @only_one(key="purgequarantinekey", timeout=1000) + def purge_quarantine(self,): + user = AccountUser.objects.get(username='admin') + AuditRequest.new_request("purge_quarantine", user, "localhost") + try: + + task_history = TaskHistory.register(request=self.request, user=user) + + LOG.info("id: %s | task: %s | kwargs: %s | args: %s" % ( + self.request.id, self.request.task, self.request.kwargs, str(self.request.args))) + quarantine_time = Configuration.get_by_name_as_int( + 'quarantine_retention_days') + quarantine_time_dt = date.today() - timedelta(days=quarantine_time) + + databases = Database.objects.filter( + is_in_quarantine=True, quarantine_dt__lte=quarantine_time_dt) + + for database in databases: + if database.plan.provider == database.plan.CLOUDSTACK: + databaseinfra = database.databaseinfra + + destroy_infra(databaseinfra=databaseinfra, task=task_history) + else: + database.delete() + + LOG.info("The database %s was deleted, because it was set to quarentine %d days ago" % ( + database.name, quarantine_time)) + task_history.update_status_for(TaskHistory.STATUS_SUCCESS, details='Databases destroyed successfully') - @app.task - @only_one(key="purgequarantinekey", timeout=20) - def purge_quarantine(): - Database.purge_quarantine() - return + return + except Exception: + task_history.update_status_for(TaskHistory.STATUS_ERROR, details="Error") + return + finally: + AuditRequest.cleanup_request() +
Change purge quarantine to deal with cloudstack databases
## Code Before: from logical.models import Database from dbaas.celery import app from util.decorators import only_one @app.task @only_one(key="purgequarantinekey", timeout=20) def purge_quarantine(): Database.purge_quarantine() return ## Instruction: Change purge quarantine to deal with cloudstack databases ## Code After: from logical.models import Database from system.models import Configuration from datetime import date, timedelta from dbaas.celery import app from util.decorators import only_one from util.providers import destroy_infra from simple_audit.models import AuditRequest from notification.models import TaskHistory from account.models import AccountUser import logging LOG = logging.getLogger(__name__) @app.task(bind=True) @only_one(key="purgequarantinekey", timeout=1000) def purge_quarantine(self,): user = AccountUser.objects.get(username='admin') AuditRequest.new_request("purge_quarantine", user, "localhost") try: task_history = TaskHistory.register(request=self.request, user=user) LOG.info("id: %s | task: %s | kwargs: %s | args: %s" % ( self.request.id, self.request.task, self.request.kwargs, str(self.request.args))) quarantine_time = Configuration.get_by_name_as_int( 'quarantine_retention_days') quarantine_time_dt = date.today() - timedelta(days=quarantine_time) databases = Database.objects.filter( is_in_quarantine=True, quarantine_dt__lte=quarantine_time_dt) for database in databases: if database.plan.provider == database.plan.CLOUDSTACK: databaseinfra = database.databaseinfra destroy_infra(databaseinfra=databaseinfra, task=task_history) else: database.delete() LOG.info("The database %s was deleted, because it was set to quarentine %d days ago" % ( database.name, quarantine_time)) task_history.update_status_for(TaskHistory.STATUS_SUCCESS, details='Databases destroyed successfully') return except Exception: task_history.update_status_for(TaskHistory.STATUS_ERROR, details="Error") return finally: AuditRequest.cleanup_request()
from logical.models import Database - + from system.models import Configuration + from datetime import date, timedelta from dbaas.celery import app from util.decorators import only_one + from util.providers import destroy_infra + from simple_audit.models import AuditRequest + from notification.models import TaskHistory + from account.models import AccountUser + import logging + + LOG = logging.getLogger(__name__) + + @app.task(bind=True) + @only_one(key="purgequarantinekey", timeout=1000) + def purge_quarantine(self,): + user = AccountUser.objects.get(username='admin') + AuditRequest.new_request("purge_quarantine", user, "localhost") + try: + + task_history = TaskHistory.register(request=self.request, user=user) + + LOG.info("id: %s | task: %s | kwargs: %s | args: %s" % ( + self.request.id, self.request.task, self.request.kwargs, str(self.request.args))) + quarantine_time = Configuration.get_by_name_as_int( + 'quarantine_retention_days') + quarantine_time_dt = date.today() - timedelta(days=quarantine_time) + + databases = Database.objects.filter( + is_in_quarantine=True, quarantine_dt__lte=quarantine_time_dt) + + for database in databases: + if database.plan.provider == database.plan.CLOUDSTACK: + databaseinfra = database.databaseinfra + + destroy_infra(databaseinfra=databaseinfra, task=task_history) + else: + database.delete() + + LOG.info("The database %s was deleted, because it was set to quarentine %d days ago" % ( + database.name, quarantine_time)) + task_history.update_status_for(TaskHistory.STATUS_SUCCESS, details='Databases destroyed successfully') - @app.task - @only_one(key="purgequarantinekey", timeout=20) - def purge_quarantine(): - Database.purge_quarantine() - return + return ? ++++ + + except Exception: + task_history.update_status_for(TaskHistory.STATUS_ERROR, details="Error") + return + finally: + AuditRequest.cleanup_request()
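The cutoff arithmetic at the heart of the task, isolated as a runnable sketch; the 7-day value is an assumed stand-in for the quarantine_retention_days configuration entry.

from datetime import date, timedelta

quarantine_time = 7  # stand-in for Configuration.get_by_name_as_int('quarantine_retention_days')
cutoff = date.today() - timedelta(days=quarantine_time)
# Databases with quarantine_dt <= cutoff get destroy_infra() (CloudStack) or delete().
print(cutoff)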
fe0d86df9c4be9d33a461578b71c43865f79c715
tests/builtins/test_input.py
tests/builtins/test_input.py
from .. utils import TranspileTestCase, BuiltinFunctionTestCase class InputTests(TranspileTestCase): pass class BuiltinInputFunctionTests(BuiltinFunctionTestCase, TranspileTestCase): functions = ["input"] not_implemented = [ 'test_bool', 'test_bytearray', 'test_bytes', 'test_class', 'test_complex', 'test_dict', 'test_float', 'test_frozenset', 'test_int', 'test_list', 'test_set', 'test_str', 'test_tuple', ]
from .. utils import TranspileTestCase, BuiltinFunctionTestCase class InputTests(TranspileTestCase): pass # class BuiltinInputFunctionTests(BuiltinFunctionTestCase, TranspileTestCase): # functions = ["input"] # not_implemented = [ # 'test_bool', # 'test_bytearray', # 'test_bytes', # 'test_class', # 'test_complex', # 'test_dict', # 'test_float', # 'test_frozenset', # 'test_int', # 'test_list', # 'test_set', # 'test_str', # 'test_tuple', # ]
Disable builtin tests for input() as it hangs
Disable builtin tests for input() as it hangs
Python
bsd-3-clause
cflee/voc,Felix5721/voc,ASP1234/voc,cflee/voc,glasnt/voc,ASP1234/voc,glasnt/voc,freakboy3742/voc,freakboy3742/voc,gEt-rIgHt-jR/voc,Felix5721/voc,gEt-rIgHt-jR/voc,pombredanne/voc,pombredanne/voc
from .. utils import TranspileTestCase, BuiltinFunctionTestCase class InputTests(TranspileTestCase): pass - class BuiltinInputFunctionTests(BuiltinFunctionTestCase, TranspileTestCase): + # class BuiltinInputFunctionTests(BuiltinFunctionTestCase, TranspileTestCase): - functions = ["input"] + # functions = ["input"] - not_implemented = [ + # not_implemented = [ - 'test_bool', + # 'test_bool', - 'test_bytearray', + # 'test_bytearray', - 'test_bytes', + # 'test_bytes', - 'test_class', + # 'test_class', - 'test_complex', + # 'test_complex', - 'test_dict', + # 'test_dict', - 'test_float', + # 'test_float', - 'test_frozenset', + # 'test_frozenset', - 'test_int', + # 'test_int', - 'test_list', + # 'test_list', - 'test_set', + # 'test_set', - 'test_str', + # 'test_str', - 'test_tuple', + # 'test_tuple', - ] + # ]
Disable builtin tests for input() as it hangs
## Code Before: from .. utils import TranspileTestCase, BuiltinFunctionTestCase class InputTests(TranspileTestCase): pass class BuiltinInputFunctionTests(BuiltinFunctionTestCase, TranspileTestCase): functions = ["input"] not_implemented = [ 'test_bool', 'test_bytearray', 'test_bytes', 'test_class', 'test_complex', 'test_dict', 'test_float', 'test_frozenset', 'test_int', 'test_list', 'test_set', 'test_str', 'test_tuple', ] ## Instruction: Disable builtin tests for input() as it hangs ## Code After: from .. utils import TranspileTestCase, BuiltinFunctionTestCase class InputTests(TranspileTestCase): pass # class BuiltinInputFunctionTests(BuiltinFunctionTestCase, TranspileTestCase): # functions = ["input"] # not_implemented = [ # 'test_bool', # 'test_bytearray', # 'test_bytes', # 'test_class', # 'test_complex', # 'test_dict', # 'test_float', # 'test_frozenset', # 'test_int', # 'test_list', # 'test_set', # 'test_str', # 'test_tuple', # ]
from .. utils import TranspileTestCase, BuiltinFunctionTestCase class InputTests(TranspileTestCase): pass - class BuiltinInputFunctionTests(BuiltinFunctionTestCase, TranspileTestCase): + # class BuiltinInputFunctionTests(BuiltinFunctionTestCase, TranspileTestCase): ? ++ - functions = ["input"] + # functions = ["input"] ? ++ - not_implemented = [ + # not_implemented = [ ? ++ - 'test_bool', + # 'test_bool', ? ++ - 'test_bytearray', + # 'test_bytearray', ? ++ - 'test_bytes', + # 'test_bytes', ? ++ - 'test_class', + # 'test_class', ? ++ - 'test_complex', + # 'test_complex', ? ++ - 'test_dict', + # 'test_dict', ? ++ - 'test_float', + # 'test_float', ? ++ - 'test_frozenset', + # 'test_frozenset', ? ++ - 'test_int', + # 'test_int', ? ++ - 'test_list', + # 'test_list', ? ++ - 'test_set', + # 'test_set', ? ++ - 'test_str', + # 'test_str', ? ++ - 'test_tuple', + # 'test_tuple', ? ++ - ] + # ] ? ++
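Commenting the class out removes it from test discovery entirely. A hedged alternative, had the authors wanted the tests reported as skipped rather than silently absent, is unittest.skip; this is a sketch of that pattern, not what the commit does.

import unittest

@unittest.skip("input() blocks on stdin under the transpile harness")
class BuiltinInputFunctionTests(unittest.TestCase):
    def test_placeholder(self):
        pass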
22f94c5bb08ee6ae816109bdc06eab9e1974884a
app/models/cnes_professional.py
app/models/cnes_professional.py
from sqlalchemy import Column, Integer, String, func from app import db class CnesProfessional(db.Model): __tablename__ = 'cnes_professional' year = Column(Integer, primary_key=True) region = Column(String(1), primary_key=True) mesoregion = Column(String(4), primary_key=True) microregion = Column(String(5), primary_key=True) state = Column(String(2), primary_key=True) municipality = Column(String(7), primary_key=True) cnes = Column(String(7), primary_key=True) @classmethod def dimensions(cls): return [ 'year', 'region', 'mesoregion', 'microregion', 'state', 'municipality', ] @classmethod def aggregate(cls, value): return { 'professionals': func.count(cls.cnes) }[value] @classmethod def values(cls): return ['professionals']
from sqlalchemy import Column, Integer, String, func from app import db class CnesProfessional(db.Model): __tablename__ = 'cnes_professional' year = Column(Integer, primary_key=True) region = Column(String(1), primary_key=True) mesoregion = Column(String(4), primary_key=True) microregion = Column(String(5), primary_key=True) state = Column(String(2), primary_key=True) municipality = Column(String(7), primary_key=True) cnes = Column(String(7), primary_key=True) cbo = Column(String(2), primary_key=True) @classmethod def dimensions(cls): return [ 'year', 'region', 'mesoregion', 'microregion', 'state', 'municipality', 'cbo', ] @classmethod def aggregate(cls, value): return { 'professionals': func.count(cls.cnes) }[value] @classmethod def values(cls): return ['professionals']
Add CBO column to cnes professional
Add CBO column to cnes professional
Python
mit
daniel1409/dataviva-api,DataViva/dataviva-api
from sqlalchemy import Column, Integer, String, func from app import db class CnesProfessional(db.Model): __tablename__ = 'cnes_professional' year = Column(Integer, primary_key=True) region = Column(String(1), primary_key=True) mesoregion = Column(String(4), primary_key=True) microregion = Column(String(5), primary_key=True) state = Column(String(2), primary_key=True) municipality = Column(String(7), primary_key=True) cnes = Column(String(7), primary_key=True) + cbo = Column(String(2), primary_key=True) @classmethod def dimensions(cls): return [ 'year', 'region', 'mesoregion', 'microregion', 'state', 'municipality', + 'cbo', ] @classmethod def aggregate(cls, value): return { 'professionals': func.count(cls.cnes) }[value] @classmethod def values(cls): return ['professionals']
Add CBO column to cnes professional
## Code Before: from sqlalchemy import Column, Integer, String, func from app import db class CnesProfessional(db.Model): __tablename__ = 'cnes_professional' year = Column(Integer, primary_key=True) region = Column(String(1), primary_key=True) mesoregion = Column(String(4), primary_key=True) microregion = Column(String(5), primary_key=True) state = Column(String(2), primary_key=True) municipality = Column(String(7), primary_key=True) cnes = Column(String(7), primary_key=True) @classmethod def dimensions(cls): return [ 'year', 'region', 'mesoregion', 'microregion', 'state', 'municipality', ] @classmethod def aggregate(cls, value): return { 'professionals': func.count(cls.cnes) }[value] @classmethod def values(cls): return ['professionals'] ## Instruction: Add CBO column to cnes professional ## Code After: from sqlalchemy import Column, Integer, String, func from app import db class CnesProfessional(db.Model): __tablename__ = 'cnes_professional' year = Column(Integer, primary_key=True) region = Column(String(1), primary_key=True) mesoregion = Column(String(4), primary_key=True) microregion = Column(String(5), primary_key=True) state = Column(String(2), primary_key=True) municipality = Column(String(7), primary_key=True) cnes = Column(String(7), primary_key=True) cbo = Column(String(2), primary_key=True) @classmethod def dimensions(cls): return [ 'year', 'region', 'mesoregion', 'microregion', 'state', 'municipality', 'cbo', ] @classmethod def aggregate(cls, value): return { 'professionals': func.count(cls.cnes) }[value] @classmethod def values(cls): return ['professionals']
from sqlalchemy import Column, Integer, String, func from app import db class CnesProfessional(db.Model): __tablename__ = 'cnes_professional' year = Column(Integer, primary_key=True) region = Column(String(1), primary_key=True) mesoregion = Column(String(4), primary_key=True) microregion = Column(String(5), primary_key=True) state = Column(String(2), primary_key=True) municipality = Column(String(7), primary_key=True) cnes = Column(String(7), primary_key=True) + cbo = Column(String(2), primary_key=True) @classmethod def dimensions(cls): return [ 'year', 'region', 'mesoregion', 'microregion', 'state', 'municipality', + 'cbo', ] @classmethod def aggregate(cls, value): return { 'professionals': func.count(cls.cnes) }[value] @classmethod def values(cls): return ['professionals']
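A sketch of the aggregation the new dimension enables; it assumes the model above is importable and that a SQLAlchemy session is provided by the app's db setup, neither of which is shown here.

from sqlalchemy import func

# professionals per CBO occupation code; `session` is the app's assumed session
query = (
    session.query(CnesProfessional.cbo,
                  func.count(CnesProfessional.cnes).label('professionals'))
    .group_by(CnesProfessional.cbo)
)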
ee494fd205c58029960d4a5702f59418c8110eb3
django_iceberg/context_processors.py
django_iceberg/context_processors.py
import logging logger = logging.getLogger(__name__) from django_iceberg.auth_utils import init_iceberg, get_conf_class def iceberg_settings(request): """ Defines some template variables in context """ conf = get_conf_class(request) if not conf: ICEBERG_API_URL_FULL = "https://api.iceberg.technology" ICEBERG_CORS = "https://api.iceberg.technology/cors/" iceberg_env = 'prod' else: iceberg_env = getattr(conf, 'ICEBERG_ENV', 'prod') ICEBERG_API_URL_FULL = conf.ICEBERG_API_URL_FULL ICEBERG_CORS = conf.ICEBERG_CORS res = { "ICEBERG_ENV": iceberg_env, "ICEBERG_API_URL": ICEBERG_API_URL_FULL, "ICEBERG_CORS": ICEBERG_CORS, } if request.user.is_authenticated(): res['access_token'] = init_iceberg(request).access_token else: res['ICEBERG_ENV'] = None res['access_token'] = "anonymous" return res
import logging logger = logging.getLogger(__name__) from django_iceberg.auth_utils import init_iceberg, get_conf_class def iceberg_settings(request): """ Defines some template variables in context """ conf = get_conf_class(request) if not conf: ICEBERG_API_URL_FULL = "https://api.iceberg.technology" ICEBERG_CORS = "https://api.iceberg.technology/cors/" iceberg_env = 'prod' else: iceberg_env = getattr(conf, 'ICEBERG_ENV', 'prod') ICEBERG_API_URL_FULL = conf.ICEBERG_API_URL_FULL ICEBERG_CORS = conf.ICEBERG_CORS res = { "ICEBERG_ENV": iceberg_env, "ICEBERG_API_URL": ICEBERG_API_URL_FULL, "ICEBERG_CORS": ICEBERG_CORS, } if request.user.is_authenticated(): res['access_token'] = init_iceberg(request).access_token res['username'] = init_iceberg(request).username else: res['ICEBERG_ENV'] = None res['access_token'] = "anonymous" return res
Add username to context in iceberg_settings context processor
Add username to context in iceberg_settings context processor
Python
mit
izberg-marketplace/django-izberg,izberg-marketplace/django-izberg,Iceberg-Marketplace/django-iceberg,Iceberg-Marketplace/django-iceberg
import logging logger = logging.getLogger(__name__) from django_iceberg.auth_utils import init_iceberg, get_conf_class def iceberg_settings(request): """ Defines some template variables in context """ conf = get_conf_class(request) if not conf: ICEBERG_API_URL_FULL = "https://api.iceberg.technology" ICEBERG_CORS = "https://api.iceberg.technology/cors/" iceberg_env = 'prod' else: iceberg_env = getattr(conf, 'ICEBERG_ENV', 'prod') ICEBERG_API_URL_FULL = conf.ICEBERG_API_URL_FULL ICEBERG_CORS = conf.ICEBERG_CORS res = { "ICEBERG_ENV": iceberg_env, "ICEBERG_API_URL": ICEBERG_API_URL_FULL, "ICEBERG_CORS": ICEBERG_CORS, } if request.user.is_authenticated(): res['access_token'] = init_iceberg(request).access_token + res['username'] = init_iceberg(request).username else: res['ICEBERG_ENV'] = None res['access_token'] = "anonymous" return res
Add username to context in iceberg_settings context processor
## Code Before: import logging logger = logging.getLogger(__name__) from django_iceberg.auth_utils import init_iceberg, get_conf_class def iceberg_settings(request): """ Defines some template variables in context """ conf = get_conf_class(request) if not conf: ICEBERG_API_URL_FULL = "https://api.iceberg.technology" ICEBERG_CORS = "https://api.iceberg.technology/cors/" iceberg_env = 'prod' else: iceberg_env = getattr(conf, 'ICEBERG_ENV', 'prod') ICEBERG_API_URL_FULL = conf.ICEBERG_API_URL_FULL ICEBERG_CORS = conf.ICEBERG_CORS res = { "ICEBERG_ENV": iceberg_env, "ICEBERG_API_URL": ICEBERG_API_URL_FULL, "ICEBERG_CORS": ICEBERG_CORS, } if request.user.is_authenticated(): res['access_token'] = init_iceberg(request).access_token else: res['ICEBERG_ENV'] = None res['access_token'] = "anonymous" return res ## Instruction: Add username to context in iceberg_settings context processor ## Code After: import logging logger = logging.getLogger(__name__) from django_iceberg.auth_utils import init_iceberg, get_conf_class def iceberg_settings(request): """ Defines some template variables in context """ conf = get_conf_class(request) if not conf: ICEBERG_API_URL_FULL = "https://api.iceberg.technology" ICEBERG_CORS = "https://api.iceberg.technology/cors/" iceberg_env = 'prod' else: iceberg_env = getattr(conf, 'ICEBERG_ENV', 'prod') ICEBERG_API_URL_FULL = conf.ICEBERG_API_URL_FULL ICEBERG_CORS = conf.ICEBERG_CORS res = { "ICEBERG_ENV": iceberg_env, "ICEBERG_API_URL": ICEBERG_API_URL_FULL, "ICEBERG_CORS": ICEBERG_CORS, } if request.user.is_authenticated(): res['access_token'] = init_iceberg(request).access_token res['username'] = init_iceberg(request).username else: res['ICEBERG_ENV'] = None res['access_token'] = "anonymous" return res
import logging logger = logging.getLogger(__name__) from django_iceberg.auth_utils import init_iceberg, get_conf_class def iceberg_settings(request): """ Defines some template variables in context """ conf = get_conf_class(request) if not conf: ICEBERG_API_URL_FULL = "https://api.iceberg.technology" ICEBERG_CORS = "https://api.iceberg.technology/cors/" iceberg_env = 'prod' else: iceberg_env = getattr(conf, 'ICEBERG_ENV', 'prod') ICEBERG_API_URL_FULL = conf.ICEBERG_API_URL_FULL ICEBERG_CORS = conf.ICEBERG_CORS res = { "ICEBERG_ENV": iceberg_env, "ICEBERG_API_URL": ICEBERG_API_URL_FULL, "ICEBERG_CORS": ICEBERG_CORS, } if request.user.is_authenticated(): res['access_token'] = init_iceberg(request).access_token + res['username'] = init_iceberg(request).username else: res['ICEBERG_ENV'] = None res['access_token'] = "anonymous" return res
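Illustrative shape of the context returned for an authenticated user after this change; every value below is made up. Note that init_iceberg(request) is now called twice per request; caching its return value in a local variable would avoid the second call.

context = {
    "ICEBERG_ENV": "prod",
    "ICEBERG_API_URL": "https://api.iceberg.technology",
    "ICEBERG_CORS": "https://api.iceberg.technology/cors/",
    "access_token": "abc123",  # from init_iceberg(request).access_token
    "username": "alice",       # the field this commit adds
}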
46df020f5f349ac02c509e334ffd7e1f5970915b
detectem/exceptions.py
detectem/exceptions.py
class DockerStartError(Exception): pass class NotNamedParameterFound(Exception): pass class SplashError(Exception): def __init__(self, msg): self.msg = 'Splash error: {}'.format(msg) super().__init__(msg) class NoPluginsError(Exception): def __init__(self, msg): self.msg = msg super().__init__(msg)
class DockerStartError(Exception): pass class NotNamedParameterFound(Exception): pass class SplashError(Exception): def __init__(self, msg): self.msg = 'Splash error: {}'.format(msg) super().__init__(self.msg) class NoPluginsError(Exception): def __init__(self, msg): self.msg = msg super().__init__(self.msg)
Fix in tests for exception messages
Fix in tests for exception messages
Python
mit
spectresearch/detectem
class DockerStartError(Exception): pass class NotNamedParameterFound(Exception): pass class SplashError(Exception): def __init__(self, msg): self.msg = 'Splash error: {}'.format(msg) - super().__init__(msg) + super().__init__(self.msg) class NoPluginsError(Exception): def __init__(self, msg): self.msg = msg - super().__init__(msg) + super().__init__(self.msg)
Fix in tests for exception messages
## Code Before: class DockerStartError(Exception): pass class NotNamedParameterFound(Exception): pass class SplashError(Exception): def __init__(self, msg): self.msg = 'Splash error: {}'.format(msg) super().__init__(msg) class NoPluginsError(Exception): def __init__(self, msg): self.msg = msg super().__init__(msg) ## Instruction: Fix in tests for exception messages ## Code After: class DockerStartError(Exception): pass class NotNamedParameterFound(Exception): pass class SplashError(Exception): def __init__(self, msg): self.msg = 'Splash error: {}'.format(msg) super().__init__(self.msg) class NoPluginsError(Exception): def __init__(self, msg): self.msg = msg super().__init__(self.msg)
class DockerStartError(Exception): pass class NotNamedParameterFound(Exception): pass class SplashError(Exception): def __init__(self, msg): self.msg = 'Splash error: {}'.format(msg) - super().__init__(msg) + super().__init__(self.msg) ? +++++ class NoPluginsError(Exception): def __init__(self, msg): self.msg = msg - super().__init__(msg) + super().__init__(self.msg) ? +++++
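The fix matters because Exception.__init__ previously received the unprefixed msg, so str(exc) and exc.msg disagreed for SplashError. A self-contained check of the patched behavior:

class SplashError(Exception):
    def __init__(self, msg):
        self.msg = 'Splash error: {}'.format(msg)
        super().__init__(self.msg)

err = SplashError('timeout')
assert str(err) == err.msg == 'Splash error: timeout'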
efdfcccf57b294d529039095c2c71401546b3519
elephas/utils/functional_utils.py
elephas/utils/functional_utils.py
from __future__ import absolute_import import numpy as np def add_params(p1, p2): res = [] for x,y in zip(p1,p2): res.append(x+y) return res def get_neutral(array): res = [] for x in array: res.append(np.zeros_like(x)) return res def divide_by(array_list, num_workers): for i in xrange(len(array_list)): array_list[i] /= num_workers return array_list
from __future__ import absolute_import import numpy as np def add_params(p1, p2): res = [] for x,y in zip(p1,p2): res.append(x+y) return res def subtract_params(p1, p2): res = [] for x,y in zip(p1,p2): res.append(x-y) return res def get_neutral(array): res = [] for x in array: res.append(np.zeros_like(x)) return res def divide_by(array_list, num_workers): for i in xrange(len(array_list)): array_list[i] /= num_workers return array_list
Subtract two sets of parameters
Subtract two sets of parameters
Python
mit
FighterLYL/elephas,maxpumperla/elephas,CheMcCandless/elephas,daishichao/elephas,maxpumperla/elephas,aarzhaev/elephas,darcy0511/elephas
from __future__ import absolute_import import numpy as np def add_params(p1, p2): res = [] for x,y in zip(p1,p2): res.append(x+y) + return res + + def subtract_params(p1, p2): + res = [] + for x,y in zip(p1,p2): + res.append(x-y) return res def get_neutral(array): res = [] for x in array: res.append(np.zeros_like(x)) return res def divide_by(array_list, num_workers): for i in xrange(len(array_list)): array_list[i] /= num_workers return array_list
Subtract two sets of parameters
## Code Before: from __future__ import absolute_import import numpy as np def add_params(p1, p2): res = [] for x,y in zip(p1,p2): res.append(x+y) return res def get_neutral(array): res = [] for x in array: res.append(np.zeros_like(x)) return res def divide_by(array_list, num_workers): for i in xrange(len(array_list)): array_list[i] /= num_workers return array_list ## Instruction: Subtract two sets of parameters ## Code After: from __future__ import absolute_import import numpy as np def add_params(p1, p2): res = [] for x,y in zip(p1,p2): res.append(x+y) return res def subtract_params(p1, p2): res = [] for x,y in zip(p1,p2): res.append(x-y) return res def get_neutral(array): res = [] for x in array: res.append(np.zeros_like(x)) return res def divide_by(array_list, num_workers): for i in xrange(len(array_list)): array_list[i] /= num_workers return array_list
from __future__ import absolute_import import numpy as np def add_params(p1, p2): res = [] for x,y in zip(p1,p2): res.append(x+y) + return res + + def subtract_params(p1, p2): + res = [] + for x,y in zip(p1,p2): + res.append(x-y) return res def get_neutral(array): res = [] for x in array: res.append(np.zeros_like(x)) return res def divide_by(array_list, num_workers): for i in xrange(len(array_list)): array_list[i] /= num_workers return array_list
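A quick check of the new helper's element-wise rule with NumPy arrays; note the module's divide_by still uses Python 2's xrange, so this sketch exercises only the subtract_params logic.

import numpy as np

p1 = [np.array([1.0, 2.0]), np.array([3.0])]
p2 = [np.array([0.5, 0.5]), np.array([1.0])]
res = [x - y for x, y in zip(p1, p2)]  # same element-wise rule as subtract_params
# -> [array([0.5, 1.5]), array([2.])]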
fb665cf8d6c0eb6c794a41eaf312c35473d1bdf0
tests/settings_complex.py
tests/settings_complex.py
from settings import * INSTALLED_APPS.append('complex') INSTALLED_APPS.append('django.contrib.comments') ROOT_URLCONF = 'complex.urls'
from settings import * INSTALLED_APPS += [ 'complex', 'django.contrib.comments', 'django.contrib.sites', ] ROOT_URLCONF = 'complex.urls'
Add sites app, change how installed_apps are edited.
Add sites app, change how installed_apps are edited.
Python
bsd-3-clause
esatterwhite/django-tastypie,beni55/django-tastypie,Eksmo/django-tastypie,SeanHayes/django-tastypie,cbxcube/bezrealitky.py,ywarezk/nerdeez-tastypie,mohabusama/django-tastypie,ocadotechnology/django-tastypie,ocadotechnology/django-tastypie,waveaccounting/django-tastypie,SeanHayes/django-tastypie,Eksmo/django-tastypie,beedesk/django-tastypie,yfli/django-tastypie,shownomercy/django-tastypie,coxmediagroup/django-tastypie,Eksmo/django-tastypie,guilhermegm/django-tastypie,backslash112/django-tastypie,grischa/django-tastypie,mthornhill/django-tastypie,glencoates/django-tastypie,loftywaif002/django-tastypie,marcosleonefilho/hoop-tastypie,backslash112/django-tastypie,beni55/django-tastypie,SiggyF/django-tastypie,VishvajitP/django-tastypie,VishvajitP/django-tastypie,ipsosante/django-tastypie,waveaccounting/django-tastypie,doselect/django-tastypie,ipsosante/django-tastypie,beedesk/django-tastypie,yfli/django-tastypie,mjschultz/django-tastefulpy,SiggyF/django-tastypie,doselect/django-tastypie,beni55/django-tastypie,frifri/django-tastypie,mitar/django-tastypie,nomadjourney/django-tastypie,ipsosante/django-tastypie,wlanslovenija/django-tastypie,igavrilov/django-tastypie,strets123/django-tastypie,mohabusama/django-tastypie,Perkville/django-tastypie,annacorobco/django-tastypie,ywarezk/nerdeez-tastypie,Perkville/django-tastypie,strets123/django-tastypie-tweaks,igavrilov/django-tastypie,cbxcube/bezrealitky.py,ocadotechnology/django-tastypie,nomadjourney/django-tastypie,annacorobco/django-tastypie,frifri/django-tastypie,strets123/django-tastypie,guilhermegm/django-tastypie,mthornhill/django-tastypie,tyaslab/django-tastypie,strets123/django-tastypie,loftywaif002/django-tastypie,pveglia/django-tastypie,rbraley/django-tastypie,beedesk/django-tastypie,doselect/django-tastypie,backslash112/django-tastypie,esatterwhite/django-tastypie,shownomercy/django-tastypie,pveglia/django-tastypie,strets123/django-tastypie-tweaks,guilhermegm/django-tastypie,wlanslovenija/django-tastypie,mthornhill/django-tastypie,nomadjourney/django-tastypie,marcosleonefilho/hoop-tastypie,esatterwhite/django-tastypie,akvo/django-tastypie,igavrilov/django-tastypie,wlanslovenija/django-tastypie,mjschultz/django-tastefulpy,cbxcube/bezrealitky.py,loftywaif002/django-tastypie,SiggyF/django-tastypie,sideffect0/django-tastypie,akvo/django-tastypie,coxmediagroup/django-tastypie,Perkville/django-tastypie,pveglia/django-tastypie,VishvajitP/django-tastypie,strets123/django-tastypie-tweaks,mohabusama/django-tastypie,coxmediagroup/django-tastypie,glencoates/django-tastypie,grischa/django-tastypie,sideffect0/django-tastypie,mjschultz/django-tastefulpy,mitar/django-tastypie,sideffect0/django-tastypie,rbraley/django-tastypie,waveaccounting/django-tastypie,SeanHayes/django-tastypie,tyaslab/django-tastypie,shownomercy/django-tastypie,yfli/django-tastypie,annacorobco/django-tastypie
from settings import * - INSTALLED_APPS.append('complex') - INSTALLED_APPS.append('django.contrib.comments') + + INSTALLED_APPS += [ + 'complex', + 'django.contrib.comments', + 'django.contrib.sites', + ] ROOT_URLCONF = 'complex.urls'
Add sites app, change how installed_apps are edited.
## Code Before: from settings import * INSTALLED_APPS.append('complex') INSTALLED_APPS.append('django.contrib.comments') ROOT_URLCONF = 'complex.urls' ## Instruction: Add sites app, change how installed_apps are edited. ## Code After: from settings import * INSTALLED_APPS += [ 'complex', 'django.contrib.comments', 'django.contrib.sites', ] ROOT_URLCONF = 'complex.urls'
from settings import * - INSTALLED_APPS.append('complex') - INSTALLED_APPS.append('django.contrib.comments') + + INSTALLED_APPS += [ + 'complex', + 'django.contrib.comments', + 'django.contrib.sites', + ] ROOT_URLCONF = 'complex.urls'
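The += form replaces two .append() calls with one statement; a minimal illustration of the equivalence, using a generic list name rather than the real settings.

apps = ['django.contrib.auth']
apps += ['complex', 'django.contrib.comments', 'django.contrib.sites']
# equivalent to calling apps.append(...) once per entry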
62d7c94968d70564839b32375fac6608720c2a67
backend/pycon/urls.py
backend/pycon/urls.py
from api.views import GraphQLView from django.contrib import admin from django.urls import include, path from django.views.decorators.csrf import csrf_exempt urlpatterns = [ path("admin/", admin.site.urls), path("graphql", csrf_exempt(GraphQLView.as_view()), name="graphql"), path("user/", include("users.urls")), path("", include("social_django.urls", namespace="social")), path("", include("payments.urls")), ]
from api.views import GraphQLView from django.conf import settings from django.conf.urls.static import static from django.contrib import admin from django.urls import include, path from django.views.decorators.csrf import csrf_exempt urlpatterns = [ path("admin/", admin.site.urls), path("graphql", csrf_exempt(GraphQLView.as_view()), name="graphql"), path("user/", include("users.urls")), path("", include("social_django.urls", namespace="social")), path("", include("payments.urls")), ] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
Add media url when running in debug mode
Add media url when running in debug mode
Python
mit
patrick91/pycon,patrick91/pycon
from api.views import GraphQLView + from django.conf import settings + from django.conf.urls.static import static from django.contrib import admin from django.urls import include, path from django.views.decorators.csrf import csrf_exempt urlpatterns = [ path("admin/", admin.site.urls), path("graphql", csrf_exempt(GraphQLView.as_view()), name="graphql"), path("user/", include("users.urls")), path("", include("social_django.urls", namespace="social")), path("", include("payments.urls")), - ] + ] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
Add media url when running in debug mode
## Code Before: from api.views import GraphQLView from django.contrib import admin from django.urls import include, path from django.views.decorators.csrf import csrf_exempt urlpatterns = [ path("admin/", admin.site.urls), path("graphql", csrf_exempt(GraphQLView.as_view()), name="graphql"), path("user/", include("users.urls")), path("", include("social_django.urls", namespace="social")), path("", include("payments.urls")), ] ## Instruction: Add media url when running in debug mode ## Code After: from api.views import GraphQLView from django.conf import settings from django.conf.urls.static import static from django.contrib import admin from django.urls import include, path from django.views.decorators.csrf import csrf_exempt urlpatterns = [ path("admin/", admin.site.urls), path("graphql", csrf_exempt(GraphQLView.as_view()), name="graphql"), path("user/", include("users.urls")), path("", include("social_django.urls", namespace="social")), path("", include("payments.urls")), ] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
from api.views import GraphQLView + from django.conf import settings + from django.conf.urls.static import static from django.contrib import admin from django.urls import include, path from django.views.decorators.csrf import csrf_exempt urlpatterns = [ path("admin/", admin.site.urls), path("graphql", csrf_exempt(GraphQLView.as_view()), name="graphql"), path("user/", include("users.urls")), path("", include("social_django.urls", namespace="social")), path("", include("payments.urls")), - ] + ] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
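What the appended helper contributes, sketched with assumed settings values; static() returns an empty list when DEBUG is off, which is why the pattern is safe to leave in place for production configs.

from django.conf import settings
from django.conf.urls.static import static

# Assumed values for illustration: DEBUG=True, MEDIA_URL='/media/',
# MEDIA_ROOT='/srv/app/media'. Inside a configured project this yields one
# pattern serving /media/<path> from MEDIA_ROOT; with DEBUG=False it yields [].
media_patterns = static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)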
e4841c674545892dfc6a8390574cec7c2836e004
main.py
main.py
from SimpleCV import * winsize = (640,480) display = Display(winsize) video = VirtualCamera('stefan_eye.mp4', 'video') while display.isNotDone(): a = video.getImage() a.rotate(90).invert().toGray().binarize().save(display)
from SimpleCV import * winsize = (640,480) display = Display(winsize) video = VirtualCamera('stefan_eye.mp4', 'video') while display.isNotDone(): image = video.getImage().rotate(90).crop(850,50,400,400) image2 = image.colorDistance(Color.RED) blobs = image2.findBlobs() image3 = image2.grayscale() if blobs: for b in blobs: if b.isCircle(0.7) and b.radius() > 3: image.drawCircle((b.x,b.y),b.radius(),Color.YELLOW,2) image.show()
Add code to accommodate a new '3 circles' approach
Add code to accommodate a new '3 circles' approach
Python
mit
ColdSauce/Iris
from SimpleCV import * winsize = (640,480) display = Display(winsize) - video = VirtualCamera('stefan_eye.mp4', 'video') while display.isNotDone(): - a = video.getImage() - a.rotate(90).invert().toGray().binarize().save(display) + image = video.getImage().rotate(90).crop(850,50,400,400) + image2 = image.colorDistance(Color.RED) + blobs = image2.findBlobs() + image3 = image2.grayscale() + if blobs: + for b in blobs: + if b.isCircle(0.7) and b.radius() > 3: + image.drawCircle((b.x,b.y),b.radius(),Color.YELLOW,2) + image.show() -
Add code to accommodate a new '3 circles' approach
## Code Before:
from SimpleCV import *

winsize = (640,480)
display = Display(winsize)

video = VirtualCamera('stefan_eye.mp4', 'video')
while display.isNotDone():
    a = video.getImage()
    a.rotate(90).invert().toGray().binarize().save(display)
## Instruction:
Add code to accommodate a new '3 circles' approach
## Code After:
from SimpleCV import *

winsize = (640,480)
display = Display(winsize)
video = VirtualCamera('stefan_eye.mp4', 'video')
while display.isNotDone():
    image = video.getImage().rotate(90).crop(850,50,400,400)
    image2 = image.colorDistance(Color.RED)
    blobs = image2.findBlobs()
    image3 = image2.grayscale()
    if blobs:
        for b in blobs:
            if b.isCircle(0.7) and b.radius() > 3:
                image.drawCircle((b.x,b.y),b.radius(),Color.YELLOW,2)
    image.show()
from SimpleCV import * winsize = (640,480) display = Display(winsize) - video = VirtualCamera('stefan_eye.mp4', 'video') while display.isNotDone(): - a = video.getImage() - a.rotate(90).invert().toGray().binarize().save(display) - + image = video.getImage().rotate(90).crop(850,50,400,400) + image2 = image.colorDistance(Color.RED) + blobs = image2.findBlobs() + image3 = image2.grayscale() + if blobs: + for b in blobs: + if b.isCircle(0.7) and b.radius() > 3: + image.drawCircle((b.x,b.y),b.radius(),Color.YELLOW,2) + image.show()
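For intuition about the isCircle(0.7) test: circularity-style metrics compare a blob's area to its perimeter and peak at 1.0 for a perfect circle. A pure-Python illustration of that idea; SimpleCV's internal metric may differ in detail.

import math

def circularity(area, perimeter):
    # 1.0 for a perfect circle, smaller for irregular shapes
    return 4 * math.pi * area / (perimeter ** 2)

r = 10.0
print(circularity(math.pi * r ** 2, 2 * math.pi * r))  # -> 1.0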
5f128bbfc61169ac6b5f0e9f4dc6bcd05092382c
requests_cache/serializers/pipeline.py
requests_cache/serializers/pipeline.py
from typing import Any, List, Union from ..models import CachedResponse class Stage: """Generic class to wrap serialization steps with consistent ``dumps()`` and ``loads()`` methods""" def __init__(self, obj: Any, dumps: str = "dumps", loads: str = "loads"): self.obj = obj self.dumps = getattr(obj, dumps) self.loads = getattr(obj, loads) class SerializerPipeline: """A sequence of steps used to serialize and deserialize response objects""" def __init__(self, steps: List): self.steps = steps self.dump_steps = [step.dumps for step in steps] self.load_steps = [step.loads for step in reversed(steps)] def dumps(self, value) -> Union[str, bytes]: for step in self.dump_steps: value = step(value) return value def loads(self, value) -> CachedResponse: for step in self.load_steps: value = step(value) return value
from typing import Any, Callable, List, Union from ..models import CachedResponse class Stage: """Generic class to wrap serialization steps with consistent ``dumps()`` and ``loads()`` methods Args: obj: Serializer object or module, if applicable dumps: Serialization function, or name of method on ``obj`` loads: Deserialization function, or name of method on ``obj`` """ def __init__( self, obj: Any = None, dumps: Union[str, Callable] = 'dumps', loads: Union[str, Callable] = 'loads', ): self.obj = obj self.dumps = getattr(obj, dumps) if isinstance(dumps, str) else dumps self.loads = getattr(obj, loads) if isinstance(loads, str) else loads class SerializerPipeline: """A sequence of steps used to serialize and deserialize response objects. This can be initialized with :py:class:`Stage` objects, or any objects with ``dumps()`` and ``loads()`` methods """ def __init__(self, stages: List): self.steps = stages self.dump_steps = [step.dumps for step in stages] self.load_steps = [step.loads for step in reversed(stages)] def dumps(self, value) -> Union[str, bytes]: for step in self.dump_steps: value = step(value) return value def loads(self, value) -> CachedResponse: for step in self.load_steps: value = step(value) return value
Allow Stage objects to take functions instead of object + method names
Allow Stage objects to take functions instead of object + method names
Python
bsd-2-clause
reclosedev/requests-cache
- from typing import Any, List, Union + from typing import Any, Callable, List, Union from ..models import CachedResponse class Stage: - """Generic class to wrap serialization steps with consistent ``dumps()`` and ``loads()`` methods""" + """Generic class to wrap serialization steps with consistent ``dumps()`` and ``loads()`` methods - def __init__(self, obj: Any, dumps: str = "dumps", loads: str = "loads"): + Args: + obj: Serializer object or module, if applicable + dumps: Serialization function, or name of method on ``obj`` + loads: Deserialization function, or name of method on ``obj`` + """ + + def __init__( + self, + obj: Any = None, + dumps: Union[str, Callable] = 'dumps', + loads: Union[str, Callable] = 'loads', + ): self.obj = obj - self.dumps = getattr(obj, dumps) - self.loads = getattr(obj, loads) + self.dumps = getattr(obj, dumps) if isinstance(dumps, str) else dumps + self.loads = getattr(obj, loads) if isinstance(loads, str) else loads class SerializerPipeline: - """A sequence of steps used to serialize and deserialize response objects""" + """A sequence of steps used to serialize and deserialize response objects. + This can be initialized with :py:class:`Stage` objects, or any objects with ``dumps()`` and + ``loads()`` methods + """ - def __init__(self, steps: List): + def __init__(self, stages: List): - self.steps = steps + self.steps = stages - self.dump_steps = [step.dumps for step in steps] + self.dump_steps = [step.dumps for step in stages] - self.load_steps = [step.loads for step in reversed(steps)] + self.load_steps = [step.loads for step in reversed(stages)] def dumps(self, value) -> Union[str, bytes]: for step in self.dump_steps: value = step(value) return value def loads(self, value) -> CachedResponse: for step in self.load_steps: value = step(value) return value
Allow Stage objects to take functions instead of object + method names
## Code Before: from typing import Any, List, Union from ..models import CachedResponse class Stage: """Generic class to wrap serialization steps with consistent ``dumps()`` and ``loads()`` methods""" def __init__(self, obj: Any, dumps: str = "dumps", loads: str = "loads"): self.obj = obj self.dumps = getattr(obj, dumps) self.loads = getattr(obj, loads) class SerializerPipeline: """A sequence of steps used to serialize and deserialize response objects""" def __init__(self, steps: List): self.steps = steps self.dump_steps = [step.dumps for step in steps] self.load_steps = [step.loads for step in reversed(steps)] def dumps(self, value) -> Union[str, bytes]: for step in self.dump_steps: value = step(value) return value def loads(self, value) -> CachedResponse: for step in self.load_steps: value = step(value) return value ## Instruction: Allow Stage objects to take functions instead of object + method names ## Code After: from typing import Any, Callable, List, Union from ..models import CachedResponse class Stage: """Generic class to wrap serialization steps with consistent ``dumps()`` and ``loads()`` methods Args: obj: Serializer object or module, if applicable dumps: Serialization function, or name of method on ``obj`` loads: Deserialization function, or name of method on ``obj`` """ def __init__( self, obj: Any = None, dumps: Union[str, Callable] = 'dumps', loads: Union[str, Callable] = 'loads', ): self.obj = obj self.dumps = getattr(obj, dumps) if isinstance(dumps, str) else dumps self.loads = getattr(obj, loads) if isinstance(loads, str) else loads class SerializerPipeline: """A sequence of steps used to serialize and deserialize response objects. This can be initialized with :py:class:`Stage` objects, or any objects with ``dumps()`` and ``loads()`` methods """ def __init__(self, stages: List): self.steps = stages self.dump_steps = [step.dumps for step in stages] self.load_steps = [step.loads for step in reversed(stages)] def dumps(self, value) -> Union[str, bytes]: for step in self.dump_steps: value = step(value) return value def loads(self, value) -> CachedResponse: for step in self.load_steps: value = step(value) return value
- from typing import Any, List, Union + from typing import Any, Callable, List, Union ? ++++++++++ from ..models import CachedResponse class Stage: - """Generic class to wrap serialization steps with consistent ``dumps()`` and ``loads()`` methods""" ? --- + """Generic class to wrap serialization steps with consistent ``dumps()`` and ``loads()`` methods - def __init__(self, obj: Any, dumps: str = "dumps", loads: str = "loads"): + Args: + obj: Serializer object or module, if applicable + dumps: Serialization function, or name of method on ``obj`` + loads: Deserialization function, or name of method on ``obj`` + """ + + def __init__( + self, + obj: Any = None, + dumps: Union[str, Callable] = 'dumps', + loads: Union[str, Callable] = 'loads', + ): self.obj = obj - self.dumps = getattr(obj, dumps) - self.loads = getattr(obj, loads) + self.dumps = getattr(obj, dumps) if isinstance(dumps, str) else dumps + self.loads = getattr(obj, loads) if isinstance(loads, str) else loads class SerializerPipeline: - """A sequence of steps used to serialize and deserialize response objects""" ? ^^^ + """A sequence of steps used to serialize and deserialize response objects. ? ^ + This can be initialized with :py:class:`Stage` objects, or any objects with ``dumps()`` and + ``loads()`` methods + """ - def __init__(self, steps: List): ? - + def __init__(self, stages: List): ? ++ - self.steps = steps ? - + self.steps = stages ? ++ - self.dump_steps = [step.dumps for step in steps] ? - + self.dump_steps = [step.dumps for step in stages] ? ++ - self.load_steps = [step.loads for step in reversed(steps)] ? - + self.load_steps = [step.loads for step in reversed(stages)] ? ++ def dumps(self, value) -> Union[str, bytes]: for step in self.dump_steps: value = step(value) return value def loads(self, value) -> CachedResponse: for step in self.load_steps: value = step(value) return value
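A usage sketch of what the change enables: mixing a module (method-name lookup) with bare callables in one pipeline. It assumes the Stage and SerializerPipeline classes above are in scope as shown.

import gzip
import json

pipeline = SerializerPipeline([
    Stage(json),  # module object; resolves json.dumps / json.loads by name
    Stage(dumps=lambda s: gzip.compress(s.encode()),
          loads=lambda b: gzip.decompress(b).decode()),  # plain callables
])
packed = pipeline.dumps({'a': 1})          # json -> gzip
assert pipeline.loads(packed) == {'a': 1}  # gzip -> json, steps reversed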
9da9ec6618de8c9a1276e44e81c32639d42efada
setup.py
setup.py
from distutils.core import setup setup( name='hy-py', version='0.0.2', packages=['hy'], license='MIT', author='Joakim Ekberg', author_email='[email protected]', url='https://github.com/kalasjocke/hy', long_description=open('README.md').read(), install_requires=open('requirements.txt').read().split(), )
from distutils.core import setup setup( name='hy-py', version='0.0.3', packages=['hy', 'hy.adapters'], license='MIT', author='Joakim Ekberg', author_email='[email protected]', url='https://github.com/kalasjocke/hy', long_description=open('README.md').read(), install_requires=open('requirements.txt').read().split(), )
Include the serializer adapters in the PyPI package
Include the serializer adapters in the PyPI package
Python
mit
kalasjocke/hyp
from distutils.core import setup setup( name='hy-py', - version='0.0.2', + version='0.0.3', - packages=['hy'], + packages=['hy', 'hy.adapters'], license='MIT', author='Joakim Ekberg', author_email='[email protected]', url='https://github.com/kalasjocke/hy', long_description=open('README.md').read(), install_requires=open('requirements.txt').read().split(), )
Include the serializer adapters in the PyPI package
## Code Before: from distutils.core import setup setup( name='hy-py', version='0.0.2', packages=['hy'], license='MIT', author='Joakim Ekberg', author_email='[email protected]', url='https://github.com/kalasjocke/hy', long_description=open('README.md').read(), install_requires=open('requirements.txt').read().split(), ) ## Instruction: Include the serializer adapters in the PyPI package ## Code After: from distutils.core import setup setup( name='hy-py', version='0.0.3', packages=['hy', 'hy.adapters'], license='MIT', author='Joakim Ekberg', author_email='[email protected]', url='https://github.com/kalasjocke/hy', long_description=open('README.md').read(), install_requires=open('requirements.txt').read().split(), )
from distutils.core import setup setup( name='hy-py', - version='0.0.2', ? ^ + version='0.0.3', ? ^ - packages=['hy'], + packages=['hy', 'hy.adapters'], license='MIT', author='Joakim Ekberg', author_email='[email protected]', url='https://github.com/kalasjocke/hy', long_description=open('README.md').read(), install_requires=open('requirements.txt').read().split(), )
562b56d67d7d292d7c63ec8c3f453bae92a3b073
tests/test_wysiwyg_editor.py
tests/test_wysiwyg_editor.py
from . import TheInternetTestCase from helium.api import click, Text, press, CONTROL, COMMAND, write from sys import platform class WYSIWYGEditorTest(TheInternetTestCase): def get_page(self): return "http://the-internet.herokuapp.com/tinymce" def test_use_wysiwyg_editor(self): self.assertTrue(Text("Your content goes here.").exists()) click("Your content goes here.") if platform == 'darwin': press(COMMAND + 'a') else: press(CONTROL + 'a') write("Hello Helium!") self.assertTrue(Text("Hello Helium!").exists())
from . import TheInternetTestCase from helium.api import click, Text, write class WYSIWYGEditorTest(TheInternetTestCase): def get_page(self): return "http://the-internet.herokuapp.com/tinymce" def test_use_wysiwyg_editor(self): self.assertTrue(Text("Your content goes here.").exists()) click("File") click("New document") write("Hello Helium!") self.assertTrue(Text("Hello Helium!").exists())
Simplify the WYSIWYG editor test case.
Simplify the WYSIWYG editor test case.
Python
mit
bugfree-software/the-internet-solution-python
from . import TheInternetTestCase - from helium.api import click, Text, press, CONTROL, COMMAND, write + from helium.api import click, Text, write - from sys import platform class WYSIWYGEditorTest(TheInternetTestCase): def get_page(self): return "http://the-internet.herokuapp.com/tinymce" def test_use_wysiwyg_editor(self): self.assertTrue(Text("Your content goes here.").exists()) + click("File") + click("New document") - click("Your content goes here.") - if platform == 'darwin': - press(COMMAND + 'a') - else: - press(CONTROL + 'a') write("Hello Helium!") self.assertTrue(Text("Hello Helium!").exists())
Simplify the WYSIWYG editor test case.
## Code Before: from . import TheInternetTestCase from helium.api import click, Text, press, CONTROL, COMMAND, write from sys import platform class WYSIWYGEditorTest(TheInternetTestCase): def get_page(self): return "http://the-internet.herokuapp.com/tinymce" def test_use_wysiwyg_editor(self): self.assertTrue(Text("Your content goes here.").exists()) click("Your content goes here.") if platform == 'darwin': press(COMMAND + 'a') else: press(CONTROL + 'a') write("Hello Helium!") self.assertTrue(Text("Hello Helium!").exists()) ## Instruction: Simplify the WYSIWYG editor test case. ## Code After: from . import TheInternetTestCase from helium.api import click, Text, write class WYSIWYGEditorTest(TheInternetTestCase): def get_page(self): return "http://the-internet.herokuapp.com/tinymce" def test_use_wysiwyg_editor(self): self.assertTrue(Text("Your content goes here.").exists()) click("File") click("New document") write("Hello Helium!") self.assertTrue(Text("Hello Helium!").exists())
from . import TheInternetTestCase - from helium.api import click, Text, press, CONTROL, COMMAND, write ? ------------------------- + from helium.api import click, Text, write - from sys import platform class WYSIWYGEditorTest(TheInternetTestCase): def get_page(self): return "http://the-internet.herokuapp.com/tinymce" def test_use_wysiwyg_editor(self): self.assertTrue(Text("Your content goes here.").exists()) + click("File") + click("New document") - click("Your content goes here.") - if platform == 'darwin': - press(COMMAND + 'a') - else: - press(CONTROL + 'a') write("Hello Helium!") self.assertTrue(Text("Hello Helium!").exists())
420b9a4e2b52de7234734b9c457e0711bb0f1a70
utils/lit/lit/__init__.py
utils/lit/lit/__init__.py
"""'lit' Testing Tool""" __author__ = 'Daniel Dunbar' __email__ = '[email protected]' __versioninfo__ = (0, 6, 0) __version__ = '.'.join(str(v) for v in __versioninfo__) + 'dev' __all__ = []
"""'lit' Testing Tool""" __author__ = 'Daniel Dunbar' __email__ = '[email protected]' __versioninfo__ = (0, 6, 0) __version__ = '.'.join(str(v) for v in __versioninfo__) + 'dev' __all__ = [] from .main import main
Fix issue which causes lit installed with setup.py to not resolve main
Fix issue which causes lit installed with setup.py to not resolve main

git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@283818 91177308-0d34-0410-b5e6-96231b3b80d8
Python
apache-2.0
apple/swift-llvm,apple/swift-llvm,apple/swift-llvm,apple/swift-llvm,llvm-mirror/llvm,llvm-mirror/llvm,llvm-mirror/llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,GPUOpen-Drivers/llvm,apple/swift-llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,llvm-mirror/llvm,apple/swift-llvm,llvm-mirror/llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,GPUOpen-Drivers/llvm,llvm-mirror/llvm,apple/swift-llvm,GPUOpen-Drivers/llvm,apple/swift-llvm,GPUOpen-Drivers/llvm,GPUOpen-Drivers/llvm
"""'lit' Testing Tool""" __author__ = 'Daniel Dunbar' __email__ = '[email protected]' __versioninfo__ = (0, 6, 0) __version__ = '.'.join(str(v) for v in __versioninfo__) + 'dev' __all__ = [] + from .main import main +
Fix issue which causes lit installed with setup.py to not resolve main
## Code Before:
"""'lit' Testing Tool"""

__author__ = 'Daniel Dunbar'
__email__ = '[email protected]'
__versioninfo__ = (0, 6, 0)
__version__ = '.'.join(str(v) for v in __versioninfo__) + 'dev'

__all__ = []

## Instruction:
Fix issue which causes lit installed with setup.py to not resolve main
## Code After:
"""'lit' Testing Tool"""

__author__ = 'Daniel Dunbar'
__email__ = '[email protected]'
__versioninfo__ = (0, 6, 0)
__version__ = '.'.join(str(v) for v in __versioninfo__) + 'dev'

__all__ = []

from .main import main
"""'lit' Testing Tool""" __author__ = 'Daniel Dunbar' __email__ = '[email protected]' __versioninfo__ = (0, 6, 0) __version__ = '.'.join(str(v) for v in __versioninfo__) + 'dev' __all__ = [] + + from .main import main
be6ede95d37717a65bd02969e8340afd2354dcdc
tests/basics/gen_yield_from_throw.py
tests/basics/gen_yield_from_throw.py
def gen(): try: yield 1 except ValueError as e: print("got ValueError from upstream!", repr(e.args)) yield "str1" raise TypeError def gen2(): print((yield from gen())) g = gen2() print(next(g)) print(g.throw(ValueError)) try: print(next(g)) except TypeError: print("got TypeError from downstream!") # passing None as second argument to throw g = gen2() print(next(g)) print(g.throw(ValueError, None)) try: print(next(g)) except TypeError: print("got TypeError from downstream!") # passing an exception instance as second argument to throw g = gen2() print(next(g)) print(g.throw(ValueError, ValueError(123))) try: print(next(g)) except TypeError: print("got TypeError from downstream!")
def gen(): try: yield 1 except ValueError as e: print("got ValueError from upstream!", repr(e.args)) yield "str1" raise TypeError def gen2(): print((yield from gen())) g = gen2() print(next(g)) print(g.throw(ValueError)) try: print(next(g)) except TypeError: print("got TypeError from downstream!") # passing None as second argument to throw g = gen2() print(next(g)) print(g.throw(ValueError, None)) try: print(next(g)) except TypeError: print("got TypeError from downstream!") # passing an exception instance as second argument to throw g = gen2() print(next(g)) print(g.throw(ValueError, ValueError(123))) try: print(next(g)) except TypeError: print("got TypeError from downstream!") # thrown value is caught and then generator returns normally def gen(): try: yield 123 except ValueError: print('ValueError') # return normally after catching thrown exception def gen2(): yield from gen() yield 789 g = gen2() print(next(g)) print(g.throw(ValueError))
Add test for throw into yield-from with normal return.
tests/basics: Add test for throw into yield-from with normal return. This test was found by missing coverage of a branch in py/nativeglue.c.
Python
mit
pfalcon/micropython,pfalcon/micropython,pfalcon/micropython,pfalcon/micropython,pfalcon/micropython
def gen(): try: yield 1 except ValueError as e: print("got ValueError from upstream!", repr(e.args)) yield "str1" raise TypeError def gen2(): print((yield from gen())) g = gen2() print(next(g)) print(g.throw(ValueError)) try: print(next(g)) except TypeError: print("got TypeError from downstream!") # passing None as second argument to throw g = gen2() print(next(g)) print(g.throw(ValueError, None)) try: print(next(g)) except TypeError: print("got TypeError from downstream!") # passing an exception instance as second argument to throw g = gen2() print(next(g)) print(g.throw(ValueError, ValueError(123))) try: print(next(g)) except TypeError: print("got TypeError from downstream!") + # thrown value is caught and then generator returns normally + def gen(): + try: + yield 123 + except ValueError: + print('ValueError') + # return normally after catching thrown exception + def gen2(): + yield from gen() + yield 789 + g = gen2() + print(next(g)) + print(g.throw(ValueError)) +
Add test for throw into yield-from with normal return.
## Code Before: def gen(): try: yield 1 except ValueError as e: print("got ValueError from upstream!", repr(e.args)) yield "str1" raise TypeError def gen2(): print((yield from gen())) g = gen2() print(next(g)) print(g.throw(ValueError)) try: print(next(g)) except TypeError: print("got TypeError from downstream!") # passing None as second argument to throw g = gen2() print(next(g)) print(g.throw(ValueError, None)) try: print(next(g)) except TypeError: print("got TypeError from downstream!") # passing an exception instance as second argument to throw g = gen2() print(next(g)) print(g.throw(ValueError, ValueError(123))) try: print(next(g)) except TypeError: print("got TypeError from downstream!") ## Instruction: Add test for throw into yield-from with normal return. ## Code After: def gen(): try: yield 1 except ValueError as e: print("got ValueError from upstream!", repr(e.args)) yield "str1" raise TypeError def gen2(): print((yield from gen())) g = gen2() print(next(g)) print(g.throw(ValueError)) try: print(next(g)) except TypeError: print("got TypeError from downstream!") # passing None as second argument to throw g = gen2() print(next(g)) print(g.throw(ValueError, None)) try: print(next(g)) except TypeError: print("got TypeError from downstream!") # passing an exception instance as second argument to throw g = gen2() print(next(g)) print(g.throw(ValueError, ValueError(123))) try: print(next(g)) except TypeError: print("got TypeError from downstream!") # thrown value is caught and then generator returns normally def gen(): try: yield 123 except ValueError: print('ValueError') # return normally after catching thrown exception def gen2(): yield from gen() yield 789 g = gen2() print(next(g)) print(g.throw(ValueError))
def gen(): try: yield 1 except ValueError as e: print("got ValueError from upstream!", repr(e.args)) yield "str1" raise TypeError def gen2(): print((yield from gen())) g = gen2() print(next(g)) print(g.throw(ValueError)) try: print(next(g)) except TypeError: print("got TypeError from downstream!") # passing None as second argument to throw g = gen2() print(next(g)) print(g.throw(ValueError, None)) try: print(next(g)) except TypeError: print("got TypeError from downstream!") # passing an exception instance as second argument to throw g = gen2() print(next(g)) print(g.throw(ValueError, ValueError(123))) try: print(next(g)) except TypeError: print("got TypeError from downstream!") + + # thrown value is caught and then generator returns normally + def gen(): + try: + yield 123 + except ValueError: + print('ValueError') + # return normally after catching thrown exception + def gen2(): + yield from gen() + yield 789 + g = gen2() + print(next(g)) + print(g.throw(ValueError))
2ba4e0758c04bebcd1dcde78e99605d0b9460abf
foldatlas/monitor.py
foldatlas/monitor.py
import os # must call "sudo apt-get install sendmail" first... # if sts != 0: # print("Sendmail exit status "+str(sts)) def send_error(recipient, error_details): SENDMAIL = "/usr/sbin/sendmail" # sendmail location p = os.popen("%s -t" % SENDMAIL, "w") p.write("To: "+recipient+"\n") p.write("Subject: FoldAtlas error\n") p.write("\n") # blank line separating headers from body p.write("Some text\n") p.write("some more text\n") sts = p.close()
import traceback import os import urllib.request # the lib that handles the url stuff test_url = "http://www.foldatlas.com/transcript/AT2G45180.1" recipient = "[email protected]" search_str = "AT2G45180.1" def run_test(): try: data = urllib.request.urlopen(test_url) # it's a file like object and works just like a file text = str(data.read()) if search_str in text: print("It worked!") else: send_error(text) except: send_error(traceback.format_exc()) def send_error(error_details): print("FAILED") SENDMAIL = "/usr/sbin/sendmail" # sendmail location p = os.popen("%s -t" % SENDMAIL, "w") p.write("To: "+recipient+"\n") p.write("Subject: FoldAtlas error\n") p.write("\n") # blank line separating headers from body p.write(error_details) sts = p.close() run_test()
Monitor now checks and emails
Monitor now checks and emails
Python
mit
mnori/foldatlas,mnori/foldatlas,mnori/foldatlas,mnori/foldatlas
+ import traceback import os + import urllib.request # the lib that handles the url stuff - # must call "sudo apt-get install sendmail" first... + test_url = "http://www.foldatlas.com/transcript/AT2G45180.1" + recipient = "[email protected]" + search_str = "AT2G45180.1" + def run_test(): + try: + data = urllib.request.urlopen(test_url) # it's a file like object and works just like a file + text = str(data.read()) - # if sts != 0: - # print("Sendmail exit status "+str(sts)) + if search_str in text: + print("It worked!") + else: + send_error(text) + except: + send_error(traceback.format_exc()) - - - def send_error(recipient, error_details): + def send_error(error_details): + print("FAILED") SENDMAIL = "/usr/sbin/sendmail" # sendmail location p = os.popen("%s -t" % SENDMAIL, "w") p.write("To: "+recipient+"\n") p.write("Subject: FoldAtlas error\n") p.write("\n") # blank line separating headers from body + p.write(error_details) - p.write("Some text\n") - p.write("some more text\n") sts = p.close() + run_test()
Monitor now checks and emails
## Code Before: import os # must call "sudo apt-get install sendmail" first... # if sts != 0: # print("Sendmail exit status "+str(sts)) def send_error(recipient, error_details): SENDMAIL = "/usr/sbin/sendmail" # sendmail location p = os.popen("%s -t" % SENDMAIL, "w") p.write("To: "+recipient+"\n") p.write("Subject: FoldAtlas error\n") p.write("\n") # blank line separating headers from body p.write("Some text\n") p.write("some more text\n") sts = p.close() ## Instruction: Monitor now checks and emails ## Code After: import traceback import os import urllib.request # the lib that handles the url stuff test_url = "http://www.foldatlas.com/transcript/AT2G45180.1" recipient = "[email protected]" search_str = "AT2G45180.1" def run_test(): try: data = urllib.request.urlopen(test_url) # it's a file like object and works just like a file text = str(data.read()) if search_str in text: print("It worked!") else: send_error(text) except: send_error(traceback.format_exc()) def send_error(error_details): print("FAILED") SENDMAIL = "/usr/sbin/sendmail" # sendmail location p = os.popen("%s -t" % SENDMAIL, "w") p.write("To: "+recipient+"\n") p.write("Subject: FoldAtlas error\n") p.write("\n") # blank line separating headers from body p.write(error_details) sts = p.close() run_test()
+ import traceback import os + import urllib.request # the lib that handles the url stuff - # must call "sudo apt-get install sendmail" first... + test_url = "http://www.foldatlas.com/transcript/AT2G45180.1" + recipient = "[email protected]" + search_str = "AT2G45180.1" + def run_test(): + try: + data = urllib.request.urlopen(test_url) # it's a file like object and works just like a file + text = str(data.read()) - # if sts != 0: - # print("Sendmail exit status "+str(sts)) + if search_str in text: + print("It worked!") + else: + send_error(text) + except: + send_error(traceback.format_exc()) - - - def send_error(recipient, error_details): ? ----------- + def send_error(error_details): + print("FAILED") SENDMAIL = "/usr/sbin/sendmail" # sendmail location p = os.popen("%s -t" % SENDMAIL, "w") p.write("To: "+recipient+"\n") p.write("Subject: FoldAtlas error\n") p.write("\n") # blank line separating headers from body + p.write(error_details) - p.write("Some text\n") - p.write("some more text\n") sts = p.close() + + run_test()
9f82fe03a38d9eaf4ccd22f2ee6d13907bc3b42e
relay_api/api/server.py
relay_api/api/server.py
from flask import Flask, jsonify server = Flask(__name__) def get_relays(relays): return jsonify({"relays": relays}), 200 def get_relay(relays, relay_name): code = 200 try: relay = relays[relay_name] except KeyError: code = 404 return "", code return jsonify({"relay": relay}), code
from flask import Flask, jsonify # import json server = Flask(__name__) def __serialize_relay(relays): if type(relays).__name__ == "relay": return jsonify({"gpio": relays.gpio, "NC": relays.nc, "state": relays.state}) di = {} for r in relays: di[r] = {"gpio": relays[r].gpio, "NC": relays[r].nc, "state": relays[r].state} return jsonify(di) def get_relays(relays_dict): return __serialize_relay(relays_dict), 200 def get_relay(relay): code = 200 if not relay: code = 404 return "", code return __serialize_relay(relay), code
Change to get a dict with the relay instances
Change to get a dict with the relay instances
Python
mit
pahumadad/raspi-relay-api
from flask import Flask, jsonify + # import json server = Flask(__name__) - def get_relays(relays): + def __serialize_relay(relays): - return jsonify({"relays": relays}), 200 + if type(relays).__name__ == "relay": + return jsonify({"gpio": relays.gpio, + "NC": relays.nc, + "state": relays.state}) + di = {} + for r in relays: + di[r] = {"gpio": relays[r].gpio, + "NC": relays[r].nc, + "state": relays[r].state} + return jsonify(di) + def get_relays(relays_dict): + return __serialize_relay(relays_dict), 200 + + - def get_relay(relays, relay_name): + def get_relay(relay): code = 200 + if not relay: - try: - relay = relays[relay_name] - except KeyError: code = 404 return "", code + return __serialize_relay(relay), code - return jsonify({"relay": relay}), code -
Change to get a dict with the relay instances
## Code Before: from flask import Flask, jsonify server = Flask(__name__) def get_relays(relays): return jsonify({"relays": relays}), 200 def get_relay(relays, relay_name): code = 200 try: relay = relays[relay_name] except KeyError: code = 404 return "", code return jsonify({"relay": relay}), code ## Instruction: Change to get a dict with the relay instances ## Code After: from flask import Flask, jsonify # import json server = Flask(__name__) def __serialize_relay(relays): if type(relays).__name__ == "relay": return jsonify({"gpio": relays.gpio, "NC": relays.nc, "state": relays.state}) di = {} for r in relays: di[r] = {"gpio": relays[r].gpio, "NC": relays[r].nc, "state": relays[r].state} return jsonify(di) def get_relays(relays_dict): return __serialize_relay(relays_dict), 200 def get_relay(relay): code = 200 if not relay: code = 404 return "", code return __serialize_relay(relay), code
from flask import Flask, jsonify + # import json server = Flask(__name__) - def get_relays(relays): ? ^ ^ - + def __serialize_relay(relays): ? ^^^ ^^^^^^^ - return jsonify({"relays": relays}), 200 + if type(relays).__name__ == "relay": + return jsonify({"gpio": relays.gpio, + "NC": relays.nc, + "state": relays.state}) + di = {} + for r in relays: + di[r] = {"gpio": relays[r].gpio, + "NC": relays[r].nc, + "state": relays[r].state} + return jsonify(di) + def get_relays(relays_dict): + return __serialize_relay(relays_dict), 200 + + - def get_relay(relays, relay_name): ? ------------- + def get_relay(relay): code = 200 + if not relay: - try: - relay = relays[relay_name] - except KeyError: code = 404 return "", code + return __serialize_relay(relay), code - - return jsonify({"relay": relay}), code
5a45840e81d612e1f743ad063fd32da4d19354d4
cacheops/signals.py
cacheops/signals.py
import django.dispatch cache_read = django.dispatch.Signal(providing_args=["func", "hit"]) cache_invalidated = django.dispatch.Signal(providing_args=["obj_dict"])
import django.dispatch cache_read = django.dispatch.Signal() # args: func, hit cache_invalidated = django.dispatch.Signal() # args: obj_dict
Stop using Signal(providing_args) deprecated in Django 4.0
Stop using Signal(providing_args) deprecated in Django 4.0

Closes #393
Python
bsd-3-clause
Suor/django-cacheops
import django.dispatch - cache_read = django.dispatch.Signal(providing_args=["func", "hit"]) + cache_read = django.dispatch.Signal() # args: func, hit - cache_invalidated = django.dispatch.Signal(providing_args=["obj_dict"]) + cache_invalidated = django.dispatch.Signal() # args: obj_dict
Stop using Signal(providing_args) deprecated in Django 4.0
## Code Before:
import django.dispatch


cache_read = django.dispatch.Signal(providing_args=["func", "hit"])
cache_invalidated = django.dispatch.Signal(providing_args=["obj_dict"])

## Instruction:
Stop using Signal(providing_args) deprecated in Django 4.0
## Code After:
import django.dispatch


cache_read = django.dispatch.Signal()  # args: func, hit
cache_invalidated = django.dispatch.Signal()  # args: obj_dict
import django.dispatch - cache_read = django.dispatch.Signal(providing_args=["func", "hit"]) ? ^^^^^^^^^^ ^^^ - - --- + cache_read = django.dispatch.Signal() # args: func, hit ? ^^^^^ ^^ - cache_invalidated = django.dispatch.Signal(providing_args=["obj_dict"]) ? ^^^^^^^^^^ ^^^ --- + cache_invalidated = django.dispatch.Signal() # args: obj_dict ? ^^^^^ ^^
6c349621dd3331bf92f803d2d66c96868f8e94c6
src/geelweb/django/editos/runtests.py
src/geelweb/django/editos/runtests.py
import os import sys os.environ['DJANGO_SETTINGS_MODULE'] = 'test_settings' test_dir = os.path.dirname(__file__) sys.path.insert(0, test_dir) from django.test.utils import get_runner from django.conf import settings def runtests(): TestRunner = get_runner(settings) test_runner = TestRunner(verbosity=1, interactive=True) failures = test_runner.run_tests(['geelweb.django.editos']) sys.exit(bool(failures))
import os import sys os.environ['DJANGO_SETTINGS_MODULE'] = 'test_settings' test_dir = os.path.dirname(__file__) sys.path.insert(0, test_dir) import django from django.test.utils import get_runner from django.conf import settings def runtests(): if django.VERSION[0] == 1 and django.VERSION[1] < 7: from django.test.utils import setup_test_environment setup_test_environment() if django.VERSION[0] == 1 and django.VERSION[1] >= 7: django.setup() TestRunner = get_runner(settings) test_runner = TestRunner() failures = test_runner.run_tests(['geelweb.django.editos']) sys.exit(bool(failures))
Upgrade to test using django 1.7 and 1.8
Upgrade to test using django 1.7 and 1.8
Python
mit
geelweb/django-editos,geelweb/django-editos
import os import sys os.environ['DJANGO_SETTINGS_MODULE'] = 'test_settings' test_dir = os.path.dirname(__file__) sys.path.insert(0, test_dir) + import django from django.test.utils import get_runner from django.conf import settings def runtests(): + if django.VERSION[0] == 1 and django.VERSION[1] < 7: + from django.test.utils import setup_test_environment + setup_test_environment() + + if django.VERSION[0] == 1 and django.VERSION[1] >= 7: + django.setup() + TestRunner = get_runner(settings) - test_runner = TestRunner(verbosity=1, interactive=True) + test_runner = TestRunner() failures = test_runner.run_tests(['geelweb.django.editos']) sys.exit(bool(failures))
Upgrade to test using django 1.7 and 1.8
## Code Before: import os import sys os.environ['DJANGO_SETTINGS_MODULE'] = 'test_settings' test_dir = os.path.dirname(__file__) sys.path.insert(0, test_dir) from django.test.utils import get_runner from django.conf import settings def runtests(): TestRunner = get_runner(settings) test_runner = TestRunner(verbosity=1, interactive=True) failures = test_runner.run_tests(['geelweb.django.editos']) sys.exit(bool(failures)) ## Instruction: Upgrade to test using django 1.7 and 1.8 ## Code After: import os import sys os.environ['DJANGO_SETTINGS_MODULE'] = 'test_settings' test_dir = os.path.dirname(__file__) sys.path.insert(0, test_dir) import django from django.test.utils import get_runner from django.conf import settings def runtests(): if django.VERSION[0] == 1 and django.VERSION[1] < 7: from django.test.utils import setup_test_environment setup_test_environment() if django.VERSION[0] == 1 and django.VERSION[1] >= 7: django.setup() TestRunner = get_runner(settings) test_runner = TestRunner() failures = test_runner.run_tests(['geelweb.django.editos']) sys.exit(bool(failures))
import os import sys os.environ['DJANGO_SETTINGS_MODULE'] = 'test_settings' test_dir = os.path.dirname(__file__) sys.path.insert(0, test_dir) + import django from django.test.utils import get_runner from django.conf import settings def runtests(): + if django.VERSION[0] == 1 and django.VERSION[1] < 7: + from django.test.utils import setup_test_environment + setup_test_environment() + + if django.VERSION[0] == 1 and django.VERSION[1] >= 7: + django.setup() + TestRunner = get_runner(settings) - test_runner = TestRunner(verbosity=1, interactive=True) + test_runner = TestRunner() failures = test_runner.run_tests(['geelweb.django.editos']) sys.exit(bool(failures))
151599602b9d626ebcfe5ae6960ea216b767fec2
setuptools/distutils_patch.py
setuptools/distutils_patch.py
import sys import importlib from os.path import dirname sys.path.insert(0, dirname(dirname(__file__))) importlib.import_module('distutils') sys.path.pop(0)
import sys import importlib import contextlib from os.path import dirname @contextlib.contextmanager def patch_sys_path(): orig = sys.path[:] sys.path[:] = [dirname(dirname(__file__))] try: yield finally: sys.path[:] = orig if 'distutils' in sys.path: raise RuntimeError("Distutils must not be imported before setuptools") with patch_sys_path(): importlib.import_module('distutils')
Update distutils patch to monkeypatch all paths from sys.path to ensure that distutils is never imported except from the same path as setuptools. Assert that 'distutils' is not already in sys.modules.
Update distutils patch to monkeypatch all paths from sys.path to ensure that distutils is never imported except from the same path as setuptools. Assert that 'distutils' is not already in sys.modules.
Python
mit
pypa/setuptools,pypa/setuptools,pypa/setuptools
import sys import importlib + import contextlib from os.path import dirname - sys.path.insert(0, dirname(dirname(__file__))) - importlib.import_module('distutils') - sys.path.pop(0) + @contextlib.contextmanager + def patch_sys_path(): + orig = sys.path[:] + sys.path[:] = [dirname(dirname(__file__))] + try: + yield + finally: + sys.path[:] = orig + + if 'distutils' in sys.path: + raise RuntimeError("Distutils must not be imported before setuptools") + + + with patch_sys_path(): + importlib.import_module('distutils') +
Update distutils patch to monkeypatch all paths from sys.path to ensure that distutils is never imported except from the same path as setuptools. Assert that 'distutils' is not already in sys.modules.
## Code Before: import sys import importlib from os.path import dirname sys.path.insert(0, dirname(dirname(__file__))) importlib.import_module('distutils') sys.path.pop(0) ## Instruction: Update distutils patch to monkeypatch all paths from sys.path to ensure that distutils is never imported except from the same path as setuptools. Assert that 'distutils' is not already in sys.modules. ## Code After: import sys import importlib import contextlib from os.path import dirname @contextlib.contextmanager def patch_sys_path(): orig = sys.path[:] sys.path[:] = [dirname(dirname(__file__))] try: yield finally: sys.path[:] = orig if 'distutils' in sys.path: raise RuntimeError("Distutils must not be imported before setuptools") with patch_sys_path(): importlib.import_module('distutils')
import sys import importlib + import contextlib from os.path import dirname + @contextlib.contextmanager + def patch_sys_path(): + orig = sys.path[:] - sys.path.insert(0, dirname(dirname(__file__))) ? ^^^^^^^^^^ ^ + sys.path[:] = [dirname(dirname(__file__))] ? ++++ ^^^ +++ ^ + try: + yield + finally: + sys.path[:] = orig + + + if 'distutils' in sys.path: + raise RuntimeError("Distutils must not be imported before setuptools") + + + with patch_sys_path(): - importlib.import_module('distutils') + importlib.import_module('distutils') ? ++++ - sys.path.pop(0)
7efcc9987f827eec56677d95bc7ad873208b392f
saw/parser/sentences.py
saw/parser/sentences.py
import base from blocks import Blocks import re class Sentences(base.Base): _type = 'sentences' child_class = Blocks @staticmethod def parse(text): #re.split('\!|\?|\. | \.',text) result = [] prev = 0 # we allow .09 as not end of sentences #for m in re.finditer('[\!\?]+|\.+(?:\s+|$|\?|\!)', text): for m in re.finditer('\.+(?:\s+|$)|(\.*)[\!\?]+(\.+(?:\s+|$))*', text): curr, _next = m.start(), m.end() # if prev position of delimiter < current - between exists text # at least 1 symbol. if prev < curr: node = text[prev:curr].strip() if node != '': result.append(node) result.append(list( text[curr:_next].strip() )) prev = _next if len(text) > prev: result.append(text[prev:].strip()) return result
import base from blocks import Blocks import re class Sentences(base.Base): _type = 'sentences' child_class = Blocks @staticmethod def parse(text): _len = len(text) result = [] prev = 0 # we allow .09 as not end of sentences for m in re.finditer('[\!\?\.]+', text): curr, _next = m.start(), m.end() items = list( text[curr: _next].strip() ) if (_len > _next) and not (text[_next] == ' '): # delete ending '.' if they not before space or end of string while (len(items) > 0) and (items[-1] == '.'): items.pop() _next = _next - 1 if len(items) > 0: # if prev position of delimiter < current - between exists text # at least 1 symbol. if prev < curr: node = text[prev:curr].strip() if node != '': result.append(node) result.append( items ) prev = _next if _len > prev: result.append(text[prev:].strip()) return result
Optimize from 5-6s to 2.9-3.0
Optimize from 5-6s to 2.9-3.0
Python
mit
diNard/Saw
import base from blocks import Blocks import re class Sentences(base.Base): _type = 'sentences' child_class = Blocks @staticmethod def parse(text): - #re.split('\!|\?|\. | \.',text) + _len = len(text) result = [] prev = 0 # we allow .09 as not end of sentences - #for m in re.finditer('[\!\?]+|\.+(?:\s+|$|\?|\!)', text): + for m in re.finditer('[\!\?\.]+', text): - for m in re.finditer('\.+(?:\s+|$)|(\.*)[\!\?]+(\.+(?:\s+|$))*', text): curr, _next = m.start(), m.end() + items = list( text[curr: _next].strip() ) + + if (_len > _next) and not (text[_next] == ' '): + # delete ending '.' if they not before space or end of string + while (len(items) > 0) and (items[-1] == '.'): + items.pop() + _next = _next - 1 + + if len(items) > 0: - # if prev position of delimiter < current - between exists text + # if prev position of delimiter < current - between exists text - # at least 1 symbol. + # at least 1 symbol. - if prev < curr: + if prev < curr: - node = text[prev:curr].strip() + node = text[prev:curr].strip() - if node != '': + if node != '': - result.append(node) + result.append(node) - result.append(list( text[curr:_next].strip() )) + result.append( items ) - prev = _next + prev = _next - if len(text) > prev: + if _len > prev: result.append(text[prev:].strip()) return result
Optimize from 5-6s to 2.9-3.0
## Code Before: import base from blocks import Blocks import re class Sentences(base.Base): _type = 'sentences' child_class = Blocks @staticmethod def parse(text): #re.split('\!|\?|\. | \.',text) result = [] prev = 0 # we allow .09 as not end of sentences #for m in re.finditer('[\!\?]+|\.+(?:\s+|$|\?|\!)', text): for m in re.finditer('\.+(?:\s+|$)|(\.*)[\!\?]+(\.+(?:\s+|$))*', text): curr, _next = m.start(), m.end() # if prev position of delimiter < current - between exists text # at least 1 symbol. if prev < curr: node = text[prev:curr].strip() if node != '': result.append(node) result.append(list( text[curr:_next].strip() )) prev = _next if len(text) > prev: result.append(text[prev:].strip()) return result ## Instruction: Optimize from 5-6s to 2.9-3.0 ## Code After: import base from blocks import Blocks import re class Sentences(base.Base): _type = 'sentences' child_class = Blocks @staticmethod def parse(text): _len = len(text) result = [] prev = 0 # we allow .09 as not end of sentences for m in re.finditer('[\!\?\.]+', text): curr, _next = m.start(), m.end() items = list( text[curr: _next].strip() ) if (_len > _next) and not (text[_next] == ' '): # delete ending '.' if they not before space or end of string while (len(items) > 0) and (items[-1] == '.'): items.pop() _next = _next - 1 if len(items) > 0: # if prev position of delimiter < current - between exists text # at least 1 symbol. if prev < curr: node = text[prev:curr].strip() if node != '': result.append(node) result.append( items ) prev = _next if _len > prev: result.append(text[prev:].strip()) return result
import base from blocks import Blocks import re class Sentences(base.Base): _type = 'sentences' child_class = Blocks @staticmethod def parse(text): - #re.split('\!|\?|\. | \.',text) + _len = len(text) result = [] prev = 0 # we allow .09 as not end of sentences - #for m in re.finditer('[\!\?]+|\.+(?:\s+|$|\?|\!)', text): ? - ------------------- + for m in re.finditer('[\!\?\.]+', text): ? ++ - for m in re.finditer('\.+(?:\s+|$)|(\.*)[\!\?]+(\.+(?:\s+|$))*', text): curr, _next = m.start(), m.end() + items = list( text[curr: _next].strip() ) + + if (_len > _next) and not (text[_next] == ' '): + # delete ending '.' if they not before space or end of string + while (len(items) > 0) and (items[-1] == '.'): + items.pop() + _next = _next - 1 + + if len(items) > 0: - # if prev position of delimiter < current - between exists text + # if prev position of delimiter < current - between exists text ? ++++ - # at least 1 symbol. + # at least 1 symbol. ? ++++ - if prev < curr: + if prev < curr: ? ++++ - node = text[prev:curr].strip() + node = text[prev:curr].strip() ? ++++ - if node != '': + if node != '': ? ++++ - result.append(node) + result.append(node) ? ++++ - result.append(list( text[curr:_next].strip() )) + result.append( items ) - prev = _next + prev = _next ? ++++ - if len(text) > prev: ? ------ + if _len > prev: ? + result.append(text[prev:].strip()) return result
754d2949ce0c2fa2b36615af755b3b8aaf9876b5
tests/test_resources.py
tests/test_resources.py
import pytest from micromanager.resources import Resource from micromanager.resources import BQDataset from micromanager.resources import Bucket from micromanager.resources import SQLInstance test_cases = [ ( {'resource_kind': 'storage#bucket'}, Bucket ), ( {'resource_kind': 'sql#instance'}, SQLInstance ), ( {'resource_kind': 'bigquery#dataset'}, BQDataset ) ] @pytest.mark.parametrize( "input,expected", test_cases, ids=[cls.__name__ for (_, cls) in test_cases]) def test_resource_factory(input, expected): r = Resource.factory(input) assert r.__class__ == expected def test_resource_factory_invalid(): with pytest.raises(AssertionError): r = Resource.factory({})
import pytest from .util import load_test_data from .util import discovery_cache from .mock import HttpMockSequenceEx from googleapiclient.http import HttpMockSequence from micromanager.resources import Resource from micromanager.resources.gcp import GcpBigqueryDataset from micromanager.resources.gcp import GcpComputeInstance from micromanager.resources.gcp import GcpSqlInstance from micromanager.resources.gcp import GcpStorageBucket from micromanager.resources.gcp import GcpStorageBucketIamPolicy test_cases = [ ( {'resource_type': 'bigquery.datasets', 'resource_name': '', 'project_id': ''}, GcpBigqueryDataset, 'gcp.bigquery.datasets' ), ( {'resource_type': 'compute.instances', 'resource_name': '', 'project_id': ''}, GcpComputeInstance, 'gcp.compute.instances' ), ( {'resource_type': 'sqladmin.instances', 'resource_name': '', 'project_id': ''}, GcpSqlInstance, 'gcp.sqladmin.instances' ), ( {'resource_type': 'storage.buckets', 'resource_name': '', 'project_id': ''}, GcpStorageBucket, 'gcp.storage.buckets' ), ( {'resource_type': 'storage.buckets.iam', 'resource_name': '', 'project_id': ''}, GcpStorageBucketIamPolicy, 'gcp.storage.buckets.iam' ) ] @pytest.mark.parametrize( "input,cls,rtype", test_cases, ids=[cls.__name__ for (_, cls, _) in test_cases]) def test_gcp_resource_factory(input, cls, rtype): r = Resource.factory("gcp", input) assert r.__class__ == cls assert r.type() == rtype def test_gcp_resource_factory_invalid(): with pytest.raises(AssertionError): r = Resource.factory('gcp', {})
Update tests after lots of work on Resources
Update tests after lots of work on Resources
Python
apache-2.0
forseti-security/resource-policy-evaluation-library
import pytest + from .util import load_test_data + from .util import discovery_cache + + from .mock import HttpMockSequenceEx + + from googleapiclient.http import HttpMockSequence + from micromanager.resources import Resource - from micromanager.resources import BQDataset + from micromanager.resources.gcp import GcpBigqueryDataset - from micromanager.resources import Bucket + from micromanager.resources.gcp import GcpComputeInstance - from micromanager.resources import SQLInstance + from micromanager.resources.gcp import GcpSqlInstance + from micromanager.resources.gcp import GcpStorageBucket + from micromanager.resources.gcp import GcpStorageBucketIamPolicy test_cases = [ ( - {'resource_kind': 'storage#bucket'}, - Bucket + {'resource_type': 'bigquery.datasets', 'resource_name': '', 'project_id': ''}, + GcpBigqueryDataset, + 'gcp.bigquery.datasets' ), ( - {'resource_kind': 'sql#instance'}, - SQLInstance + {'resource_type': 'compute.instances', 'resource_name': '', 'project_id': ''}, + GcpComputeInstance, + 'gcp.compute.instances' ), ( - {'resource_kind': 'bigquery#dataset'}, - BQDataset + {'resource_type': 'sqladmin.instances', 'resource_name': '', 'project_id': ''}, + GcpSqlInstance, + 'gcp.sqladmin.instances' + ), + ( + {'resource_type': 'storage.buckets', 'resource_name': '', 'project_id': ''}, + GcpStorageBucket, + 'gcp.storage.buckets' + ), + ( + {'resource_type': 'storage.buckets.iam', 'resource_name': '', 'project_id': ''}, + GcpStorageBucketIamPolicy, + 'gcp.storage.buckets.iam' ) ] @pytest.mark.parametrize( - "input,expected", + "input,cls,rtype", test_cases, - ids=[cls.__name__ for (_, cls) in test_cases]) + ids=[cls.__name__ for (_, cls, _) in test_cases]) - def test_resource_factory(input, expected): + def test_gcp_resource_factory(input, cls, rtype): - r = Resource.factory(input) + r = Resource.factory("gcp", input) - assert r.__class__ == expected + assert r.__class__ == cls + assert r.type() == rtype - def test_resource_factory_invalid(): + def test_gcp_resource_factory_invalid(): - with pytest.raises(AssertionError): + with pytest.raises(AssertionError): - r = Resource.factory({}) + r = Resource.factory('gcp', {})
Update tests after lots of work on Resources
## Code Before: import pytest from micromanager.resources import Resource from micromanager.resources import BQDataset from micromanager.resources import Bucket from micromanager.resources import SQLInstance test_cases = [ ( {'resource_kind': 'storage#bucket'}, Bucket ), ( {'resource_kind': 'sql#instance'}, SQLInstance ), ( {'resource_kind': 'bigquery#dataset'}, BQDataset ) ] @pytest.mark.parametrize( "input,expected", test_cases, ids=[cls.__name__ for (_, cls) in test_cases]) def test_resource_factory(input, expected): r = Resource.factory(input) assert r.__class__ == expected def test_resource_factory_invalid(): with pytest.raises(AssertionError): r = Resource.factory({}) ## Instruction: Update tests after lots of work on Resources ## Code After: import pytest from .util import load_test_data from .util import discovery_cache from .mock import HttpMockSequenceEx from googleapiclient.http import HttpMockSequence from micromanager.resources import Resource from micromanager.resources.gcp import GcpBigqueryDataset from micromanager.resources.gcp import GcpComputeInstance from micromanager.resources.gcp import GcpSqlInstance from micromanager.resources.gcp import GcpStorageBucket from micromanager.resources.gcp import GcpStorageBucketIamPolicy test_cases = [ ( {'resource_type': 'bigquery.datasets', 'resource_name': '', 'project_id': ''}, GcpBigqueryDataset, 'gcp.bigquery.datasets' ), ( {'resource_type': 'compute.instances', 'resource_name': '', 'project_id': ''}, GcpComputeInstance, 'gcp.compute.instances' ), ( {'resource_type': 'sqladmin.instances', 'resource_name': '', 'project_id': ''}, GcpSqlInstance, 'gcp.sqladmin.instances' ), ( {'resource_type': 'storage.buckets', 'resource_name': '', 'project_id': ''}, GcpStorageBucket, 'gcp.storage.buckets' ), ( {'resource_type': 'storage.buckets.iam', 'resource_name': '', 'project_id': ''}, GcpStorageBucketIamPolicy, 'gcp.storage.buckets.iam' ) ] @pytest.mark.parametrize( "input,cls,rtype", test_cases, ids=[cls.__name__ for (_, cls, _) in test_cases]) def test_gcp_resource_factory(input, cls, rtype): r = Resource.factory("gcp", input) assert r.__class__ == cls assert r.type() == rtype def test_gcp_resource_factory_invalid(): with pytest.raises(AssertionError): r = Resource.factory('gcp', {})
import pytest + from .util import load_test_data + from .util import discovery_cache + + from .mock import HttpMockSequenceEx + + from googleapiclient.http import HttpMockSequence + from micromanager.resources import Resource - from micromanager.resources import BQDataset ? ^ + from micromanager.resources.gcp import GcpBigqueryDataset ? ++++ +++ ^^^^^^^ - from micromanager.resources import Bucket ? ^ - - + from micromanager.resources.gcp import GcpComputeInstance ? ++++ ^^^^^^^ ++++++++ - from micromanager.resources import SQLInstance ? ^^ + from micromanager.resources.gcp import GcpSqlInstance ? ++++ +++ ^^ + from micromanager.resources.gcp import GcpStorageBucket + from micromanager.resources.gcp import GcpStorageBucketIamPolicy test_cases = [ ( - {'resource_kind': 'storage#bucket'}, - Bucket + {'resource_type': 'bigquery.datasets', 'resource_name': '', 'project_id': ''}, + GcpBigqueryDataset, + 'gcp.bigquery.datasets' ), ( - {'resource_kind': 'sql#instance'}, - SQLInstance + {'resource_type': 'compute.instances', 'resource_name': '', 'project_id': ''}, + GcpComputeInstance, + 'gcp.compute.instances' ), ( - {'resource_kind': 'bigquery#dataset'}, - BQDataset + {'resource_type': 'sqladmin.instances', 'resource_name': '', 'project_id': ''}, + GcpSqlInstance, + 'gcp.sqladmin.instances' + ), + ( + {'resource_type': 'storage.buckets', 'resource_name': '', 'project_id': ''}, + GcpStorageBucket, + 'gcp.storage.buckets' + ), + ( + {'resource_type': 'storage.buckets.iam', 'resource_name': '', 'project_id': ''}, + GcpStorageBucketIamPolicy, + 'gcp.storage.buckets.iam' ) ] @pytest.mark.parametrize( - "input,expected", + "input,cls,rtype", test_cases, - ids=[cls.__name__ for (_, cls) in test_cases]) + ids=[cls.__name__ for (_, cls, _) in test_cases]) ? +++ - def test_resource_factory(input, expected): ? ^^ ---- + def test_gcp_resource_factory(input, cls, rtype): ? ++++ ^^^^^^^^ - r = Resource.factory(input) ? ---- + r = Resource.factory("gcp", input) ? +++++++ - assert r.__class__ == expected ? ---- ---- ^^^ + assert r.__class__ == cls ? ^^ + assert r.type() == rtype - def test_resource_factory_invalid(): + def test_gcp_resource_factory_invalid(): ? ++++ - with pytest.raises(AssertionError): ? ---- + with pytest.raises(AssertionError): - r = Resource.factory({}) ? ---- + r = Resource.factory('gcp', {}) ? +++++++
d9226d778a831d6d9f9f8d7645869245d0757754
tests/integration/test_cli.py
tests/integration/test_cli.py
import os import subprocess import pytest from chalice.utils import OSUtils CURRENT_DIR = os.path.dirname(os.path.abspath(__file__)) PROJECT_DIR = os.path.join(CURRENT_DIR, 'testapp') @pytest.fixture def local_app(tmpdir): temp_dir_path = str(tmpdir) OSUtils().copytree(PROJECT_DIR, temp_dir_path) old_dir = os.getcwd() try: os.chdir(temp_dir_path) yield temp_dir_path finally: os.chdir(old_dir) def test_stack_trace_printed_on_error(local_app): app_file = os.path.join(local_app, 'app.py') with open(app_file, 'w') as f: f.write( 'from chalice import Chalice\n' 'app = Chalice(app_name="test")\n' 'foobarbaz\n' ) p = subprocess.Popen(['chalice', 'local'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) stderr = p.communicate()[1].decode('ascii') rc = p.returncode assert rc == 2 assert 'Traceback' in stderr assert 'foobarbaz' in stderr
import os import subprocess import pytest from chalice.utils import OSUtils CURRENT_DIR = os.path.dirname(os.path.abspath(__file__)) PROJECT_DIR = os.path.join(CURRENT_DIR, 'testapp') @pytest.fixture def local_app(tmpdir): temp_dir_path = str(tmpdir) OSUtils().copytree(PROJECT_DIR, temp_dir_path) old_dir = os.getcwd() try: os.chdir(temp_dir_path) yield temp_dir_path finally: os.chdir(old_dir) def test_stack_trace_printed_on_error(local_app): app_file = os.path.join(local_app, 'app.py') with open(app_file, 'w') as f: f.write( 'from chalice import Chalice\n' 'app = Chalice(app_name="test")\n' 'foobarbaz\n' ) p = subprocess.Popen(['chalice', 'local', '--no-autoreload'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) stderr = p.communicate()[1].decode('ascii') rc = p.returncode assert rc == 2 assert 'Traceback' in stderr assert 'foobarbaz' in stderr
Disable autoreload in integration tests
Disable autoreload in integration tests
Python
apache-2.0
awslabs/chalice
import os import subprocess import pytest from chalice.utils import OSUtils CURRENT_DIR = os.path.dirname(os.path.abspath(__file__)) PROJECT_DIR = os.path.join(CURRENT_DIR, 'testapp') @pytest.fixture def local_app(tmpdir): temp_dir_path = str(tmpdir) OSUtils().copytree(PROJECT_DIR, temp_dir_path) old_dir = os.getcwd() try: os.chdir(temp_dir_path) yield temp_dir_path finally: os.chdir(old_dir) def test_stack_trace_printed_on_error(local_app): app_file = os.path.join(local_app, 'app.py') with open(app_file, 'w') as f: f.write( 'from chalice import Chalice\n' 'app = Chalice(app_name="test")\n' 'foobarbaz\n' ) - p = subprocess.Popen(['chalice', 'local'], + p = subprocess.Popen(['chalice', 'local', '--no-autoreload'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) stderr = p.communicate()[1].decode('ascii') rc = p.returncode assert rc == 2 assert 'Traceback' in stderr assert 'foobarbaz' in stderr
Disable autoreload in integration tests
## Code Before: import os import subprocess import pytest from chalice.utils import OSUtils CURRENT_DIR = os.path.dirname(os.path.abspath(__file__)) PROJECT_DIR = os.path.join(CURRENT_DIR, 'testapp') @pytest.fixture def local_app(tmpdir): temp_dir_path = str(tmpdir) OSUtils().copytree(PROJECT_DIR, temp_dir_path) old_dir = os.getcwd() try: os.chdir(temp_dir_path) yield temp_dir_path finally: os.chdir(old_dir) def test_stack_trace_printed_on_error(local_app): app_file = os.path.join(local_app, 'app.py') with open(app_file, 'w') as f: f.write( 'from chalice import Chalice\n' 'app = Chalice(app_name="test")\n' 'foobarbaz\n' ) p = subprocess.Popen(['chalice', 'local'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) stderr = p.communicate()[1].decode('ascii') rc = p.returncode assert rc == 2 assert 'Traceback' in stderr assert 'foobarbaz' in stderr ## Instruction: Disable autoreload in integration tests ## Code After: import os import subprocess import pytest from chalice.utils import OSUtils CURRENT_DIR = os.path.dirname(os.path.abspath(__file__)) PROJECT_DIR = os.path.join(CURRENT_DIR, 'testapp') @pytest.fixture def local_app(tmpdir): temp_dir_path = str(tmpdir) OSUtils().copytree(PROJECT_DIR, temp_dir_path) old_dir = os.getcwd() try: os.chdir(temp_dir_path) yield temp_dir_path finally: os.chdir(old_dir) def test_stack_trace_printed_on_error(local_app): app_file = os.path.join(local_app, 'app.py') with open(app_file, 'w') as f: f.write( 'from chalice import Chalice\n' 'app = Chalice(app_name="test")\n' 'foobarbaz\n' ) p = subprocess.Popen(['chalice', 'local', '--no-autoreload'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) stderr = p.communicate()[1].decode('ascii') rc = p.returncode assert rc == 2 assert 'Traceback' in stderr assert 'foobarbaz' in stderr
import os import subprocess import pytest from chalice.utils import OSUtils CURRENT_DIR = os.path.dirname(os.path.abspath(__file__)) PROJECT_DIR = os.path.join(CURRENT_DIR, 'testapp') @pytest.fixture def local_app(tmpdir): temp_dir_path = str(tmpdir) OSUtils().copytree(PROJECT_DIR, temp_dir_path) old_dir = os.getcwd() try: os.chdir(temp_dir_path) yield temp_dir_path finally: os.chdir(old_dir) def test_stack_trace_printed_on_error(local_app): app_file = os.path.join(local_app, 'app.py') with open(app_file, 'w') as f: f.write( 'from chalice import Chalice\n' 'app = Chalice(app_name="test")\n' 'foobarbaz\n' ) - p = subprocess.Popen(['chalice', 'local'], + p = subprocess.Popen(['chalice', 'local', '--no-autoreload'], ? +++++++++++++++++++ stdout=subprocess.PIPE, stderr=subprocess.PIPE) stderr = p.communicate()[1].decode('ascii') rc = p.returncode assert rc == 2 assert 'Traceback' in stderr assert 'foobarbaz' in stderr
bcc6d199186953b5ae05f7e93bf61c169ac89c77
opps/archives/admin.py
opps/archives/admin.py
from django.contrib import admin from django.contrib.auth import get_user_model from django.utils import timezone from django.utils.translation import ugettext_lazy as _ from opps.core.admin import apply_opps_rules from opps.contrib.multisite.admin import AdminViewPermission from .models import File @apply_opps_rules('archives') class FileAdmin(AdminViewPermission): search_fields = ['title', 'slug'] raw_id_fields = ['user'] ordering = ('-date_available',) list_filter = ['date_available', 'published'] prepopulated_fields = {"slug": ["title"]} fieldsets = ( (_(u'Identification'), { 'fields': ('site', 'title', 'slug',)}), (_(u'Content'), { 'fields': ('description', 'archive', 'archive_link', 'tags')}), (_(u'Publication'), { 'classes': ('extrapretty'), 'fields': ('published', 'date_available',)}), ) def save_model(self, request, obj, form, change): if not change: obj.user = get_user_model().objects.get(pk=request.user.pk) obj.date_insert = timezone.now() obj.date_update = timezone.now() obj.save() admin.site.register(File, FileAdmin)
from django.contrib import admin from django.contrib.auth import get_user_model from django.utils import timezone from django.utils.translation import ugettext_lazy as _ from opps.core.admin import apply_opps_rules from opps.contrib.multisite.admin import AdminViewPermission from .models import File @apply_opps_rules('archives') class FileAdmin(AdminViewPermission): search_fields = ['title', 'slug'] raw_id_fields = ['user'] list_display = ['title', 'slug', 'download_link', 'published'] ordering = ('-date_available',) list_filter = ['date_available', 'published'] prepopulated_fields = {"slug": ["title"]} fieldsets = ( (_(u'Identification'), { 'fields': ('site', 'title', 'slug',)}), (_(u'Content'), { 'fields': ('description', 'archive', 'archive_link', 'tags')}), (_(u'Publication'), { 'classes': ('extrapretty'), 'fields': ('published', 'date_available',)}), ) def download_link(self, obj): html = '<a href="{}">{}</a>'.format(obj.archive.url, unicode(_(u'Download'))) return html download_link.short_description = _(u'download') download_link.allow_tags = True def save_model(self, request, obj, form, change): if not change: obj.user = get_user_model().objects.get(pk=request.user.pk) obj.date_insert = timezone.now() obj.date_update = timezone.now() obj.save() admin.site.register(File, FileAdmin)
Add list_display on FileAdmin and download_link def
Add list_display on FileAdmin and download_link def
Python
mit
YACOWS/opps,opps/opps,opps/opps,jeanmask/opps,williamroot/opps,jeanmask/opps,opps/opps,YACOWS/opps,YACOWS/opps,williamroot/opps,williamroot/opps,YACOWS/opps,jeanmask/opps,jeanmask/opps,williamroot/opps,opps/opps
from django.contrib import admin from django.contrib.auth import get_user_model from django.utils import timezone from django.utils.translation import ugettext_lazy as _ from opps.core.admin import apply_opps_rules from opps.contrib.multisite.admin import AdminViewPermission from .models import File @apply_opps_rules('archives') class FileAdmin(AdminViewPermission): search_fields = ['title', 'slug'] raw_id_fields = ['user'] + list_display = ['title', 'slug', 'download_link', 'published'] ordering = ('-date_available',) list_filter = ['date_available', 'published'] prepopulated_fields = {"slug": ["title"]} fieldsets = ( (_(u'Identification'), { 'fields': ('site', 'title', 'slug',)}), (_(u'Content'), { 'fields': ('description', 'archive', 'archive_link', 'tags')}), (_(u'Publication'), { 'classes': ('extrapretty'), 'fields': ('published', 'date_available',)}), ) + def download_link(self, obj): + html = '<a href="{}">{}</a>'.format(obj.archive.url, + unicode(_(u'Download'))) + return html + download_link.short_description = _(u'download') + download_link.allow_tags = True + def save_model(self, request, obj, form, change): if not change: obj.user = get_user_model().objects.get(pk=request.user.pk) obj.date_insert = timezone.now() obj.date_update = timezone.now() obj.save() admin.site.register(File, FileAdmin)
Add list_display on FileAdmin and download_link def
## Code Before: from django.contrib import admin from django.contrib.auth import get_user_model from django.utils import timezone from django.utils.translation import ugettext_lazy as _ from opps.core.admin import apply_opps_rules from opps.contrib.multisite.admin import AdminViewPermission from .models import File @apply_opps_rules('archives') class FileAdmin(AdminViewPermission): search_fields = ['title', 'slug'] raw_id_fields = ['user'] ordering = ('-date_available',) list_filter = ['date_available', 'published'] prepopulated_fields = {"slug": ["title"]} fieldsets = ( (_(u'Identification'), { 'fields': ('site', 'title', 'slug',)}), (_(u'Content'), { 'fields': ('description', 'archive', 'archive_link', 'tags')}), (_(u'Publication'), { 'classes': ('extrapretty'), 'fields': ('published', 'date_available',)}), ) def save_model(self, request, obj, form, change): if not change: obj.user = get_user_model().objects.get(pk=request.user.pk) obj.date_insert = timezone.now() obj.date_update = timezone.now() obj.save() admin.site.register(File, FileAdmin) ## Instruction: Add list_display on FileAdmin and download_link def ## Code After: from django.contrib import admin from django.contrib.auth import get_user_model from django.utils import timezone from django.utils.translation import ugettext_lazy as _ from opps.core.admin import apply_opps_rules from opps.contrib.multisite.admin import AdminViewPermission from .models import File @apply_opps_rules('archives') class FileAdmin(AdminViewPermission): search_fields = ['title', 'slug'] raw_id_fields = ['user'] list_display = ['title', 'slug', 'download_link', 'published'] ordering = ('-date_available',) list_filter = ['date_available', 'published'] prepopulated_fields = {"slug": ["title"]} fieldsets = ( (_(u'Identification'), { 'fields': ('site', 'title', 'slug',)}), (_(u'Content'), { 'fields': ('description', 'archive', 'archive_link', 'tags')}), (_(u'Publication'), { 'classes': ('extrapretty'), 'fields': ('published', 'date_available',)}), ) def download_link(self, obj): html = '<a href="{}">{}</a>'.format(obj.archive.url, unicode(_(u'Download'))) return html download_link.short_description = _(u'download') download_link.allow_tags = True def save_model(self, request, obj, form, change): if not change: obj.user = get_user_model().objects.get(pk=request.user.pk) obj.date_insert = timezone.now() obj.date_update = timezone.now() obj.save() admin.site.register(File, FileAdmin)
from django.contrib import admin from django.contrib.auth import get_user_model from django.utils import timezone from django.utils.translation import ugettext_lazy as _ from opps.core.admin import apply_opps_rules from opps.contrib.multisite.admin import AdminViewPermission from .models import File @apply_opps_rules('archives') class FileAdmin(AdminViewPermission): search_fields = ['title', 'slug'] raw_id_fields = ['user'] + list_display = ['title', 'slug', 'download_link', 'published'] ordering = ('-date_available',) list_filter = ['date_available', 'published'] prepopulated_fields = {"slug": ["title"]} fieldsets = ( (_(u'Identification'), { 'fields': ('site', 'title', 'slug',)}), (_(u'Content'), { 'fields': ('description', 'archive', 'archive_link', 'tags')}), (_(u'Publication'), { 'classes': ('extrapretty'), 'fields': ('published', 'date_available',)}), ) + def download_link(self, obj): + html = '<a href="{}">{}</a>'.format(obj.archive.url, + unicode(_(u'Download'))) + return html + download_link.short_description = _(u'download') + download_link.allow_tags = True + def save_model(self, request, obj, form, change): if not change: obj.user = get_user_model().objects.get(pk=request.user.pk) obj.date_insert = timezone.now() obj.date_update = timezone.now() obj.save() admin.site.register(File, FileAdmin)
b7b67a0327feddc977a404178aae03e47947dd20
bluebottle/bluebottle_drf2/pagination.py
bluebottle/bluebottle_drf2/pagination.py
from rest_framework.pagination import PageNumberPagination class BluebottlePagination(PageNumberPagination): page_size = 10
from rest_framework.pagination import PageNumberPagination class BluebottlePagination(PageNumberPagination): page_size = 10 page_size_query_param = 'page_size'
Make it possible to send a page_size parameter to all paged endpoints.
Make it possible to send a page_size parameter to all paged endpoints. BB-9512 #resolve
Python
bsd-3-clause
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
from rest_framework.pagination import PageNumberPagination class BluebottlePagination(PageNumberPagination): page_size = 10 + page_size_query_param = 'page_size'
Make it possible to send a page_size parameter to all paged endpoints.
## Code Before: from rest_framework.pagination import PageNumberPagination class BluebottlePagination(PageNumberPagination): page_size = 10 ## Instruction: Make it possible to send a page_size parameter to all paged endpoints. ## Code After: from rest_framework.pagination import PageNumberPagination class BluebottlePagination(PageNumberPagination): page_size = 10 page_size_query_param = 'page_size'
from rest_framework.pagination import PageNumberPagination class BluebottlePagination(PageNumberPagination): page_size = 10 + page_size_query_param = 'page_size'
34c38e0cfe5e880e678704c4d473f082787fca64
rest_framework/authtoken/management/commands/drf_create_token.py
rest_framework/authtoken/management/commands/drf_create_token.py
from django.contrib.auth import get_user_model from django.core.management.base import BaseCommand from rest_framework.authtoken.models import Token UserModel = get_user_model() class Command(BaseCommand): help = 'Create DRF Token for a given user' def create_user_token(self, username): user = UserModel._default_manager.get_by_natural_key(username) token = Token.objects.get_or_create(user=user) return token[0] def add_arguments(self, parser): parser.add_argument('username', type=str, nargs='+') def handle(self, *args, **options): username = options['username'] try: token = self.create_user_token(username) except UserModel.DoesNotExist: print('Cannot create the Token: user {0} does not exist'.format( username )) print('Generated token {0} for user {1}'.format(token.key, username))
from django.contrib.auth import get_user_model from django.core.management.base import BaseCommand, CommandError from rest_framework.authtoken.models import Token UserModel = get_user_model() class Command(BaseCommand): help = 'Create DRF Token for a given user' def create_user_token(self, username): user = UserModel._default_manager.get_by_natural_key(username) token = Token.objects.get_or_create(user=user) return token[0] def add_arguments(self, parser): parser.add_argument('username', type=str, nargs='+') def handle(self, *args, **options): username = options['username'] try: token = self.create_user_token(username) except UserModel.DoesNotExist: raise CommandError( 'Cannot create the Token: user {0} does not exist'.format( username) ) self.stdout.write( 'Generated token {0} for user {1}'.format(token.key, username))
Use self.stdout and CommandError to print output
Use self.stdout and CommandError to print output
Python
bsd-2-clause
dmwyatt/django-rest-framework,ossanna16/django-rest-framework,jpadilla/django-rest-framework,tomchristie/django-rest-framework,jpadilla/django-rest-framework,kgeorgy/django-rest-framework,kgeorgy/django-rest-framework,tomchristie/django-rest-framework,tomchristie/django-rest-framework,ossanna16/django-rest-framework,jpadilla/django-rest-framework,kgeorgy/django-rest-framework,ossanna16/django-rest-framework,davesque/django-rest-framework,davesque/django-rest-framework,davesque/django-rest-framework,dmwyatt/django-rest-framework,dmwyatt/django-rest-framework
from django.contrib.auth import get_user_model - from django.core.management.base import BaseCommand + from django.core.management.base import BaseCommand, CommandError from rest_framework.authtoken.models import Token UserModel = get_user_model() class Command(BaseCommand): help = 'Create DRF Token for a given user' def create_user_token(self, username): user = UserModel._default_manager.get_by_natural_key(username) token = Token.objects.get_or_create(user=user) return token[0] def add_arguments(self, parser): parser.add_argument('username', type=str, nargs='+') def handle(self, *args, **options): username = options['username'] try: token = self.create_user_token(username) except UserModel.DoesNotExist: + raise CommandError( - print('Cannot create the Token: user {0} does not exist'.format( + 'Cannot create the Token: user {0} does not exist'.format( - username + username) - )) + ) + self.stdout.write( - print('Generated token {0} for user {1}'.format(token.key, username)) + 'Generated token {0} for user {1}'.format(token.key, username))
Use self.stdout and CommandError to print output
## Code Before: from django.contrib.auth import get_user_model from django.core.management.base import BaseCommand from rest_framework.authtoken.models import Token UserModel = get_user_model() class Command(BaseCommand): help = 'Create DRF Token for a given user' def create_user_token(self, username): user = UserModel._default_manager.get_by_natural_key(username) token = Token.objects.get_or_create(user=user) return token[0] def add_arguments(self, parser): parser.add_argument('username', type=str, nargs='+') def handle(self, *args, **options): username = options['username'] try: token = self.create_user_token(username) except UserModel.DoesNotExist: print('Cannot create the Token: user {0} does not exist'.format( username )) print('Generated token {0} for user {1}'.format(token.key, username)) ## Instruction: Use self.stdout and CommandError to print output ## Code After: from django.contrib.auth import get_user_model from django.core.management.base import BaseCommand, CommandError from rest_framework.authtoken.models import Token UserModel = get_user_model() class Command(BaseCommand): help = 'Create DRF Token for a given user' def create_user_token(self, username): user = UserModel._default_manager.get_by_natural_key(username) token = Token.objects.get_or_create(user=user) return token[0] def add_arguments(self, parser): parser.add_argument('username', type=str, nargs='+') def handle(self, *args, **options): username = options['username'] try: token = self.create_user_token(username) except UserModel.DoesNotExist: raise CommandError( 'Cannot create the Token: user {0} does not exist'.format( username) ) self.stdout.write( 'Generated token {0} for user {1}'.format(token.key, username))
from django.contrib.auth import get_user_model - from django.core.management.base import BaseCommand + from django.core.management.base import BaseCommand, CommandError ? ++++++++++++++ from rest_framework.authtoken.models import Token UserModel = get_user_model() class Command(BaseCommand): help = 'Create DRF Token for a given user' def create_user_token(self, username): user = UserModel._default_manager.get_by_natural_key(username) token = Token.objects.get_or_create(user=user) return token[0] def add_arguments(self, parser): parser.add_argument('username', type=str, nargs='+') def handle(self, *args, **options): username = options['username'] try: token = self.create_user_token(username) except UserModel.DoesNotExist: + raise CommandError( - print('Cannot create the Token: user {0} does not exist'.format( ? ^^^^^^ + 'Cannot create the Token: user {0} does not exist'.format( ? ^^^^ - username + username) ? ++++ + - )) ? - + ) + self.stdout.write( - print('Generated token {0} for user {1}'.format(token.key, username)) ? ^^^^^^ + 'Generated token {0} for user {1}'.format(token.key, username)) ? ^^^^
26e16c6229f12ca75c4bbf224eb9d1cf3b250b9c
rock/utils.py
rock/utils.py
import StringIO import os from rock.exceptions import ConfigError ROCK_SHELL = os.environ.get('ROCK_SHELL', '/bin/bash -l -c').split() class Shell(object): def __init__(self): self.stdin = StringIO.StringIO() def __enter__(self): return self def __exit__(self, type, value, traceback): self.run() def run(self): if not os.path.isfile(ROCK_SHELL[0]) or not os.access(ROCK_SHELL[0], os.X_OK): raise ConfigError('invalid ROCK_SHELL: %s' % ROCK_SHELL) os.execl(*(ROCK_SHELL + [self.stdin.getvalue()])) def write(self, text): self.stdin.write(text + '\n')
import StringIO import os from rock.exceptions import ConfigError ROCK_SHELL = os.environ.get('ROCK_SHELL', '/bin/bash -l -c').split() def isexecutable(path): return os.path.isfile(path) and os.access(path, os.X_OK) class Shell(object): def __init__(self): self.stdin = StringIO.StringIO() def __enter__(self): return self def __exit__(self, type, value, traceback): self.run() def run(self): if not isexecutable(ROCK_SHELL[0]): raise ConfigError('invalid ROCK_SHELL: %s' % ROCK_SHELL) os.execl(*(ROCK_SHELL + [self.stdin.getvalue()])) def write(self, text): self.stdin.write(text + '\n')
Split isexecutable into its own function
Split isexecutable into its own function
Python
mit
silas/rock,silas/rock,silas/rock,silas/rock,silas/rock,silas/rock,silas/rock,silas/rock
import StringIO import os from rock.exceptions import ConfigError ROCK_SHELL = os.environ.get('ROCK_SHELL', '/bin/bash -l -c').split() + + + def isexecutable(path): + return os.path.isfile(path) and os.access(path, os.X_OK) class Shell(object): def __init__(self): self.stdin = StringIO.StringIO() def __enter__(self): return self def __exit__(self, type, value, traceback): self.run() def run(self): - if not os.path.isfile(ROCK_SHELL[0]) or not os.access(ROCK_SHELL[0], os.X_OK): + if not isexecutable(ROCK_SHELL[0]): raise ConfigError('invalid ROCK_SHELL: %s' % ROCK_SHELL) os.execl(*(ROCK_SHELL + [self.stdin.getvalue()])) def write(self, text): self.stdin.write(text + '\n')
Split isexecutable into its own function
## Code Before: import StringIO import os from rock.exceptions import ConfigError ROCK_SHELL = os.environ.get('ROCK_SHELL', '/bin/bash -l -c').split() class Shell(object): def __init__(self): self.stdin = StringIO.StringIO() def __enter__(self): return self def __exit__(self, type, value, traceback): self.run() def run(self): if not os.path.isfile(ROCK_SHELL[0]) or not os.access(ROCK_SHELL[0], os.X_OK): raise ConfigError('invalid ROCK_SHELL: %s' % ROCK_SHELL) os.execl(*(ROCK_SHELL + [self.stdin.getvalue()])) def write(self, text): self.stdin.write(text + '\n') ## Instruction: Split isexecutable into its own function ## Code After: import StringIO import os from rock.exceptions import ConfigError ROCK_SHELL = os.environ.get('ROCK_SHELL', '/bin/bash -l -c').split() def isexecutable(path): return os.path.isfile(path) and os.access(path, os.X_OK) class Shell(object): def __init__(self): self.stdin = StringIO.StringIO() def __enter__(self): return self def __exit__(self, type, value, traceback): self.run() def run(self): if not isexecutable(ROCK_SHELL[0]): raise ConfigError('invalid ROCK_SHELL: %s' % ROCK_SHELL) os.execl(*(ROCK_SHELL + [self.stdin.getvalue()])) def write(self, text): self.stdin.write(text + '\n')
import StringIO import os from rock.exceptions import ConfigError ROCK_SHELL = os.environ.get('ROCK_SHELL', '/bin/bash -l -c').split() + + + def isexecutable(path): + return os.path.isfile(path) and os.access(path, os.X_OK) class Shell(object): def __init__(self): self.stdin = StringIO.StringIO() def __enter__(self): return self def __exit__(self, type, value, traceback): self.run() def run(self): - if not os.path.isfile(ROCK_SHELL[0]) or not os.access(ROCK_SHELL[0], os.X_OK): + if not isexecutable(ROCK_SHELL[0]): raise ConfigError('invalid ROCK_SHELL: %s' % ROCK_SHELL) os.execl(*(ROCK_SHELL + [self.stdin.getvalue()])) def write(self, text): self.stdin.write(text + '\n')
072d5bf150ff3f8d743a84c636929e7a326bf8ea
src/python/tensorflow_cloud/tuner/constants.py
src/python/tensorflow_cloud/tuner/constants.py
"""Constants definitions for tuner sub module.""" # API definition of Cloud AI Platform Optimizer service OPTIMIZER_API_DOCUMENT_FILE = "api/ml_public_google_rest_v1.json" # By default, the Tuner worker(s) always requests one trial at a time because # we would parallelize the tuning loop themselves as opposed to getting multiple # trial suggestions in one tuning loop. SUGGESTION_COUNT_PER_REQUEST = 1 # Number of tries to retry getting study if it was already created NUM_TRIES_FOR_STUDIES = 3
"""Constants definitions for tuner sub module.""" import os # API definition of Cloud AI Platform Optimizer service OPTIMIZER_API_DOCUMENT_FILE = os.path.join( os.path.dirname(os.path.abspath(__file__)), "api/ml_public_google_rest_v1.json") # By default, the Tuner worker(s) always requests one trial at a time because # we would parallelize the tuning loop themselves as opposed to getting multiple # trial suggestions in one tuning loop. SUGGESTION_COUNT_PER_REQUEST = 1 # Number of tries to retry getting study if it was already created NUM_TRIES_FOR_STUDIES = 3
Fix path to API doc
Fix path to API doc
Python
apache-2.0
tensorflow/cloud,tensorflow/cloud
"""Constants definitions for tuner sub module.""" + import os + # API definition of Cloud AI Platform Optimizer service - OPTIMIZER_API_DOCUMENT_FILE = "api/ml_public_google_rest_v1.json" + OPTIMIZER_API_DOCUMENT_FILE = os.path.join( + os.path.dirname(os.path.abspath(__file__)), + "api/ml_public_google_rest_v1.json") # By default, the Tuner worker(s) always requests one trial at a time because # we would parallelize the tuning loop themselves as opposed to getting multiple # trial suggestions in one tuning loop. SUGGESTION_COUNT_PER_REQUEST = 1 # Number of tries to retry getting study if it was already created NUM_TRIES_FOR_STUDIES = 3
Fix path to API doc
## Code Before: """Constants definitions for tuner sub module.""" # API definition of Cloud AI Platform Optimizer service OPTIMIZER_API_DOCUMENT_FILE = "api/ml_public_google_rest_v1.json" # By default, the Tuner worker(s) always requests one trial at a time because # we would parallelize the tuning loop themselves as opposed to getting multiple # trial suggestions in one tuning loop. SUGGESTION_COUNT_PER_REQUEST = 1 # Number of tries to retry getting study if it was already created NUM_TRIES_FOR_STUDIES = 3 ## Instruction: Fix path to API doc ## Code After: """Constants definitions for tuner sub module.""" import os # API definition of Cloud AI Platform Optimizer service OPTIMIZER_API_DOCUMENT_FILE = os.path.join( os.path.dirname(os.path.abspath(__file__)), "api/ml_public_google_rest_v1.json") # By default, the Tuner worker(s) always requests one trial at a time because # we would parallelize the tuning loop themselves as opposed to getting multiple # trial suggestions in one tuning loop. SUGGESTION_COUNT_PER_REQUEST = 1 # Number of tries to retry getting study if it was already created NUM_TRIES_FOR_STUDIES = 3
"""Constants definitions for tuner sub module.""" + import os + # API definition of Cloud AI Platform Optimizer service - OPTIMIZER_API_DOCUMENT_FILE = "api/ml_public_google_rest_v1.json" + OPTIMIZER_API_DOCUMENT_FILE = os.path.join( + os.path.dirname(os.path.abspath(__file__)), + "api/ml_public_google_rest_v1.json") # By default, the Tuner worker(s) always requests one trial at a time because # we would parallelize the tuning loop themselves as opposed to getting multiple # trial suggestions in one tuning loop. SUGGESTION_COUNT_PER_REQUEST = 1 # Number of tries to retry getting study if it was already created NUM_TRIES_FOR_STUDIES = 3
d6a67a94cacab93463f2a15fc5d2a2fadae2ad83
site/tests/test_unittest.py
site/tests/test_unittest.py
import unittest class IntegerArithmenticTestCase(unittest.TestCase): def testAdd(self): ## test method names begin 'test*' self.assertEqual((1 + 2), 3) self.assertEqual(0 + 1, 1) def testMultiply(self): self.assertEqual((0 * 10), 0) self.assertEqual((5 * 8), 40) unittest.main(exit=False)
import unittest class IntegerArithmeticTestCase(unittest.TestCase): def testAdd(self): ## test method names begin 'test*' self.assertEqual((1 + 2), 3) self.assertEqual(0 + 1, 1) def testMultiply(self): self.assertEqual((0 * 10), 0) self.assertEqual((5 * 8), 40) suite = unittest.TestLoader().loadTestsFromTestCase(IntegerArithmeticTestCase) unittest.TextTestRunner(verbosity=0).run(suite)
Change unittest test into a test suite: it is not run in module __main__
Change unittest test into a test suite: it is not run in module __main__
Python
bsd-3-clause
Hasimir/brython,olemis/brython,JohnDenker/brython,firmlyjin/brython,Isendir/brython,jonathanverner/brython,Mozhuowen/brython,olemis/brython,brython-dev/brython,Lh4cKg/brython,Isendir/brython,kevinmel2000/brython,amrdraz/brython,jonathanverner/brython,molebot/brython,Mozhuowen/brython,jonathanverner/brython,JohnDenker/brython,olemis/brython,jonathanverner/brython,Hasimir/brython,kikocorreoso/brython,brython-dev/brython,Hasimir/brython,firmlyjin/brython,kevinmel2000/brython,Lh4cKg/brython,molebot/brython,amrdraz/brython,rubyinhell/brython,kikocorreoso/brython,Mozhuowen/brython,Isendir/brython,firmlyjin/brython,kevinmel2000/brython,Mozhuowen/brython,firmlyjin/brython,JohnDenker/brython,olemis/brython,molebot/brython,amrdraz/brython,brython-dev/brython,Isendir/brython,JohnDenker/brython,rubyinhell/brython,kikocorreoso/brython,firmlyjin/brython,Hasimir/brython,kevinmel2000/brython,molebot/brython,Lh4cKg/brython,rubyinhell/brython,rubyinhell/brython,amrdraz/brython,Lh4cKg/brython
import unittest - class IntegerArithmenticTestCase(unittest.TestCase): + class IntegerArithmeticTestCase(unittest.TestCase): def testAdd(self): ## test method names begin 'test*' self.assertEqual((1 + 2), 3) self.assertEqual(0 + 1, 1) def testMultiply(self): self.assertEqual((0 * 10), 0) self.assertEqual((5 * 8), 40) - unittest.main(exit=False) + suite = unittest.TestLoader().loadTestsFromTestCase(IntegerArithmeticTestCase) + unittest.TextTestRunner(verbosity=0).run(suite)
Change unittest test into a test suite: it is not run in module __main__
## Code Before: import unittest class IntegerArithmenticTestCase(unittest.TestCase): def testAdd(self): ## test method names begin 'test*' self.assertEqual((1 + 2), 3) self.assertEqual(0 + 1, 1) def testMultiply(self): self.assertEqual((0 * 10), 0) self.assertEqual((5 * 8), 40) unittest.main(exit=False) ## Instruction: Change unittest test into a test suite: it is not run in module __main__ ## Code After: import unittest class IntegerArithmeticTestCase(unittest.TestCase): def testAdd(self): ## test method names begin 'test*' self.assertEqual((1 + 2), 3) self.assertEqual(0 + 1, 1) def testMultiply(self): self.assertEqual((0 * 10), 0) self.assertEqual((5 * 8), 40) suite = unittest.TestLoader().loadTestsFromTestCase(IntegerArithmeticTestCase) unittest.TextTestRunner(verbosity=0).run(suite)
import unittest - class IntegerArithmenticTestCase(unittest.TestCase): ? - + class IntegerArithmeticTestCase(unittest.TestCase): def testAdd(self): ## test method names begin 'test*' self.assertEqual((1 + 2), 3) self.assertEqual(0 + 1, 1) def testMultiply(self): self.assertEqual((0 * 10), 0) self.assertEqual((5 * 8), 40) - unittest.main(exit=False) + suite = unittest.TestLoader().loadTestsFromTestCase(IntegerArithmeticTestCase) + unittest.TextTestRunner(verbosity=0).run(suite)
0e8bd8248cc649637b7c392616887c50986427a0
telethon/__init__.py
telethon/__init__.py
from .client.telegramclient import TelegramClient from .network import connection from .tl import types, functions, custom from .tl.custom import Button from . import version, events, utils, errors __version__ = version.__version__ __all__ = [ 'TelegramClient', 'Button', 'types', 'functions', 'custom', 'errors', 'events', 'utils', 'connection' ]
from .client.telegramclient import TelegramClient from .network import connection from .tl import types, functions, custom from .tl.custom import Button from .tl import patched as _ # import for its side-effects from . import version, events, utils, errors __version__ = version.__version__ __all__ = [ 'TelegramClient', 'Button', 'types', 'functions', 'custom', 'errors', 'events', 'utils', 'connection' ]
Fix patched module was never automatically imported
Fix patched module was never automatically imported Closes #1701. It has to be imported late in the process of `import telethon` for its side-effects.
Python
mit
LonamiWebs/Telethon,LonamiWebs/Telethon,LonamiWebs/Telethon,LonamiWebs/Telethon
from .client.telegramclient import TelegramClient from .network import connection from .tl import types, functions, custom from .tl.custom import Button + from .tl import patched as _ # import for its side-effects from . import version, events, utils, errors __version__ = version.__version__ __all__ = [ 'TelegramClient', 'Button', 'types', 'functions', 'custom', 'errors', 'events', 'utils', 'connection' ]
Fix patched module was never automatically imported
## Code Before: from .client.telegramclient import TelegramClient from .network import connection from .tl import types, functions, custom from .tl.custom import Button from . import version, events, utils, errors __version__ = version.__version__ __all__ = [ 'TelegramClient', 'Button', 'types', 'functions', 'custom', 'errors', 'events', 'utils', 'connection' ] ## Instruction: Fix patched module was never automatically imported ## Code After: from .client.telegramclient import TelegramClient from .network import connection from .tl import types, functions, custom from .tl.custom import Button from .tl import patched as _ # import for its side-effects from . import version, events, utils, errors __version__ = version.__version__ __all__ = [ 'TelegramClient', 'Button', 'types', 'functions', 'custom', 'errors', 'events', 'utils', 'connection' ]
from .client.telegramclient import TelegramClient from .network import connection from .tl import types, functions, custom from .tl.custom import Button + from .tl import patched as _ # import for its side-effects from . import version, events, utils, errors __version__ = version.__version__ __all__ = [ 'TelegramClient', 'Button', 'types', 'functions', 'custom', 'errors', 'events', 'utils', 'connection' ]
b0d9a11292b6d6b17fe8b72d7735d26c47599187
linkatos/printer.py
linkatos/printer.py
def bot_says(channel, text, slack_client): return slack_client.api_call("chat.postMessage", channel=channel, text=text, as_user=True) def compose_explanation(url): return "If you would like {} to be stored please react to it with a :+1:, \ if you would like it to be ignored use :-1:".format(url) def ask_confirmation(message, slack_client): bot_says(message['channel'], compose_explanation(message['url']), slack_client) def compose_url_list(url_cache_list): if len(url_cache_list) == 0: return "The list is empty" list_message = "The list of urls to be confirmed is: \n" for index in range(0, len(url_cache_list)): extra = "{} - {} \n".format(index, url_cache_list[index]['url']) list_message = list_message + extra return list_message def list_cached_urls(url_cache_list, channel, slack_client): bot_says(channel, compose_url_list(url_cache_list), slack_client)
def bot_says(channel, text, slack_client): return slack_client.api_call("chat.postMessage", channel=channel, text=text, as_user=True) def compose_explanation(url): return "If you would like {} to be stored please react to it with a :+1:, \ if you would like it to be ignored use :-1:".format(url) def ask_confirmation(message, slack_client): bot_says(message['channel'], compose_explanation(message['url']), slack_client) def compose_url_list(url_cache_list): if len(url_cache_list) == 0: return "The list is empty" intro = "The list of urls to be confirmed is: \n" options = ["{} - {}".format(i, v['url']) for i, v in enumerate(url_cache_list)] return intro + "\n".join(options) def list_cached_urls(url_cache_list, channel, slack_client): bot_says(channel, compose_url_list(url_cache_list), slack_client)
Change iteration over a collection based on ags' suggestion
refactor: Change iteration over a collection based on ags' suggestion
Python
mit
iwi/linkatos,iwi/linkatos
def bot_says(channel, text, slack_client): return slack_client.api_call("chat.postMessage", channel=channel, text=text, as_user=True) def compose_explanation(url): return "If you would like {} to be stored please react to it with a :+1:, \ if you would like it to be ignored use :-1:".format(url) def ask_confirmation(message, slack_client): bot_says(message['channel'], compose_explanation(message['url']), slack_client) def compose_url_list(url_cache_list): if len(url_cache_list) == 0: return "The list is empty" - list_message = "The list of urls to be confirmed is: \n" + intro = "The list of urls to be confirmed is: \n" + options = ["{} - {}".format(i, v['url']) for i, v in enumerate(url_cache_list)] + return intro + "\n".join(options) - for index in range(0, len(url_cache_list)): - extra = "{} - {} \n".format(index, url_cache_list[index]['url']) - list_message = list_message + extra - - return list_message def list_cached_urls(url_cache_list, channel, slack_client): bot_says(channel, compose_url_list(url_cache_list), slack_client)
Change iteration over a collection based on ags' suggestion
## Code Before: def bot_says(channel, text, slack_client): return slack_client.api_call("chat.postMessage", channel=channel, text=text, as_user=True) def compose_explanation(url): return "If you would like {} to be stored please react to it with a :+1:, \ if you would like it to be ignored use :-1:".format(url) def ask_confirmation(message, slack_client): bot_says(message['channel'], compose_explanation(message['url']), slack_client) def compose_url_list(url_cache_list): if len(url_cache_list) == 0: return "The list is empty" list_message = "The list of urls to be confirmed is: \n" for index in range(0, len(url_cache_list)): extra = "{} - {} \n".format(index, url_cache_list[index]['url']) list_message = list_message + extra return list_message def list_cached_urls(url_cache_list, channel, slack_client): bot_says(channel, compose_url_list(url_cache_list), slack_client) ## Instruction: Change iteration over a collection based on ags' suggestion ## Code After: def bot_says(channel, text, slack_client): return slack_client.api_call("chat.postMessage", channel=channel, text=text, as_user=True) def compose_explanation(url): return "If you would like {} to be stored please react to it with a :+1:, \ if you would like it to be ignored use :-1:".format(url) def ask_confirmation(message, slack_client): bot_says(message['channel'], compose_explanation(message['url']), slack_client) def compose_url_list(url_cache_list): if len(url_cache_list) == 0: return "The list is empty" intro = "The list of urls to be confirmed is: \n" options = ["{} - {}".format(i, v['url']) for i, v in enumerate(url_cache_list)] return intro + "\n".join(options) def list_cached_urls(url_cache_list, channel, slack_client): bot_says(channel, compose_url_list(url_cache_list), slack_client)
def bot_says(channel, text, slack_client): return slack_client.api_call("chat.postMessage", channel=channel, text=text, as_user=True) def compose_explanation(url): return "If you would like {} to be stored please react to it with a :+1:, \ if you would like it to be ignored use :-1:".format(url) def ask_confirmation(message, slack_client): bot_says(message['channel'], compose_explanation(message['url']), slack_client) def compose_url_list(url_cache_list): if len(url_cache_list) == 0: return "The list is empty" - list_message = "The list of urls to be confirmed is: \n" ? - ^ ^^^^^^^^ + intro = "The list of urls to be confirmed is: \n" ? ^ ^^ + options = ["{} - {}".format(i, v['url']) for i, v in enumerate(url_cache_list)] + return intro + "\n".join(options) - for index in range(0, len(url_cache_list)): - extra = "{} - {} \n".format(index, url_cache_list[index]['url']) - list_message = list_message + extra - - return list_message def list_cached_urls(url_cache_list, channel, slack_client): bot_says(channel, compose_url_list(url_cache_list), slack_client)
ff3c3c3842790cc9eb06f1241d6da77f828859c1
IPython/external/qt.py
IPython/external/qt.py
import os # Use PyQt by default until PySide is stable. qt_api = os.environ.get('QT_API', 'pyqt') if qt_api == 'pyqt': # For PySide compatibility, use the new string API that automatically # converts QStrings to unicode Python strings. import sip sip.setapi('QString', 2) from PyQt4 import QtCore, QtGui, QtSvg # Alias PyQt-specific functions for PySide compatibility. QtCore.Signal = QtCore.pyqtSignal QtCore.Slot = QtCore.pyqtSlot else: from PySide import QtCore, QtGui, QtSvg
import os # Available APIs. QT_API_PYQT = 'pyqt' QT_API_PYSIDE = 'pyside' # Use PyQt by default until PySide is stable. QT_API = os.environ.get('QT_API', QT_API_PYQT) if QT_API == QT_API_PYQT: # For PySide compatibility, use the new string API that automatically # converts QStrings to Unicode Python strings. import sip sip.setapi('QString', 2) from PyQt4 import QtCore, QtGui, QtSvg # Alias PyQt-specific functions for PySide compatibility. QtCore.Signal = QtCore.pyqtSignal QtCore.Slot = QtCore.pyqtSlot elif QT_API == QT_API_PYSIDE: from PySide import QtCore, QtGui, QtSvg else: raise RuntimeError('Invalid Qt API "%s"' % QT_API)
Clean up in Qt API switcher.
Clean up in Qt API switcher.
Python
bsd-3-clause
ipython/ipython,ipython/ipython
import os + # Available APIs. + QT_API_PYQT = 'pyqt' + QT_API_PYSIDE = 'pyside' + # Use PyQt by default until PySide is stable. - qt_api = os.environ.get('QT_API', 'pyqt') + QT_API = os.environ.get('QT_API', QT_API_PYQT) - if qt_api == 'pyqt': + if QT_API == QT_API_PYQT: # For PySide compatibility, use the new string API that automatically - # converts QStrings to unicode Python strings. + # converts QStrings to Unicode Python strings. import sip sip.setapi('QString', 2) from PyQt4 import QtCore, QtGui, QtSvg # Alias PyQt-specific functions for PySide compatibility. QtCore.Signal = QtCore.pyqtSignal QtCore.Slot = QtCore.pyqtSlot - else: + elif QT_API == QT_API_PYSIDE: from PySide import QtCore, QtGui, QtSvg + else: + raise RuntimeError('Invalid Qt API "%s"' % QT_API) +
Clean up in Qt API switcher.
## Code Before: import os # Use PyQt by default until PySide is stable. qt_api = os.environ.get('QT_API', 'pyqt') if qt_api == 'pyqt': # For PySide compatibility, use the new string API that automatically # converts QStrings to unicode Python strings. import sip sip.setapi('QString', 2) from PyQt4 import QtCore, QtGui, QtSvg # Alias PyQt-specific functions for PySide compatibility. QtCore.Signal = QtCore.pyqtSignal QtCore.Slot = QtCore.pyqtSlot else: from PySide import QtCore, QtGui, QtSvg ## Instruction: Clean up in Qt API switcher. ## Code After: import os # Available APIs. QT_API_PYQT = 'pyqt' QT_API_PYSIDE = 'pyside' # Use PyQt by default until PySide is stable. QT_API = os.environ.get('QT_API', QT_API_PYQT) if QT_API == QT_API_PYQT: # For PySide compatibility, use the new string API that automatically # converts QStrings to Unicode Python strings. import sip sip.setapi('QString', 2) from PyQt4 import QtCore, QtGui, QtSvg # Alias PyQt-specific functions for PySide compatibility. QtCore.Signal = QtCore.pyqtSignal QtCore.Slot = QtCore.pyqtSlot elif QT_API == QT_API_PYSIDE: from PySide import QtCore, QtGui, QtSvg else: raise RuntimeError('Invalid Qt API "%s"' % QT_API)
import os + # Available APIs. + QT_API_PYQT = 'pyqt' + QT_API_PYSIDE = 'pyside' + # Use PyQt by default until PySide is stable. - qt_api = os.environ.get('QT_API', 'pyqt') + QT_API = os.environ.get('QT_API', QT_API_PYQT) - if qt_api == 'pyqt': + if QT_API == QT_API_PYQT: # For PySide compatibility, use the new string API that automatically - # converts QStrings to unicode Python strings. ? ^ + # converts QStrings to Unicode Python strings. ? ^ import sip sip.setapi('QString', 2) from PyQt4 import QtCore, QtGui, QtSvg # Alias PyQt-specific functions for PySide compatibility. QtCore.Signal = QtCore.pyqtSignal QtCore.Slot = QtCore.pyqtSlot + elif QT_API == QT_API_PYSIDE: + from PySide import QtCore, QtGui, QtSvg + else: - from PySide import QtCore, QtGui, QtSvg + raise RuntimeError('Invalid Qt API "%s"' % QT_API)
05b8ae37fccb152fcdd618b09984f3d1d8beae45
fabfile.py
fabfile.py
import os from fabric.api import * base_path = os.path.dirname(__file__) project_root = "~/Gather" pip_path = os.path.join(project_root, "bin/pip") python_path = os.path.join(project_root, "bin/python") env.user = "gather" env.hosts = ["gather.whouz.com"] def update_from_github(): with cd(project_root): run("git pull") def update_pip_requirements(): with cd(project_root): run("%s install -r requirements.txt" % pip_path) def migrate_databases(): with cd(project_root): run("%s manage.py db upgrade" % python_path) def reload_nginx(): _current_user = env.user env.user = 'root' run("/etc/init.d/nginx reload") env.user = _current_user def restart_gunicorn(): _current_user = env.user env.user = 'root' run("supervisorctl reload") env.user = _current_user def reload_gunicorn(): run("kill -HUP `cat /tmp/gather.pid`") def update(): update_from_github() reload_gunicorn() def fullyupdate(): update_from_github() update_pip_requirements() migrate_databases() reload_nginx() reload_gunicorn()
import os from fabric.api import * base_path = os.path.dirname(__file__) project_root = "~/Gather" pip_path = os.path.join(project_root, "bin/pip") python_path = os.path.join(project_root, "bin/python") env.user = "gather" env.hosts = ["gather.whouz.com"] def update_from_github(): with cd(project_root): run("git pull") def update_pip_requirements(): with cd(project_root): run("%s install -r requirements.txt" % pip_path) def migrate_databases(): with cd(project_root): run("%s manage.py db upgrade" % python_path) def reload_nginx(): _current_user = env.user env.user = 'root' run("/etc/init.d/nginx reload") env.user = _current_user def restart_gunicorn(): _current_user = env.user env.user = 'root' run("supervisorctl reload") env.user = _current_user def reload_gunicorn(): run("kill -HUP `cat /tmp/gather.pid`") def update(): update_from_github() migrate_databases() reload_gunicorn() def fullyupdate(): update_from_github() update_pip_requirements() migrate_databases() reload_nginx() reload_gunicorn()
Migrate database for small update
Migrate database for small update
Python
mit
whtsky/Gather,whtsky/Gather
import os from fabric.api import * base_path = os.path.dirname(__file__) project_root = "~/Gather" pip_path = os.path.join(project_root, "bin/pip") python_path = os.path.join(project_root, "bin/python") env.user = "gather" env.hosts = ["gather.whouz.com"] def update_from_github(): with cd(project_root): run("git pull") def update_pip_requirements(): with cd(project_root): run("%s install -r requirements.txt" % pip_path) def migrate_databases(): with cd(project_root): run("%s manage.py db upgrade" % python_path) def reload_nginx(): _current_user = env.user env.user = 'root' run("/etc/init.d/nginx reload") env.user = _current_user def restart_gunicorn(): _current_user = env.user env.user = 'root' run("supervisorctl reload") env.user = _current_user def reload_gunicorn(): run("kill -HUP `cat /tmp/gather.pid`") def update(): update_from_github() + migrate_databases() reload_gunicorn() def fullyupdate(): update_from_github() update_pip_requirements() migrate_databases() reload_nginx() reload_gunicorn()
Migrate database for small update
## Code Before: import os from fabric.api import * base_path = os.path.dirname(__file__) project_root = "~/Gather" pip_path = os.path.join(project_root, "bin/pip") python_path = os.path.join(project_root, "bin/python") env.user = "gather" env.hosts = ["gather.whouz.com"] def update_from_github(): with cd(project_root): run("git pull") def update_pip_requirements(): with cd(project_root): run("%s install -r requirements.txt" % pip_path) def migrate_databases(): with cd(project_root): run("%s manage.py db upgrade" % python_path) def reload_nginx(): _current_user = env.user env.user = 'root' run("/etc/init.d/nginx reload") env.user = _current_user def restart_gunicorn(): _current_user = env.user env.user = 'root' run("supervisorctl reload") env.user = _current_user def reload_gunicorn(): run("kill -HUP `cat /tmp/gather.pid`") def update(): update_from_github() reload_gunicorn() def fullyupdate(): update_from_github() update_pip_requirements() migrate_databases() reload_nginx() reload_gunicorn() ## Instruction: Migrate database for small update ## Code After: import os from fabric.api import * base_path = os.path.dirname(__file__) project_root = "~/Gather" pip_path = os.path.join(project_root, "bin/pip") python_path = os.path.join(project_root, "bin/python") env.user = "gather" env.hosts = ["gather.whouz.com"] def update_from_github(): with cd(project_root): run("git pull") def update_pip_requirements(): with cd(project_root): run("%s install -r requirements.txt" % pip_path) def migrate_databases(): with cd(project_root): run("%s manage.py db upgrade" % python_path) def reload_nginx(): _current_user = env.user env.user = 'root' run("/etc/init.d/nginx reload") env.user = _current_user def restart_gunicorn(): _current_user = env.user env.user = 'root' run("supervisorctl reload") env.user = _current_user def reload_gunicorn(): run("kill -HUP `cat /tmp/gather.pid`") def update(): update_from_github() migrate_databases() reload_gunicorn() def fullyupdate(): update_from_github() update_pip_requirements() migrate_databases() reload_nginx() reload_gunicorn()
import os from fabric.api import * base_path = os.path.dirname(__file__) project_root = "~/Gather" pip_path = os.path.join(project_root, "bin/pip") python_path = os.path.join(project_root, "bin/python") env.user = "gather" env.hosts = ["gather.whouz.com"] def update_from_github(): with cd(project_root): run("git pull") def update_pip_requirements(): with cd(project_root): run("%s install -r requirements.txt" % pip_path) def migrate_databases(): with cd(project_root): run("%s manage.py db upgrade" % python_path) def reload_nginx(): _current_user = env.user env.user = 'root' run("/etc/init.d/nginx reload") env.user = _current_user def restart_gunicorn(): _current_user = env.user env.user = 'root' run("supervisorctl reload") env.user = _current_user def reload_gunicorn(): run("kill -HUP `cat /tmp/gather.pid`") def update(): update_from_github() + migrate_databases() reload_gunicorn() def fullyupdate(): update_from_github() update_pip_requirements() migrate_databases() reload_nginx() reload_gunicorn()
beac0323253454f343b32d42d8c065cfc4fcc04f
src/epiweb/apps/reminder/models.py
src/epiweb/apps/reminder/models.py
import datetime from django.db import models from django.contrib.auth.models import User from django.db.models.signals import post_save class Reminder(models.Model): user = models.ForeignKey(User, unique=True) last_reminder = models.DateTimeField() next_reminder = models.DateField() wday = models.IntegerField() active = models.BooleanField() def add_reminder(sender, **kwargs): instance = kwargs.get('instance', None) try: reminder = Reminder.objects.get(user=instance) except Reminder.DoesNotExist: now = datetime.datetime.now() next = now + datetime.timedelta(days=7) reminder = Reminder() reminder.user = instance reminder.last_reminder = now reminder.next_reminder = next reminder.wday = now.timetuple().tm_wday reminder.active = True reminder.save() post_save.connect(add_reminder, sender=User)
import datetime from django.db import models from django.contrib.auth.models import User from django.db.models.signals import post_save _ = lambda x: x # Reference: http://docs.python.org/library/time.html # - tm_wday => range [0,6], Monday is 0 MONDAY = 0 TUESDAY = 1 WEDNESDAY = 2 THURSDAY = 3 FRIDAY = 4 SATURDAY = 5 SUNDAY = 6 DAYS = ( (MONDAY, _('Monday')), (TUESDAY, _('Tuesday')), (WEDNESDAY, _('Wednesday')), (THURSDAY, _('Thursday')), (FRIDAY, _('Friday')), (SATURDAY, _('Saturday')), (SUNDAY, _('Sunday')) ) class Reminder(models.Model): user = models.ForeignKey(User, unique=True) last_reminder = models.DateTimeField() next_reminder = models.DateField() wday = models.IntegerField(choices=DAYS, verbose_name="Day", default=MONDAY) active = models.BooleanField() def add_reminder(sender, **kwargs): instance = kwargs.get('instance', None) try: reminder = Reminder.objects.get(user=instance) except Reminder.DoesNotExist: now = datetime.datetime.now() next = now + datetime.timedelta(days=7) reminder = Reminder() reminder.user = instance reminder.last_reminder = now reminder.next_reminder = next reminder.wday = now.timetuple().tm_wday reminder.active = True reminder.save() post_save.connect(add_reminder, sender=User)
Set available options for weekday field of reminder's model
Set available options for weekday field of reminder's model
Python
agpl-3.0
ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website
import datetime from django.db import models from django.contrib.auth.models import User from django.db.models.signals import post_save + _ = lambda x: x + + # Reference: http://docs.python.org/library/time.html + # - tm_wday => range [0,6], Monday is 0 + MONDAY = 0 + TUESDAY = 1 + WEDNESDAY = 2 + THURSDAY = 3 + FRIDAY = 4 + SATURDAY = 5 + SUNDAY = 6 + + DAYS = ( + (MONDAY, _('Monday')), + (TUESDAY, _('Tuesday')), + (WEDNESDAY, _('Wednesday')), + (THURSDAY, _('Thursday')), + (FRIDAY, _('Friday')), + (SATURDAY, _('Saturday')), + (SUNDAY, _('Sunday')) + ) + class Reminder(models.Model): user = models.ForeignKey(User, unique=True) last_reminder = models.DateTimeField() next_reminder = models.DateField() - wday = models.IntegerField() + wday = models.IntegerField(choices=DAYS, verbose_name="Day", + default=MONDAY) active = models.BooleanField() def add_reminder(sender, **kwargs): instance = kwargs.get('instance', None) try: reminder = Reminder.objects.get(user=instance) except Reminder.DoesNotExist: now = datetime.datetime.now() next = now + datetime.timedelta(days=7) reminder = Reminder() reminder.user = instance reminder.last_reminder = now reminder.next_reminder = next reminder.wday = now.timetuple().tm_wday reminder.active = True reminder.save() post_save.connect(add_reminder, sender=User)
Set available options for weekday field of reminder's model
## Code Before: import datetime from django.db import models from django.contrib.auth.models import User from django.db.models.signals import post_save class Reminder(models.Model): user = models.ForeignKey(User, unique=True) last_reminder = models.DateTimeField() next_reminder = models.DateField() wday = models.IntegerField() active = models.BooleanField() def add_reminder(sender, **kwargs): instance = kwargs.get('instance', None) try: reminder = Reminder.objects.get(user=instance) except Reminder.DoesNotExist: now = datetime.datetime.now() next = now + datetime.timedelta(days=7) reminder = Reminder() reminder.user = instance reminder.last_reminder = now reminder.next_reminder = next reminder.wday = now.timetuple().tm_wday reminder.active = True reminder.save() post_save.connect(add_reminder, sender=User) ## Instruction: Set available options for weekday field of reminder's model ## Code After: import datetime from django.db import models from django.contrib.auth.models import User from django.db.models.signals import post_save _ = lambda x: x # Reference: http://docs.python.org/library/time.html # - tm_wday => range [0,6], Monday is 0 MONDAY = 0 TUESDAY = 1 WEDNESDAY = 2 THURSDAY = 3 FRIDAY = 4 SATURDAY = 5 SUNDAY = 6 DAYS = ( (MONDAY, _('Monday')), (TUESDAY, _('Tuesday')), (WEDNESDAY, _('Wednesday')), (THURSDAY, _('Thursday')), (FRIDAY, _('Friday')), (SATURDAY, _('Saturday')), (SUNDAY, _('Sunday')) ) class Reminder(models.Model): user = models.ForeignKey(User, unique=True) last_reminder = models.DateTimeField() next_reminder = models.DateField() wday = models.IntegerField(choices=DAYS, verbose_name="Day", default=MONDAY) active = models.BooleanField() def add_reminder(sender, **kwargs): instance = kwargs.get('instance', None) try: reminder = Reminder.objects.get(user=instance) except Reminder.DoesNotExist: now = datetime.datetime.now() next = now + datetime.timedelta(days=7) reminder = Reminder() reminder.user = instance reminder.last_reminder = now reminder.next_reminder = next reminder.wday = now.timetuple().tm_wday reminder.active = True reminder.save() post_save.connect(add_reminder, sender=User)
import datetime from django.db import models from django.contrib.auth.models import User from django.db.models.signals import post_save + _ = lambda x: x + + # Reference: http://docs.python.org/library/time.html + # - tm_wday => range [0,6], Monday is 0 + MONDAY = 0 + TUESDAY = 1 + WEDNESDAY = 2 + THURSDAY = 3 + FRIDAY = 4 + SATURDAY = 5 + SUNDAY = 6 + + DAYS = ( + (MONDAY, _('Monday')), + (TUESDAY, _('Tuesday')), + (WEDNESDAY, _('Wednesday')), + (THURSDAY, _('Thursday')), + (FRIDAY, _('Friday')), + (SATURDAY, _('Saturday')), + (SUNDAY, _('Sunday')) + ) + class Reminder(models.Model): user = models.ForeignKey(User, unique=True) last_reminder = models.DateTimeField() next_reminder = models.DateField() - wday = models.IntegerField() + wday = models.IntegerField(choices=DAYS, verbose_name="Day", + default=MONDAY) active = models.BooleanField() def add_reminder(sender, **kwargs): instance = kwargs.get('instance', None) try: reminder = Reminder.objects.get(user=instance) except Reminder.DoesNotExist: now = datetime.datetime.now() next = now + datetime.timedelta(days=7) reminder = Reminder() reminder.user = instance reminder.last_reminder = now reminder.next_reminder = next reminder.wday = now.timetuple().tm_wday reminder.active = True reminder.save() post_save.connect(add_reminder, sender=User)
3faf3a9debc0fad175ca032f3eb0880defbd0cdb
akaudit/clidriver.py
akaudit/clidriver.py
import sys import argparse from akaudit.audit import Auditer def main(argv = sys.argv, log = sys.stderr): parser = argparse.ArgumentParser(description='Audit who has access to your homes.', formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument('-l', '--log', default='info', help='Log level') args = parser.parse_args() auditer = Auditer() auditer.run_audit(args) if __name__ == "__main__": main(sys.argv[1:])
import sys import argparse from akaudit.audit import Auditer def main(argv = sys.argv, log = sys.stderr): parser = argparse.ArgumentParser(description='Audit who has access to your homes.', formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument('-l', '--log', default='info', help='Log level') parser.add_argument('-i', '--interactive', help='Interactive mode (prompts asking if to delete each key)', action="store_true") args = parser.parse_args() auditer = Auditer() auditer.run_audit(args) if __name__ == "__main__": main(sys.argv[1:])
Add argument option for --interactive.
Add argument option for --interactive.
Python
apache-2.0
flaccid/akaudit
import sys import argparse from akaudit.audit import Auditer def main(argv = sys.argv, log = sys.stderr): parser = argparse.ArgumentParser(description='Audit who has access to your homes.', formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument('-l', '--log', default='info', help='Log level') + parser.add_argument('-i', '--interactive', help='Interactive mode (prompts asking if to delete each key)', action="store_true") args = parser.parse_args() + auditer = Auditer() auditer.run_audit(args) if __name__ == "__main__": main(sys.argv[1:])
Add argument option for --interactive.
## Code Before: import sys import argparse from akaudit.audit import Auditer def main(argv = sys.argv, log = sys.stderr): parser = argparse.ArgumentParser(description='Audit who has access to your homes.', formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument('-l', '--log', default='info', help='Log level') args = parser.parse_args() auditer = Auditer() auditer.run_audit(args) if __name__ == "__main__": main(sys.argv[1:]) ## Instruction: Add argument option for --interactive. ## Code After: import sys import argparse from akaudit.audit import Auditer def main(argv = sys.argv, log = sys.stderr): parser = argparse.ArgumentParser(description='Audit who has access to your homes.', formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument('-l', '--log', default='info', help='Log level') parser.add_argument('-i', '--interactive', help='Interactive mode (prompts asking if to delete each key)', action="store_true") args = parser.parse_args() auditer = Auditer() auditer.run_audit(args) if __name__ == "__main__": main(sys.argv[1:])
import sys import argparse from akaudit.audit import Auditer def main(argv = sys.argv, log = sys.stderr): parser = argparse.ArgumentParser(description='Audit who has access to your homes.', formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument('-l', '--log', default='info', help='Log level') + parser.add_argument('-i', '--interactive', help='Interactive mode (prompts asking if to delete each key)', action="store_true") args = parser.parse_args() + auditer = Auditer() auditer.run_audit(args) if __name__ == "__main__": main(sys.argv[1:])
47d950b882c01820db7fe99431526487d88622db
tasks.py
tasks.py
import os from invocations.docs import docs, www, sites, watch_docs from invocations.testing import test, coverage, integration, watch_tests from invocations.packaging import vendorize, release from invoke import Collection from invoke.util import LOG_FORMAT ns = Collection( test, coverage, integration, vendorize, release, www, docs, sites, watch_docs, watch_tests ) ns.configure({ 'tests': { 'logformat': LOG_FORMAT, 'package': 'invoke', }, 'packaging': { 'sign': True, 'wheel': True, # Because of PyYAML's dual source nonsense =/ 'dual_wheels': True, 'changelog_file': os.path.join( www.configuration()['sphinx']['source'], 'changelog.rst', ), }, })
import os from invocations.docs import docs, www, sites, watch_docs from invocations.testing import test, coverage, integration, watch_tests from invocations.packaging import vendorize, release from invoke import Collection from invoke.util import LOG_FORMAT ns = Collection( test, coverage, integration, vendorize, release, www, docs, sites, watch_docs, watch_tests ) ns.configure({ 'tests': { 'logformat': LOG_FORMAT, 'package': 'invoke', }, 'packaging': { 'sign': True, 'wheel': True, 'check_desc': True, # Because of PyYAML's dual source nonsense =/ 'dual_wheels': True, 'changelog_file': os.path.join( www.configuration()['sphinx']['source'], 'changelog.rst', ), }, })
Check setup.py desc when packaging
Check setup.py desc when packaging
Python
bsd-2-clause
pyinvoke/invoke,pyinvoke/invoke,mkusz/invoke,mkusz/invoke
import os from invocations.docs import docs, www, sites, watch_docs from invocations.testing import test, coverage, integration, watch_tests from invocations.packaging import vendorize, release from invoke import Collection from invoke.util import LOG_FORMAT ns = Collection( test, coverage, integration, vendorize, release, www, docs, sites, watch_docs, watch_tests ) ns.configure({ 'tests': { 'logformat': LOG_FORMAT, 'package': 'invoke', }, 'packaging': { 'sign': True, 'wheel': True, + 'check_desc': True, # Because of PyYAML's dual source nonsense =/ 'dual_wheels': True, 'changelog_file': os.path.join( www.configuration()['sphinx']['source'], 'changelog.rst', ), }, })
Check setup.py desc when packaging
## Code Before: import os from invocations.docs import docs, www, sites, watch_docs from invocations.testing import test, coverage, integration, watch_tests from invocations.packaging import vendorize, release from invoke import Collection from invoke.util import LOG_FORMAT ns = Collection( test, coverage, integration, vendorize, release, www, docs, sites, watch_docs, watch_tests ) ns.configure({ 'tests': { 'logformat': LOG_FORMAT, 'package': 'invoke', }, 'packaging': { 'sign': True, 'wheel': True, # Because of PyYAML's dual source nonsense =/ 'dual_wheels': True, 'changelog_file': os.path.join( www.configuration()['sphinx']['source'], 'changelog.rst', ), }, }) ## Instruction: Check setup.py desc when packaging ## Code After: import os from invocations.docs import docs, www, sites, watch_docs from invocations.testing import test, coverage, integration, watch_tests from invocations.packaging import vendorize, release from invoke import Collection from invoke.util import LOG_FORMAT ns = Collection( test, coverage, integration, vendorize, release, www, docs, sites, watch_docs, watch_tests ) ns.configure({ 'tests': { 'logformat': LOG_FORMAT, 'package': 'invoke', }, 'packaging': { 'sign': True, 'wheel': True, 'check_desc': True, # Because of PyYAML's dual source nonsense =/ 'dual_wheels': True, 'changelog_file': os.path.join( www.configuration()['sphinx']['source'], 'changelog.rst', ), }, })
import os from invocations.docs import docs, www, sites, watch_docs from invocations.testing import test, coverage, integration, watch_tests from invocations.packaging import vendorize, release from invoke import Collection from invoke.util import LOG_FORMAT ns = Collection( test, coverage, integration, vendorize, release, www, docs, sites, watch_docs, watch_tests ) ns.configure({ 'tests': { 'logformat': LOG_FORMAT, 'package': 'invoke', }, 'packaging': { 'sign': True, 'wheel': True, + 'check_desc': True, # Because of PyYAML's dual source nonsense =/ 'dual_wheels': True, 'changelog_file': os.path.join( www.configuration()['sphinx']['source'], 'changelog.rst', ), }, })
2430d4ae362ca22ebff83b405355d60343b3a0c1
non_iterable_example/_5_context.py
non_iterable_example/_5_context.py
def print_numbers(numbers): for n in numbers: print(n) if random: numbers = 1 print_numbers(numbers) else: numbers = 1, 2, 3 print_numbers(numbers)
def print_numbers(flag, numbers): if flag: for n in numbers: print(n) if random: numbers = 1 print_numbers(False, numbers) else: numbers = 1, 2, 3 print_numbers(True, numbers)
Modify example to emphasise importance of context.
Modify example to emphasise importance of context.
Python
unlicense
markshannon/buggy_code
- def print_numbers(numbers): + def print_numbers(flag, numbers): + if flag: - for n in numbers: + for n in numbers: - print(n) + print(n) if random: numbers = 1 - print_numbers(numbers) + print_numbers(False, numbers) else: numbers = 1, 2, 3 - print_numbers(numbers) + print_numbers(True, numbers) +
Modify example to emphasise importance of context.
## Code Before: def print_numbers(numbers): for n in numbers: print(n) if random: numbers = 1 print_numbers(numbers) else: numbers = 1, 2, 3 print_numbers(numbers) ## Instruction: Modify example to emphasise importance of context. ## Code After: def print_numbers(flag, numbers): if flag: for n in numbers: print(n) if random: numbers = 1 print_numbers(False, numbers) else: numbers = 1, 2, 3 print_numbers(True, numbers)
- def print_numbers(numbers): + def print_numbers(flag, numbers): ? ++++++ + if flag: - for n in numbers: + for n in numbers: ? ++++ - print(n) + print(n) ? ++++ if random: numbers = 1 - print_numbers(numbers) + print_numbers(False, numbers) ? +++++++ else: numbers = 1, 2, 3 - print_numbers(numbers) + print_numbers(True, numbers) ? ++++++ +
bb195d3290d2e9921df8b989ac0d2123a6b9a7f8
server/run.py
server/run.py
"""Run a server that takes all GET requests and dumps them.""" from flask import Flask, request, send_from_directory from flask_cors import CORS from w3lib.html import replace_entities app = Flask(__name__) CORS(app) @app.route('/') def route(): """Get all GET and POST requests and dump them to logs.""" # Print, log, and return. print(request.url) with open("cap.log", "a") as f: f.write(replace_entities(str(request.url)) + "\n") with open("key.log", "a") as f: if "c" in request.args: keys = replace_entities(request.args.get('c')) f.write(keys + '\n') return "WARNING: This site exists to demonstrate a 'capture server' for a penetration tester. Every GET request you send to it will be logged and recorded. Old logs will be deleted after some time, but information you send here is not safe. Use this site for educational purposes only! I am not responsible for any damages caused, as this site will be taken down as frequently as possible to reduce damages." # @app.route('/<path:path>') # def staticserve(path): # """Serve a file from your static directory.""" # return app.send_static_file(path) if __name__ == "__main__": app.run(host='0.0.0.0', port=80)
"""Run a server that takes all GET requests and dumps them.""" from json import loads from flask import Flask, request, send_from_directory from flask_cors import CORS from w3lib.html import replace_entities app = Flask(__name__) CORS(app) @app.route('/') def route(): """Get all GET and POST requests and dump them to logs.""" # Print, log, and return. print(request.url) with open("cap.log", "a") as f: f.write(replace_entities(str(request.url)) + "\n") with open("key.log", "a") as f: if "c" in request.args: keys = loads(replace_entities(request.args.get('c'))) try: keys = "".join(keys) except Exception: pass f.write(keys + '\n') return "WARNING: This site exists to demonstrate a 'capture server' for a penetration tester. Every GET request you send to it will be logged and recorded. Old logs will be deleted after some time, but information you send here is not safe. Use this site for educational purposes only! I am not responsible for any damages caused, as this site will be taken down as frequently as possible to reduce damages." # @app.route('/<path:path>') # def staticserve(path): # """Serve a file from your static directory.""" # return app.send_static_file(path) if __name__ == "__main__": app.run(host='0.0.0.0', port=80)
Make it yet even easier to read key logger output
Make it yet even easier to read key logger output
Python
apache-2.0
umisc/listenserv
"""Run a server that takes all GET requests and dumps them.""" + + from json import loads from flask import Flask, request, send_from_directory from flask_cors import CORS from w3lib.html import replace_entities app = Flask(__name__) CORS(app) @app.route('/') def route(): """Get all GET and POST requests and dump them to logs.""" # Print, log, and return. print(request.url) with open("cap.log", "a") as f: f.write(replace_entities(str(request.url)) + "\n") with open("key.log", "a") as f: if "c" in request.args: - keys = replace_entities(request.args.get('c')) + keys = loads(replace_entities(request.args.get('c'))) + + try: + keys = "".join(keys) + except Exception: + pass + f.write(keys + '\n') return "WARNING: This site exists to demonstrate a 'capture server' for a penetration tester. Every GET request you send to it will be logged and recorded. Old logs will be deleted after some time, but information you send here is not safe. Use this site for educational purposes only! I am not responsible for any damages caused, as this site will be taken down as frequently as possible to reduce damages." # @app.route('/<path:path>') # def staticserve(path): # """Serve a file from your static directory.""" # return app.send_static_file(path) if __name__ == "__main__": app.run(host='0.0.0.0', port=80)
Make it yet even easier to read key logger output
## Code Before: """Run a server that takes all GET requests and dumps them.""" from flask import Flask, request, send_from_directory from flask_cors import CORS from w3lib.html import replace_entities app = Flask(__name__) CORS(app) @app.route('/') def route(): """Get all GET and POST requests and dump them to logs.""" # Print, log, and return. print(request.url) with open("cap.log", "a") as f: f.write(replace_entities(str(request.url)) + "\n") with open("key.log", "a") as f: if "c" in request.args: keys = replace_entities(request.args.get('c')) f.write(keys + '\n') return "WARNING: This site exists to demonstrate a 'capture server' for a penetration tester. Every GET request you send to it will be logged and recorded. Old logs will be deleted after some time, but information you send here is not safe. Use this site for educational purposes only! I am not responsible for any damages caused, as this site will be taken down as frequently as possible to reduce damages." # @app.route('/<path:path>') # def staticserve(path): # """Serve a file from your static directory.""" # return app.send_static_file(path) if __name__ == "__main__": app.run(host='0.0.0.0', port=80) ## Instruction: Make it yet even easier to read key logger output ## Code After: """Run a server that takes all GET requests and dumps them.""" from json import loads from flask import Flask, request, send_from_directory from flask_cors import CORS from w3lib.html import replace_entities app = Flask(__name__) CORS(app) @app.route('/') def route(): """Get all GET and POST requests and dump them to logs.""" # Print, log, and return. print(request.url) with open("cap.log", "a") as f: f.write(replace_entities(str(request.url)) + "\n") with open("key.log", "a") as f: if "c" in request.args: keys = loads(replace_entities(request.args.get('c'))) try: keys = "".join(keys) except Exception: pass f.write(keys + '\n') return "WARNING: This site exists to demonstrate a 'capture server' for a penetration tester. Every GET request you send to it will be logged and recorded. Old logs will be deleted after some time, but information you send here is not safe. Use this site for educational purposes only! I am not responsible for any damages caused, as this site will be taken down as frequently as possible to reduce damages." # @app.route('/<path:path>') # def staticserve(path): # """Serve a file from your static directory.""" # return app.send_static_file(path) if __name__ == "__main__": app.run(host='0.0.0.0', port=80)
"""Run a server that takes all GET requests and dumps them.""" + + from json import loads from flask import Flask, request, send_from_directory from flask_cors import CORS from w3lib.html import replace_entities app = Flask(__name__) CORS(app) @app.route('/') def route(): """Get all GET and POST requests and dump them to logs.""" # Print, log, and return. print(request.url) with open("cap.log", "a") as f: f.write(replace_entities(str(request.url)) + "\n") with open("key.log", "a") as f: if "c" in request.args: - keys = replace_entities(request.args.get('c')) + keys = loads(replace_entities(request.args.get('c'))) ? ++++++ + + + try: + keys = "".join(keys) + except Exception: + pass + f.write(keys + '\n') return "WARNING: This site exists to demonstrate a 'capture server' for a penetration tester. Every GET request you send to it will be logged and recorded. Old logs will be deleted after some time, but information you send here is not safe. Use this site for educational purposes only! I am not responsible for any damages caused, as this site will be taken down as frequently as possible to reduce damages." # @app.route('/<path:path>') # def staticserve(path): # """Serve a file from your static directory.""" # return app.send_static_file(path) if __name__ == "__main__": app.run(host='0.0.0.0', port=80)
02d67008d0f0bdc205ca9168384c4a951c106a28
nintendo/common/transport.py
nintendo/common/transport.py
import socket class Socket: TCP = 0 UDP = 1 def __init__(self, type): if type == self.TCP: self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP) else: self.s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP) self.s.setblocking(False) def connect(self, host, port): self.s.connect((host, port)) def close(self): self.s.close() def send(self, data): self.s.sendall(data) def recv(self, num): try: return self.s.recv(num) except BlockingIOError: pass def get_address(self): return self.s.getsockname()[0] def get_port(self): return self.s.getsockname()[1]
import socket class Socket: TCP = 0 UDP = 1 def __init__(self, type): if type == self.TCP: self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP) else: self.s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP) self.s.setblocking(False) def connect(self, host, port): self.s.connect((host, port)) def close(self): self.s.close() def send(self, data): self.s.sendall(data) def recv(self, num): try: return self.s.recv(num) except BlockingIOError: pass def bind(self, addr=("", 0)): self.s.bind(addr) def sendto(self, data, addr): self.s.sendto(data, addr) def recvfrom(self, num): try: return self.s.recvfrom(num) except BlockingIOError: return None, None def get_address(self): return self.s.getsockname()[0] def get_port(self): return self.s.getsockname()[1]
Add a few functions to Socket class
Add a few functions to Socket class
Python
mit
Kinnay/NintendoClients
import socket class Socket: TCP = 0 UDP = 1 def __init__(self, type): if type == self.TCP: self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP) else: self.s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP) self.s.setblocking(False) def connect(self, host, port): self.s.connect((host, port)) def close(self): self.s.close() def send(self, data): self.s.sendall(data) def recv(self, num): try: return self.s.recv(num) except BlockingIOError: pass + def bind(self, addr=("", 0)): self.s.bind(addr) + def sendto(self, data, addr): self.s.sendto(data, addr) + def recvfrom(self, num): + try: + return self.s.recvfrom(num) + except BlockingIOError: + return None, None + def get_address(self): return self.s.getsockname()[0] def get_port(self): return self.s.getsockname()[1]
Add a few functions to Socket class
## Code Before: import socket class Socket: TCP = 0 UDP = 1 def __init__(self, type): if type == self.TCP: self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP) else: self.s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP) self.s.setblocking(False) def connect(self, host, port): self.s.connect((host, port)) def close(self): self.s.close() def send(self, data): self.s.sendall(data) def recv(self, num): try: return self.s.recv(num) except BlockingIOError: pass def get_address(self): return self.s.getsockname()[0] def get_port(self): return self.s.getsockname()[1] ## Instruction: Add a few functions to Socket class ## Code After: import socket class Socket: TCP = 0 UDP = 1 def __init__(self, type): if type == self.TCP: self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP) else: self.s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP) self.s.setblocking(False) def connect(self, host, port): self.s.connect((host, port)) def close(self): self.s.close() def send(self, data): self.s.sendall(data) def recv(self, num): try: return self.s.recv(num) except BlockingIOError: pass def bind(self, addr=("", 0)): self.s.bind(addr) def sendto(self, data, addr): self.s.sendto(data, addr) def recvfrom(self, num): try: return self.s.recvfrom(num) except BlockingIOError: return None, None def get_address(self): return self.s.getsockname()[0] def get_port(self): return self.s.getsockname()[1]
import socket class Socket: TCP = 0 UDP = 1 def __init__(self, type): if type == self.TCP: self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP) else: self.s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP) self.s.setblocking(False) def connect(self, host, port): self.s.connect((host, port)) def close(self): self.s.close() def send(self, data): self.s.sendall(data) def recv(self, num): try: return self.s.recv(num) except BlockingIOError: pass + def bind(self, addr=("", 0)): self.s.bind(addr) + def sendto(self, data, addr): self.s.sendto(data, addr) + def recvfrom(self, num): + try: + return self.s.recvfrom(num) + except BlockingIOError: + return None, None + def get_address(self): return self.s.getsockname()[0] def get_port(self): return self.s.getsockname()[1]
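A hedged usage sketch for the UDP helpers added above, assuming the class is importable from its path in the record. Because the socket is non-blocking, `recvfrom` returns `(None, None)` until a datagram is queued, so the sketch polls.

```python
from nintendo.common.transport import Socket

server = Socket(Socket.UDP)
server.bind()  # default ("", 0): all interfaces, ephemeral port

client = Socket(Socket.UDP)
client.sendto(b"ping", ("127.0.0.1", server.get_port()))

data, addr = server.recvfrom(4096)
while data is None:  # non-blocking: poll until the datagram lands
    data, addr = server.recvfrom(4096)
print(data, addr)

client.close()
server.close()
```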
e908a2c62be1d937a68b5c602b8cae02633685f7
csunplugged/general/management/commands/updatedata.py
csunplugged/general/management/commands/updatedata.py
"""Module for the custom Django updatedata command.""" from django.core import management class Command(management.base.BaseCommand): """Required command class for the custom Django updatedata command.""" help = "Update all data from content folders for all applications" def add_arguments(self, parser): """Add optional parameter to updatedata command.""" parser.add_argument( "--lite-load", action="store_true", dest="lite_load", help="Perform lite load (only load key content)", ) def handle(self, *args, **options): """Automatically called when the updatedata command is given.""" lite_load = options.get("lite_load") management.call_command("flush", interactive=False) management.call_command("loadresources", lite_load=lite_load) management.call_command("loadtopics", lite_load=lite_load) management.call_command("loadgeneralpages", lite_load=lite_load) management.call_command("loadclassicpages", lite_load=lite_load) management.call_command("loadactivities", lite_load=lite_load) management.call_command("rebuild_search_indexes")
"""Module for the custom Django updatedata command.""" from django.core import management class Command(management.base.BaseCommand): """Required command class for the custom Django updatedata command.""" help = "Update all data from content folders for all applications" def add_arguments(self, parser): """Add optional parameter to updatedata command.""" parser.add_argument( "--lite-load", action="store_true", dest="lite_load", help="Perform lite load (only load key content)", ) def handle(self, *args, **options): """Automatically called when the updatedata command is given.""" lite_load = options.get("lite_load") management.call_command("flush", interactive=False) management.call_command("loadresources", lite_load=lite_load) management.call_command("loadtopics", lite_load=lite_load) management.call_command("loadgeneralpages", lite_load=lite_load) management.call_command("loadclassicpages", lite_load=lite_load) management.call_command("loadactivities", lite_load=lite_load) management.call_command("load_at_a_distance_data", lite_load=lite_load) management.call_command("rebuild_search_indexes")
Load at a distance content in updatedata command
Load at a distance content in updatedata command
Python
mit
uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged
"""Module for the custom Django updatedata command.""" from django.core import management class Command(management.base.BaseCommand): """Required command class for the custom Django updatedata command.""" help = "Update all data from content folders for all applications" def add_arguments(self, parser): """Add optional parameter to updatedata command.""" parser.add_argument( "--lite-load", action="store_true", dest="lite_load", help="Perform lite load (only load key content)", ) def handle(self, *args, **options): """Automatically called when the updatedata command is given.""" lite_load = options.get("lite_load") management.call_command("flush", interactive=False) management.call_command("loadresources", lite_load=lite_load) management.call_command("loadtopics", lite_load=lite_load) management.call_command("loadgeneralpages", lite_load=lite_load) management.call_command("loadclassicpages", lite_load=lite_load) management.call_command("loadactivities", lite_load=lite_load) + management.call_command("load_at_a_distance_data", lite_load=lite_load) management.call_command("rebuild_search_indexes")
Load at a distance content in updatedata command
## Code Before: """Module for the custom Django updatedata command.""" from django.core import management class Command(management.base.BaseCommand): """Required command class for the custom Django updatedata command.""" help = "Update all data from content folders for all applications" def add_arguments(self, parser): """Add optional parameter to updatedata command.""" parser.add_argument( "--lite-load", action="store_true", dest="lite_load", help="Perform lite load (only load key content)", ) def handle(self, *args, **options): """Automatically called when the updatedata command is given.""" lite_load = options.get("lite_load") management.call_command("flush", interactive=False) management.call_command("loadresources", lite_load=lite_load) management.call_command("loadtopics", lite_load=lite_load) management.call_command("loadgeneralpages", lite_load=lite_load) management.call_command("loadclassicpages", lite_load=lite_load) management.call_command("loadactivities", lite_load=lite_load) management.call_command("rebuild_search_indexes") ## Instruction: Load at a distance content in updatadata command ## Code After: """Module for the custom Django updatedata command.""" from django.core import management class Command(management.base.BaseCommand): """Required command class for the custom Django updatedata command.""" help = "Update all data from content folders for all applications" def add_arguments(self, parser): """Add optional parameter to updatedata command.""" parser.add_argument( "--lite-load", action="store_true", dest="lite_load", help="Perform lite load (only load key content)", ) def handle(self, *args, **options): """Automatically called when the updatedata command is given.""" lite_load = options.get("lite_load") management.call_command("flush", interactive=False) management.call_command("loadresources", lite_load=lite_load) management.call_command("loadtopics", lite_load=lite_load) management.call_command("loadgeneralpages", lite_load=lite_load) management.call_command("loadclassicpages", lite_load=lite_load) management.call_command("loadactivities", lite_load=lite_load) management.call_command("load_at_a_distance_data", lite_load=lite_load) management.call_command("rebuild_search_indexes")
"""Module for the custom Django updatedata command.""" from django.core import management class Command(management.base.BaseCommand): """Required command class for the custom Django updatedata command.""" help = "Update all data from content folders for all applications" def add_arguments(self, parser): """Add optional parameter to updatedata command.""" parser.add_argument( "--lite-load", action="store_true", dest="lite_load", help="Perform lite load (only load key content)", ) def handle(self, *args, **options): """Automatically called when the updatedata command is given.""" lite_load = options.get("lite_load") management.call_command("flush", interactive=False) management.call_command("loadresources", lite_load=lite_load) management.call_command("loadtopics", lite_load=lite_load) management.call_command("loadgeneralpages", lite_load=lite_load) management.call_command("loadclassicpages", lite_load=lite_load) management.call_command("loadactivities", lite_load=lite_load) + management.call_command("load_at_a_distance_data", lite_load=lite_load) management.call_command("rebuild_search_indexes")
e7bda027780da26183f84f7af5c50cd37649c76b
functional_tests/remote.py
functional_tests/remote.py
from unipath import Path import subprocess THIS_FOLDER = Path(__file__).parent def reset_database(host): subprocess.check_call(['fab', 'reset_database', '--host={}'.format(host)], cwd=THIS_FOLDER) def create_user(host, user, email, password): subprocess.check_call(['fab', 'create_user:user={},password={},email={}' \ .format(user, password, email), '--host={}'.format(host)], cwd=THIS_FOLDER) def get_sitename(host): return subprocess.check_output(['fab', 'get_sitename', '--host={}'.format(host), '--hide=everything,status'], cwd=THIS_FOLDER).decode().strip() def create_project(host, user, name, description=''): return subprocess.check_output(['fab', 'create_project:user={},name={},description={}'.format(user, name, description), '--host={}'.format(host)], cwd=THIS_FOLDER) def create_action(host, user, text, project=''): return subprocess.check_output(['fab', 'create_action:user={},text={},project={}'.format(user, text, project), '--host={}'.format(host)], cwd=THIS_FOLDER)
from unipath import Path import subprocess THIS_FOLDER = Path(__file__).parent def reset_database(host): subprocess.check_call(['fab', 'reset_database', '--host={}'.format(host), '--hide=everything,status'], cwd=THIS_FOLDER) def create_user(host, user, email, password): subprocess.check_call(['fab', 'create_user:user={},password={},email={}' \ .format(user, password, email), '--host={}'.format(host), '--hide=everything,status'], cwd=THIS_FOLDER) def get_sitename(host): return subprocess.check_output(['fab', 'get_sitename', '--host={}'.format(host), '--hide=everything,status'], cwd=THIS_FOLDER).decode().strip() def create_project(host, user, name, description=''): return subprocess.check_output(['fab', 'create_project:user={},name={},description={}'.format(user, name, description), '--host={}'.format(host)], cwd=THIS_FOLDER) def create_action(host, user, text, project=''): return subprocess.check_output(['fab', 'create_action:user={},text={},project={}'.format(user, text, project), '--host={}'.format(host)], cwd=THIS_FOLDER)
Make running FTs against staging a bit less verbose
Make running FTs against staging a bit less verbose
Python
mit
XeryusTC/projman,XeryusTC/projman,XeryusTC/projman
from unipath import Path import subprocess THIS_FOLDER = Path(__file__).parent def reset_database(host): - subprocess.check_call(['fab', 'reset_database', '--host={}'.format(host)], + subprocess.check_call(['fab', 'reset_database', '--host={}'.format(host), + '--hide=everything,status'], cwd=THIS_FOLDER) def create_user(host, user, email, password): subprocess.check_call(['fab', 'create_user:user={},password={},email={}' \ - .format(user, password, email), '--host={}'.format(host)], + .format(user, password, email), '--host={}'.format(host), + '--hide=everything,status'], cwd=THIS_FOLDER) def get_sitename(host): return subprocess.check_output(['fab', 'get_sitename', '--host={}'.format(host), '--hide=everything,status'], cwd=THIS_FOLDER).decode().strip() def create_project(host, user, name, description=''): return subprocess.check_output(['fab', 'create_project:user={},name={},description={}'.format(user, name, description), '--host={}'.format(host)], cwd=THIS_FOLDER) def create_action(host, user, text, project=''): return subprocess.check_output(['fab', 'create_action:user={},text={},project={}'.format(user, text, project), '--host={}'.format(host)], cwd=THIS_FOLDER)
Make running FTs against staging a bit less verbose
## Code Before: from unipath import Path import subprocess THIS_FOLDER = Path(__file__).parent def reset_database(host): subprocess.check_call(['fab', 'reset_database', '--host={}'.format(host)], cwd=THIS_FOLDER) def create_user(host, user, email, password): subprocess.check_call(['fab', 'create_user:user={},password={},email={}' \ .format(user, password, email), '--host={}'.format(host)], cwd=THIS_FOLDER) def get_sitename(host): return subprocess.check_output(['fab', 'get_sitename', '--host={}'.format(host), '--hide=everything,status'], cwd=THIS_FOLDER).decode().strip() def create_project(host, user, name, description=''): return subprocess.check_output(['fab', 'create_project:user={},name={},description={}'.format(user, name, description), '--host={}'.format(host)], cwd=THIS_FOLDER) def create_action(host, user, text, project=''): return subprocess.check_output(['fab', 'create_action:user={},text={},project={}'.format(user, text, project), '--host={}'.format(host)], cwd=THIS_FOLDER) ## Instruction: Make running FTs against staging a bit less verbose ## Code After: from unipath import Path import subprocess THIS_FOLDER = Path(__file__).parent def reset_database(host): subprocess.check_call(['fab', 'reset_database', '--host={}'.format(host), '--hide=everything,status'], cwd=THIS_FOLDER) def create_user(host, user, email, password): subprocess.check_call(['fab', 'create_user:user={},password={},email={}' \ .format(user, password, email), '--host={}'.format(host), '--hide=everything,status'], cwd=THIS_FOLDER) def get_sitename(host): return subprocess.check_output(['fab', 'get_sitename', '--host={}'.format(host), '--hide=everything,status'], cwd=THIS_FOLDER).decode().strip() def create_project(host, user, name, description=''): return subprocess.check_output(['fab', 'create_project:user={},name={},description={}'.format(user, name, description), '--host={}'.format(host)], cwd=THIS_FOLDER) def create_action(host, user, text, project=''): return subprocess.check_output(['fab', 'create_action:user={},text={},project={}'.format(user, text, project), '--host={}'.format(host)], cwd=THIS_FOLDER)
from unipath import Path import subprocess THIS_FOLDER = Path(__file__).parent def reset_database(host): - subprocess.check_call(['fab', 'reset_database', '--host={}'.format(host)], ? - + subprocess.check_call(['fab', 'reset_database', '--host={}'.format(host), + '--hide=everything,status'], cwd=THIS_FOLDER) def create_user(host, user, email, password): subprocess.check_call(['fab', 'create_user:user={},password={},email={}' \ - .format(user, password, email), '--host={}'.format(host)], ? - + .format(user, password, email), '--host={}'.format(host), + '--hide=everything,status'], cwd=THIS_FOLDER) def get_sitename(host): return subprocess.check_output(['fab', 'get_sitename', '--host={}'.format(host), '--hide=everything,status'], cwd=THIS_FOLDER).decode().strip() def create_project(host, user, name, description=''): return subprocess.check_output(['fab', 'create_project:user={},name={},description={}'.format(user, name, description), '--host={}'.format(host)], cwd=THIS_FOLDER) def create_action(host, user, text, project=''): return subprocess.check_output(['fab', 'create_action:user={},text={},project={}'.format(user, text, project), '--host={}'.format(host)], cwd=THIS_FOLDER)
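A hedged refactor sketch, not part of the commit above: every helper repeats the same `fab ... --host=... --hide=everything,status` boilerplate, which could sit behind one function. The name `_fab` is invented.

```python
import subprocess

def _fab(task, host, capture=False, cwd="."):
    """Run a fab task quietly; optionally capture and return its output."""
    cmd = ["fab", task, "--host={}".format(host), "--hide=everything,status"]
    if capture:
        return subprocess.check_output(cmd, cwd=cwd).decode().strip()
    subprocess.check_call(cmd, cwd=cwd)

# e.g. reset_database(host) collapses to: _fab("reset_database", host)
```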
6b9ccae880e9582f38e2a8aa3c451bc6f6a88d37
thing/tasks/tablecleaner.py
thing/tasks/tablecleaner.py
import datetime from celery import task from celery.utils.log import get_task_logger logger = get_task_logger(__name__) from django.db.models import Q from thing.models import APIKey, TaskState # --------------------------------------------------------------------------- # Periodic task to perform database table cleanup @task(name='thing.table_cleaner') def table_cleaner(): utcnow = datetime.datetime.utcnow() queued_timeout = utcnow - datetime.timedelta(minutes=120) # Build a QuerySet to find broken tasks taskstates = TaskState.objects.filter(state=TaskState.QUEUED_STATE, mod_time__lte=queued_timeout) for ts in taskstates: logger.warn('[table_cleaner] Stuck task: %d | %d | %s | %s', ts.id, ts.keyid, ts.parameter, ts.url) count = taskstates.update(mod_time=utcnow, next_time=utcnow, state=TaskState.READY_STATE) if count > 0: logger.warn('[table_cleaner] Reset %d broken task(s)', count) # Build a QuerySet to find tasks that refer to no longer existent keys taskstates = TaskState.objects.exclude( Q(keyid=-1) | Q(keyid__in=APIKey.objects.values('keyid')) ) taskstates.delete() # ---------------------------------------------------------------------------
import datetime from celery import task from celery.utils.log import get_task_logger logger = get_task_logger(__name__) from django.db.models import Q from thing.models import APIKey, TaskState # --------------------------------------------------------------------------- # Periodic task to perform database table cleanup @task(name='thing.table_cleaner') def table_cleaner(): utcnow = datetime.datetime.utcnow() queued_timeout = utcnow - datetime.timedelta(minutes=120) # Build a QuerySet to find broken tasks taskstates = TaskState.objects.filter(state=TaskState.QUEUED_STATE, mod_time__lte=queued_timeout) for ts in taskstates: logger.warn('[table_cleaner] Stuck task: %d | %d | %s | %s', ts.id, ts.keyid, ts.parameter, ts.url) count = taskstates.update(mod_time=utcnow, next_time=utcnow, state=TaskState.READY_STATE) if count > 0: logger.warn('[table_cleaner] Reset %d broken task(s)', count) # Build a QuerySet to find tasks that refer to no longer existent keys taskstates = TaskState.objects.exclude( Q(keyid=-1) | Q(keyid__in=APIKey.objects.filter(valid=True).values('keyid')) ) taskstates.delete() # ---------------------------------------------------------------------------
Change thing.tasks.table_cleaner to delete TaskState objects for any invalid APIKeys
Change thing.tasks.table_cleaner to delete TaskState objects for any invalid APIKeys
Python
bsd-2-clause
madcowfred/evething,madcowfred/evething,Gillingham/evething,Gillingham/evething,cmptrgeekken/evething,madcowfred/evething,Gillingham/evething,cmptrgeekken/evething,cmptrgeekken/evething,Gillingham/evething,madcowfred/evething,cmptrgeekken/evething,cmptrgeekken/evething
import datetime from celery import task from celery.utils.log import get_task_logger logger = get_task_logger(__name__) from django.db.models import Q from thing.models import APIKey, TaskState # --------------------------------------------------------------------------- # Periodic task to perform database table cleanup @task(name='thing.table_cleaner') def table_cleaner(): utcnow = datetime.datetime.utcnow() queued_timeout = utcnow - datetime.timedelta(minutes=120) # Build a QuerySet to find broken tasks taskstates = TaskState.objects.filter(state=TaskState.QUEUED_STATE, mod_time__lte=queued_timeout) for ts in taskstates: logger.warn('[table_cleaner] Stuck task: %d | %d | %s | %s', ts.id, ts.keyid, ts.parameter, ts.url) - + count = taskstates.update(mod_time=utcnow, next_time=utcnow, state=TaskState.READY_STATE) if count > 0: logger.warn('[table_cleaner] Reset %d broken task(s)', count) # Build a QuerySet to find tasks that refer to no longer existent keys taskstates = TaskState.objects.exclude( Q(keyid=-1) | - Q(keyid__in=APIKey.objects.values('keyid')) + Q(keyid__in=APIKey.objects.filter(valid=True).values('keyid')) ) taskstates.delete() # ---------------------------------------------------------------------------
Change thing.tasks.table_cleaner to delete TaskState objects for any invalid APIKeys
## Code Before: import datetime from celery import task from celery.utils.log import get_task_logger logger = get_task_logger(__name__) from django.db.models import Q from thing.models import APIKey, TaskState # --------------------------------------------------------------------------- # Periodic task to perform database table cleanup @task(name='thing.table_cleaner') def table_cleaner(): utcnow = datetime.datetime.utcnow() queued_timeout = utcnow - datetime.timedelta(minutes=120) # Build a QuerySet to find broken tasks taskstates = TaskState.objects.filter(state=TaskState.QUEUED_STATE, mod_time__lte=queued_timeout) for ts in taskstates: logger.warn('[table_cleaner] Stuck task: %d | %d | %s | %s', ts.id, ts.keyid, ts.parameter, ts.url) count = taskstates.update(mod_time=utcnow, next_time=utcnow, state=TaskState.READY_STATE) if count > 0: logger.warn('[table_cleaner] Reset %d broken task(s)', count) # Build a QuerySet to find tasks that refer to no longer existent keys taskstates = TaskState.objects.exclude( Q(keyid=-1) | Q(keyid__in=APIKey.objects.values('keyid')) ) taskstates.delete() # --------------------------------------------------------------------------- ## Instruction: Change thing.tasks.table_cleaner to delete TaskState objects for any invalid APIKeys ## Code After: import datetime from celery import task from celery.utils.log import get_task_logger logger = get_task_logger(__name__) from django.db.models import Q from thing.models import APIKey, TaskState # --------------------------------------------------------------------------- # Periodic task to perform database table cleanup @task(name='thing.table_cleaner') def table_cleaner(): utcnow = datetime.datetime.utcnow() queued_timeout = utcnow - datetime.timedelta(minutes=120) # Build a QuerySet to find broken tasks taskstates = TaskState.objects.filter(state=TaskState.QUEUED_STATE, mod_time__lte=queued_timeout) for ts in taskstates: logger.warn('[table_cleaner] Stuck task: %d | %d | %s | %s', ts.id, ts.keyid, ts.parameter, ts.url) count = taskstates.update(mod_time=utcnow, next_time=utcnow, state=TaskState.READY_STATE) if count > 0: logger.warn('[table_cleaner] Reset %d broken task(s)', count) # Build a QuerySet to find tasks that refer to no longer existent keys taskstates = TaskState.objects.exclude( Q(keyid=-1) | Q(keyid__in=APIKey.objects.filter(valid=True).values('keyid')) ) taskstates.delete() # ---------------------------------------------------------------------------
import datetime from celery import task from celery.utils.log import get_task_logger logger = get_task_logger(__name__) from django.db.models import Q from thing.models import APIKey, TaskState # --------------------------------------------------------------------------- # Periodic task to perform database table cleanup @task(name='thing.table_cleaner') def table_cleaner(): utcnow = datetime.datetime.utcnow() queued_timeout = utcnow - datetime.timedelta(minutes=120) # Build a QuerySet to find broken tasks taskstates = TaskState.objects.filter(state=TaskState.QUEUED_STATE, mod_time__lte=queued_timeout) for ts in taskstates: logger.warn('[table_cleaner] Stuck task: %d | %d | %s | %s', ts.id, ts.keyid, ts.parameter, ts.url) - + count = taskstates.update(mod_time=utcnow, next_time=utcnow, state=TaskState.READY_STATE) if count > 0: logger.warn('[table_cleaner] Reset %d broken task(s)', count) # Build a QuerySet to find tasks that refer to no longer existent keys taskstates = TaskState.objects.exclude( Q(keyid=-1) | - Q(keyid__in=APIKey.objects.values('keyid')) + Q(keyid__in=APIKey.objects.filter(valid=True).values('keyid')) ? +++++++++++++++++++ ) taskstates.delete() # ---------------------------------------------------------------------------
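A hedged sketch of the queryset semantics in the record above (assumes the Django app is configured): `exclude()` over OR-ed `Q` objects keeps only rows matching neither condition, so the cleanup now also drops TaskStates whose key exists but is flagged invalid.

```python
from django.db.models import Q
from thing.models import APIKey, TaskState

valid_keyids = APIKey.objects.filter(valid=True).values("keyid")
# Remaining rows: keyid != -1 AND keyid not among valid keys.
orphans = TaskState.objects.exclude(Q(keyid=-1) | Q(keyid__in=valid_keyids))
print(orphans.count())  # inspect what would be deleted
```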
7cc968f90407745b84bd2f663e5f64b9c0923605
project/manage.py
project/manage.py
import os
import sys

import environ

if __name__ == "__main__":
    if os.path.isfile('.env'):
        environ.Env.read_env('.env')

    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")

    from django.core.management import execute_from_command_line

    execute_from_command_line(sys.argv)
import os
import sys

import environ

ROOT_DIR = environ.Path(__file__) - 1

if __name__ == "__main__":
    if os.path.isfile(str(ROOT_DIR + '.env')):
        environ.Env.read_env(str(ROOT_DIR + '.env'))

    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")

    from django.core.management import execute_from_command_line

    execute_from_command_line(sys.argv)
Use full path in case the working dir is not the same
Use full path in case the working dir is not the same
Python
mit
hacklab-fi/asylum,hacklab-fi/asylum,HelsinkiHacklab/asylum,jautero/asylum,hacklab-fi/asylum,HelsinkiHacklab/asylum,jautero/asylum,hacklab-fi/asylum,rambo/asylum,jautero/asylum,rambo/asylum,rambo/asylum,rambo/asylum,jautero/asylum,HelsinkiHacklab/asylum,HelsinkiHacklab/asylum
import os import sys import environ + ROOT_DIR = environ.Path(__file__) - 1 if __name__ == "__main__": - if os.path.isfile('.env'): + if os.path.isfile(str(ROOT_DIR + '.env')): - environ.Env.read_env('.env') + environ.Env.read_env(str(ROOT_DIR + '.env')) os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local") from django.core.management import execute_from_command_line execute_from_command_line(sys.argv)
Use full path in case the working dir is not the same
## Code Before: import os import sys import environ if __name__ == "__main__": if os.path.isfile('.env'): environ.Env.read_env('.env') os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local") from django.core.management import execute_from_command_line execute_from_command_line(sys.argv) ## Instruction: Use full path in case the working dir is not the same ## Code After: import os import sys import environ ROOT_DIR = environ.Path(__file__) - 1 if __name__ == "__main__": if os.path.isfile(str(ROOT_DIR + '.env')): environ.Env.read_env(str(ROOT_DIR + '.env')) os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local") from django.core.management import execute_from_command_line execute_from_command_line(sys.argv)
import os import sys import environ + ROOT_DIR = environ.Path(__file__) - 1 if __name__ == "__main__": - if os.path.isfile('.env'): + if os.path.isfile(str(ROOT_DIR + '.env')): ? +++++++++++++++ + - environ.Env.read_env('.env') + environ.Env.read_env(str(ROOT_DIR + '.env')) ? +++++++++++++++ + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local") from django.core.management import execute_from_command_line execute_from_command_line(sys.argv)
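A hedged sketch of the django-environ `Path` arithmetic used above: subtracting an integer strips that many trailing components, adding a string joins it on, and `str()` yields the plain filesystem path. The example path is invented.

```python
import environ

root = environ.Path("/srv/project/manage.py") - 1  # strip one component
print(str(root))           # roughly: /srv/project
print(str(root + ".env"))  # roughly: /srv/project/.env
```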
da2376744ec5b1823ea75f3cefbb0de0ac000c1b
tests/secrets.py
tests/secrets.py
import os

from dotenv import load_dotenv, find_dotenv

load_dotenv(find_dotenv())

OPS_KEY = os.environ['OPS_KEY']
OPS_SECRET = os.environ['OPS_SECRET']
import os

from dotenv import load_dotenv, find_dotenv

load_dotenv(find_dotenv())

OPS_KEY = os.environ['OPS_KEY']
OPS_SECRET = os.environ['OPS_SECRET']
TWITTER_CONSUMER_ACCESS = os.environ['TWITTER_CONSUMER_ACCESS']
TWITTER_CONSUMER_SECRET = os.environ['TWITTER_CONSUMER_SECRET']
TWITTER_ACCESS = os.environ['TWITTER_ACCESS']
TWITTER_SECRET = os.environ['TWITTER_SECRET']
Read twitter tokens from .env
Read twitter tokens from .env
Python
mit
nestauk/inet
import os from dotenv import load_dotenv, find_dotenv load_dotenv(find_dotenv()) OPS_KEY = os.environ['OPS_KEY'] OPS_SECRET = os.environ['OPS_SECRET'] + TWITTER_CONSUMER_ACCESS = os.environ['TWITTER_CONSUMER_ACCESS'] + TWITTER_CONSUMER_SECRET = os.environ['TWITTER_CONSUMER_SECRET'] + TWITTER_ACCESS = os.environ['TWITTER_ACCESS'] + TWITTER_SECRET = os.environ['TWITTER_SECRET']
Read twitter tokens from .env
## Code Before: import os from dotenv import load_dotenv, find_dotenv load_dotenv(find_dotenv()) OPS_KEY = os.environ['OPS_KEY'] OPS_SECRET = os.environ['OPS_SECRET'] ## Instruction: Read twitter tokens from .env ## Code After: import os from dotenv import load_dotenv, find_dotenv load_dotenv(find_dotenv()) OPS_KEY = os.environ['OPS_KEY'] OPS_SECRET = os.environ['OPS_SECRET'] TWITTER_CONSUMER_ACCESS = os.environ['TWITTER_CONSUMER_ACCESS'] TWITTER_CONSUMER_SECRET = os.environ['TWITTER_CONSUMER_SECRET'] TWITTER_ACCESS = os.environ['TWITTER_ACCESS'] TWITTER_SECRET = os.environ['TWITTER_SECRET']
import os from dotenv import load_dotenv, find_dotenv load_dotenv(find_dotenv()) OPS_KEY = os.environ['OPS_KEY'] OPS_SECRET = os.environ['OPS_SECRET'] + TWITTER_CONSUMER_ACCESS = os.environ['TWITTER_CONSUMER_ACCESS'] + TWITTER_CONSUMER_SECRET = os.environ['TWITTER_CONSUMER_SECRET'] + TWITTER_ACCESS = os.environ['TWITTER_ACCESS'] + TWITTER_SECRET = os.environ['TWITTER_SECRET']
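A hedged note on the record above: `os.environ[...]` raises a bare `KeyError` when a token is missing from `.env`, which can be cryptic during test setup. A sketch of a friendlier guard (the message wording is invented):

```python
import os

REQUIRED = ("TWITTER_CONSUMER_ACCESS", "TWITTER_CONSUMER_SECRET",
            "TWITTER_ACCESS", "TWITTER_SECRET")

for name in REQUIRED:
    if os.environ.get(name) is None:
        raise RuntimeError("{} missing; add it to your .env".format(name))
```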
97d1dd6b14cff5196ccd2e2efad8a0aba5bf240b
tests/test_money.py
tests/test_money.py
from decimal import Decimal from django.test import TestCase from shop.money.money_maker import AbstractMoney, MoneyMaker class AbstractMoneyTest(TestCase): def test_is_abstract(self): self.assertRaises(TypeError, lambda: AbstractMoney(1)) class MoneyMakerTest(TestCase): def test_create_new_money_type_without_argumens(self): Money = MoneyMaker() money = Money() self.assertTrue(money.is_nan()) def test_wrong_currency_raises_assertion_error(self): # If we try to call a money class with a value that has a # different currency than the class, there should be an # AssertionError. Money = MoneyMaker(currency_code='EUR') value = Money() value._currency_code = 'USD' self.assertRaises(AssertionError, lambda: Money(value)) def test_create_instance_from_decimal(self): Money = MoneyMaker() value = Decimal("1.2") inst = Money(value) self.assertEquals(inst, value)
from __future__ import unicode_literals from decimal import Decimal from django.test import TestCase from shop.money.money_maker import AbstractMoney, MoneyMaker class AbstractMoneyTest(TestCase): def test_is_abstract(self): self.assertRaises(TypeError, lambda: AbstractMoney(1)) class MoneyMakerTest(TestCase): def test_create_new_money_type_without_argumens(self): Money = MoneyMaker() money = Money() self.assertTrue(money.is_nan()) def test_wrong_currency_raises_assertion_error(self): # If we try to call a money class with a value that has a # different currency than the class, and the value is an # instance of the money class, there should be an # AssertionError. Money = MoneyMaker(currency_code='EUR') value = Money() value._currency_code = 'USD' self.assertRaises(AssertionError, lambda: Money(value)) def test_create_instance_from_decimal(self): Money = MoneyMaker() value = Decimal("1.2") inst = Money(value) self.assertEquals(inst, value) def test_unicode(self): Money = MoneyMaker() value = Money(1) self.assertEqual(unicode(value), "€ 1.00")
Add a test for AbstractMoney.__unicode__
Add a test for AbstractMoney.__unicode__
Python
bsd-3-clause
nimbis/django-shop,jrief/django-shop,awesto/django-shop,rfleschenberg/django-shop,jrief/django-shop,nimbis/django-shop,jrief/django-shop,awesto/django-shop,khchine5/django-shop,rfleschenberg/django-shop,jrief/django-shop,divio/django-shop,divio/django-shop,rfleschenberg/django-shop,nimbis/django-shop,khchine5/django-shop,khchine5/django-shop,awesto/django-shop,rfleschenberg/django-shop,divio/django-shop,khchine5/django-shop,nimbis/django-shop
+ from __future__ import unicode_literals + from decimal import Decimal from django.test import TestCase from shop.money.money_maker import AbstractMoney, MoneyMaker class AbstractMoneyTest(TestCase): def test_is_abstract(self): self.assertRaises(TypeError, lambda: AbstractMoney(1)) class MoneyMakerTest(TestCase): def test_create_new_money_type_without_argumens(self): Money = MoneyMaker() money = Money() self.assertTrue(money.is_nan()) def test_wrong_currency_raises_assertion_error(self): # If we try to call a money class with a value that has a - # different currency than the class, there should be an + # different currency than the class, and the value is an + # instance of the money class, there should be an # AssertionError. Money = MoneyMaker(currency_code='EUR') value = Money() value._currency_code = 'USD' self.assertRaises(AssertionError, lambda: Money(value)) def test_create_instance_from_decimal(self): Money = MoneyMaker() value = Decimal("1.2") inst = Money(value) self.assertEquals(inst, value) + def test_unicode(self): + Money = MoneyMaker() + value = Money(1) + self.assertEqual(unicode(value), "€ 1.00") +
Add a test for AbstractMoney.__unicode__
## Code Before: from decimal import Decimal from django.test import TestCase from shop.money.money_maker import AbstractMoney, MoneyMaker class AbstractMoneyTest(TestCase): def test_is_abstract(self): self.assertRaises(TypeError, lambda: AbstractMoney(1)) class MoneyMakerTest(TestCase): def test_create_new_money_type_without_argumens(self): Money = MoneyMaker() money = Money() self.assertTrue(money.is_nan()) def test_wrong_currency_raises_assertion_error(self): # If we try to call a money class with a value that has a # different currency than the class, there should be an # AssertionError. Money = MoneyMaker(currency_code='EUR') value = Money() value._currency_code = 'USD' self.assertRaises(AssertionError, lambda: Money(value)) def test_create_instance_from_decimal(self): Money = MoneyMaker() value = Decimal("1.2") inst = Money(value) self.assertEquals(inst, value) ## Instruction: Add a test for AbstractMoney.__unicode__ ## Code After: from __future__ import unicode_literals from decimal import Decimal from django.test import TestCase from shop.money.money_maker import AbstractMoney, MoneyMaker class AbstractMoneyTest(TestCase): def test_is_abstract(self): self.assertRaises(TypeError, lambda: AbstractMoney(1)) class MoneyMakerTest(TestCase): def test_create_new_money_type_without_argumens(self): Money = MoneyMaker() money = Money() self.assertTrue(money.is_nan()) def test_wrong_currency_raises_assertion_error(self): # If we try to call a money class with a value that has a # different currency than the class, and the value is an # instance of the money class, there should be an # AssertionError. Money = MoneyMaker(currency_code='EUR') value = Money() value._currency_code = 'USD' self.assertRaises(AssertionError, lambda: Money(value)) def test_create_instance_from_decimal(self): Money = MoneyMaker() value = Decimal("1.2") inst = Money(value) self.assertEquals(inst, value) def test_unicode(self): Money = MoneyMaker() value = Money(1) self.assertEqual(unicode(value), "€ 1.00")
+ from __future__ import unicode_literals + from decimal import Decimal from django.test import TestCase from shop.money.money_maker import AbstractMoney, MoneyMaker class AbstractMoneyTest(TestCase): def test_is_abstract(self): self.assertRaises(TypeError, lambda: AbstractMoney(1)) class MoneyMakerTest(TestCase): def test_create_new_money_type_without_argumens(self): Money = MoneyMaker() money = Money() self.assertTrue(money.is_nan()) def test_wrong_currency_raises_assertion_error(self): # If we try to call a money class with a value that has a - # different currency than the class, there should be an ? ^ -------- + # different currency than the class, and the value is an ? ++++ ^^^^^ + + # instance of the money class, there should be an # AssertionError. Money = MoneyMaker(currency_code='EUR') value = Money() value._currency_code = 'USD' self.assertRaises(AssertionError, lambda: Money(value)) def test_create_instance_from_decimal(self): Money = MoneyMaker() value = Decimal("1.2") inst = Money(value) self.assertEquals(inst, value) + + def test_unicode(self): + Money = MoneyMaker() + value = Money(1) + self.assertEqual(unicode(value), "€ 1.00")
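A hedged usage sketch grounded in the new test above (Python 2 code, hence the `unicode()` builtin): `MoneyMaker()` with no arguments builds an EUR-denominated `Decimal` subclass whose text form carries the currency symbol.

```python
from shop.money.money_maker import MoneyMaker

Money = MoneyMaker()
print(unicode(Money(1)))  # "€ 1.00", per the assertion in test_unicode
```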
fb335ed74d9d924816fe6bf712844023abf62e30
address_book/person.py
address_book/person.py
__all__ = ['Person']


class Person(object):
    pass
__all__ = ['Person']


class Person(object):

    def __init__(self, first_name, last_name, addresses, phone_numbers):
        self.first_name = first_name
        self.last_name = last_name
        self.addresses = addresses
        self.phone_numbers = phone_numbers
Add constructor with required arguments to the `Person` class
Add constructor with required arguments to the `Person` class
Python
mit
dizpers/python-address-book-assignment
__all__ = ['Person'] class Person(object): - pass + + def __init__(self, first_name, last_name, addresses, phone_numbers): + self.first_name = first_name + self.last_name = last_name + self.addresses = addresses + self.phone_numbers = phone_numbers +
Add constructor with required arguments to the `Person` class
## Code Before: __all__ = ['Person'] class Person(object): pass ## Instruction: Add constructor with required arguments to the `Person` class ## Code After: __all__ = ['Person'] class Person(object): def __init__(self, first_name, last_name, addresses, phone_numbers): self.first_name = first_name self.last_name = last_name self.addresses = addresses self.phone_numbers = phone_numbers
__all__ = ['Person'] class Person(object): - pass + + def __init__(self, first_name, last_name, addresses, phone_numbers): + self.first_name = first_name + self.last_name = last_name + self.addresses = addresses + self.phone_numbers = phone_numbers
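A hedged usage sketch for the new constructor above; all field values are invented.

```python
from address_book.person import Person

alice = Person(
    first_name="Alice",
    last_name="Doe",
    addresses=["221B Baker Street"],
    phone_numbers=["+1-555-0100"],
)
print(alice.first_name, alice.phone_numbers)
```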
27fe9d6531a2e76affd9388db53c0433062a9cfa
photonix/photos/management/commands/create_library.py
photonix/photos/management/commands/create_library.py
import os from pathlib import Path from django.contrib.auth import get_user_model from django.core.management.base import BaseCommand from django.db.utils import IntegrityError from photonix.photos.models import Library, LibraryPath, LibraryUser from photonix.photos.utils.db import record_photo from photonix.photos.utils.fs import determine_destination, download_file User = get_user_model() class Command(BaseCommand): help = 'Create a library for a user' def create_library(self, username, library_name): # Get user user = User.objects.get(username=username) # Create Library library, _ = Library.objects.get_or_create( name=library_name, ) library_path, _ = LibraryPath.objects.get_or_create( library=library, type='St', backend_type='Lo', path='/data/photos/', url='/photos/', ) library_user, _ = LibraryUser.objects.get_or_create( library=library, user=user, owner=True, ) print(f'Library "{library_name}" created successfully for user "{username}"') def add_arguments(self, parser): # Positional arguments parser.add_argument('username', nargs='+', type=str) parser.add_argument('library_name', nargs='+', type=str) def handle(self, *args, **options): self.create_library(options['username'][0], options['library_name'][0])
import os from pathlib import Path from django.contrib.auth import get_user_model from django.core.management.base import BaseCommand from django.db.utils import IntegrityError from photonix.photos.models import Library, LibraryPath, LibraryUser from photonix.photos.utils.db import record_photo from photonix.photos.utils.fs import determine_destination, download_file User = get_user_model() class Command(BaseCommand): help = 'Create a library for a user' def create_library(self, username, library_name, path): # Get user user = User.objects.get(username=username) # Create Library library, _ = Library.objects.get_or_create( name=library_name, ) library_path, _ = LibraryPath.objects.get_or_create( library=library, type='St', backend_type='Lo', path=path, ) library_user, _ = LibraryUser.objects.get_or_create( library=library, user=user, owner=True, ) print(f'Library "{library_name}" with path "{path}" created successfully for user "{username}"') def add_arguments(self, parser): # Positional arguments parser.add_argument('username', type=str) parser.add_argument('library_name', type=str) parser.add_argument('--path', type=str, default='/data/photos') def handle(self, *args, **options): self.create_library(options['username'], options['library_name'], options['path'])
Fix path cannot be set when creating new library
Fix path cannot be set when creating new library
Python
agpl-3.0
damianmoore/photo-manager,damianmoore/photo-manager,damianmoore/photo-manager,damianmoore/photo-manager
import os from pathlib import Path from django.contrib.auth import get_user_model from django.core.management.base import BaseCommand from django.db.utils import IntegrityError from photonix.photos.models import Library, LibraryPath, LibraryUser from photonix.photos.utils.db import record_photo from photonix.photos.utils.fs import determine_destination, download_file User = get_user_model() class Command(BaseCommand): help = 'Create a library for a user' - def create_library(self, username, library_name): + def create_library(self, username, library_name, path): # Get user user = User.objects.get(username=username) # Create Library library, _ = Library.objects.get_or_create( name=library_name, ) library_path, _ = LibraryPath.objects.get_or_create( library=library, type='St', backend_type='Lo', - path='/data/photos/', + path=path, - url='/photos/', ) library_user, _ = LibraryUser.objects.get_or_create( library=library, user=user, owner=True, ) - print(f'Library "{library_name}" created successfully for user "{username}"') + print(f'Library "{library_name}" with path "{path}" created successfully for user "{username}"') def add_arguments(self, parser): # Positional arguments - parser.add_argument('username', nargs='+', type=str) + parser.add_argument('username', type=str) - parser.add_argument('library_name', nargs='+', type=str) + parser.add_argument('library_name', type=str) + parser.add_argument('--path', type=str, default='/data/photos') def handle(self, *args, **options): - self.create_library(options['username'][0], options['library_name'][0]) + self.create_library(options['username'], options['library_name'], options['path'])
Fix path cannot be set when creating new library
## Code Before: import os from pathlib import Path from django.contrib.auth import get_user_model from django.core.management.base import BaseCommand from django.db.utils import IntegrityError from photonix.photos.models import Library, LibraryPath, LibraryUser from photonix.photos.utils.db import record_photo from photonix.photos.utils.fs import determine_destination, download_file User = get_user_model() class Command(BaseCommand): help = 'Create a library for a user' def create_library(self, username, library_name): # Get user user = User.objects.get(username=username) # Create Library library, _ = Library.objects.get_or_create( name=library_name, ) library_path, _ = LibraryPath.objects.get_or_create( library=library, type='St', backend_type='Lo', path='/data/photos/', url='/photos/', ) library_user, _ = LibraryUser.objects.get_or_create( library=library, user=user, owner=True, ) print(f'Library "{library_name}" created successfully for user "{username}"') def add_arguments(self, parser): # Positional arguments parser.add_argument('username', nargs='+', type=str) parser.add_argument('library_name', nargs='+', type=str) def handle(self, *args, **options): self.create_library(options['username'][0], options['library_name'][0]) ## Instruction: Fix path cannot be set when creating new library ## Code After: import os from pathlib import Path from django.contrib.auth import get_user_model from django.core.management.base import BaseCommand from django.db.utils import IntegrityError from photonix.photos.models import Library, LibraryPath, LibraryUser from photonix.photos.utils.db import record_photo from photonix.photos.utils.fs import determine_destination, download_file User = get_user_model() class Command(BaseCommand): help = 'Create a library for a user' def create_library(self, username, library_name, path): # Get user user = User.objects.get(username=username) # Create Library library, _ = Library.objects.get_or_create( name=library_name, ) library_path, _ = LibraryPath.objects.get_or_create( library=library, type='St', backend_type='Lo', path=path, ) library_user, _ = LibraryUser.objects.get_or_create( library=library, user=user, owner=True, ) print(f'Library "{library_name}" with path "{path}" created successfully for user "{username}"') def add_arguments(self, parser): # Positional arguments parser.add_argument('username', type=str) parser.add_argument('library_name', type=str) parser.add_argument('--path', type=str, default='/data/photos') def handle(self, *args, **options): self.create_library(options['username'], options['library_name'], options['path'])
import os from pathlib import Path from django.contrib.auth import get_user_model from django.core.management.base import BaseCommand from django.db.utils import IntegrityError from photonix.photos.models import Library, LibraryPath, LibraryUser from photonix.photos.utils.db import record_photo from photonix.photos.utils.fs import determine_destination, download_file User = get_user_model() class Command(BaseCommand): help = 'Create a library for a user' - def create_library(self, username, library_name): + def create_library(self, username, library_name, path): ? ++++++ # Get user user = User.objects.get(username=username) # Create Library library, _ = Library.objects.get_or_create( name=library_name, ) library_path, _ = LibraryPath.objects.get_or_create( library=library, type='St', backend_type='Lo', - path='/data/photos/', ? ^^^ --- ------ + path=path, ? ^ - url='/photos/', ) library_user, _ = LibraryUser.objects.get_or_create( library=library, user=user, owner=True, ) - print(f'Library "{library_name}" created successfully for user "{username}"') + print(f'Library "{library_name}" with path "{path}" created successfully for user "{username}"') ? +++++++++++++++++++ def add_arguments(self, parser): # Positional arguments - parser.add_argument('username', nargs='+', type=str) ? ----------- + parser.add_argument('username', type=str) - parser.add_argument('library_name', nargs='+', type=str) ? ----------- + parser.add_argument('library_name', type=str) + parser.add_argument('--path', type=str, default='/data/photos') def handle(self, *args, **options): - self.create_library(options['username'][0], options['library_name'][0]) ? --- ^ + self.create_library(options['username'], options['library_name'], options['path']) ? +++++++++ ^^^^^^
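A hedged sketch of the fixed invocation above: the positional arguments are now single values and the path is an option. The username, library name, and path below are invented.

```python
# From a shell:
#   python manage.py create_library demo "Family Photos" --path /srv/photos
# or programmatically, given a configured Django project:
from django.core.management import call_command

call_command("create_library", "demo", "Family Photos", path="/srv/photos")
```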
06ef0b92b1c8e6cc2916f4d68ec3b4ae513c9085
july/people/views.py
july/people/views.py
from django.shortcuts import render_to_response from django.contrib.auth.decorators import login_required from django.template.context import RequestContext #from google.appengine.ext import db from july.people.models import Commit from gae_django.auth.models import User from django.http import Http404 from forms import EditUserForm def user_profile(request, username): user = User.all().filter("username", username).get() if user == None: raise Http404("User not found") commits = Commit.all().ancestor(request.user.key()) expandos = dict([(key, getattr(user, key, None)) for key in user.dynamic_properties()]) return render_to_response('people/profile.html', {"expandos": expandos, "commits":commits}, RequestContext(request)) @login_required def edit_profile(request, username, template_name='people/edit.html'): user = request.user #CONSIDER FILES with no POST? Can that happen? form = EditUserForm(request.POST or None, request.FILES or None) if form.is_valid(): for key in form.cleaned_data: setattr(user,key,form.cleaned_data.get(key)) user.put() if user == None: raise Http404("User not found") expandos = dict([(key, getattr(user, key, None)) for key in user.dynamic_properties()]) return render_to_response(template_name, {'form':form, }, RequestContext(request))
from django.shortcuts import render_to_response from django.contrib.auth.decorators import login_required from django.template.context import RequestContext #from google.appengine.ext import db from july.people.models import Commit from gae_django.auth.models import User from django.http import Http404 def user_profile(request, username): user = User.all().filter("username", username).get() if user == None: raise Http404("User not found") commits = Commit.all().ancestor(request.user.key()) expandos = dict([(key, getattr(user, key, None)) for key in user.dynamic_properties()]) return render_to_response('people/profile.html', {"expandos": expandos, "commits":commits}, RequestContext(request)) @login_required def edit_profile(request, username, template_name='people/edit.html'): from forms import EditUserForm user = request.user #CONSIDER FILES with no POST? Can that happen? form = EditUserForm(request.POST or None, request.FILES or None) if form.is_valid(): for key in form.cleaned_data: setattr(user,key,form.cleaned_data.get(key)) user.put() if user == None: raise Http404("User not found") expandos = dict([(key, getattr(user, key, None)) for key in user.dynamic_properties()]) return render_to_response(template_name, {'form':form, }, RequestContext(request))
Fix typo and move missing import into edit view
Fix typo and move missing import into edit view
Python
mit
julython/julython.org,julython/julython.org,ChimeraCoder/GOctober,julython/julython.org,ChimeraCoder/GOctober,julython/julython.org,ChimeraCoder/GOctober
from django.shortcuts import render_to_response
from django.contrib.auth.decorators import login_required
from django.template.context import RequestContext
#from google.appengine.ext import db

from july.people.models import Commit
from gae_django.auth.models import User

from django.http import Http404
- from forms import EditUserForm


def user_profile(request, username):
    user = User.all().filter("username", username).get()
    if user == None:
        raise Http404("User not found")
    commits = Commit.all().ancestor(request.user.key())
    expandos = dict([(key, getattr(user, key, None)) for key in user.dynamic_properties()])
    return render_to_response('people/profile.html', {"expandos": expandos, "commits":commits}, RequestContext(request))


@login_required
def edit_profile(request, username, template_name='people/edit.html'):
+     from forms import EditUserForm
    user = request.user
    #CONSIDER FILES with no POST? Can that happen?
    form = EditUserForm(request.POST or None, request.FILES or None)
    if form.is_valid():
        for key in form.cleaned_data:
-            setattr(user,key,form.cleaned_data.get(key))
+             setattr(user,key,form.cleaned_data.get(key))
        user.put()
    if user == None:
        raise Http404("User not found")
    expandos = dict([(key, getattr(user, key, None)) for key in user.dynamic_properties()])
    return render_to_response(template_name, {'form':form, }, RequestContext(request))
Fix typo and move missing import into edit view
## Code Before:
from django.shortcuts import render_to_response
from django.contrib.auth.decorators import login_required
from django.template.context import RequestContext
#from google.appengine.ext import db

from july.people.models import Commit
from gae_django.auth.models import User

from django.http import Http404
from forms import EditUserForm


def user_profile(request, username):
    user = User.all().filter("username", username).get()
    if user == None:
        raise Http404("User not found")
    commits = Commit.all().ancestor(request.user.key())
    expandos = dict([(key, getattr(user, key, None)) for key in user.dynamic_properties()])
    return render_to_response('people/profile.html', {"expandos": expandos, "commits":commits}, RequestContext(request))


@login_required
def edit_profile(request, username, template_name='people/edit.html'):
    user = request.user
    #CONSIDER FILES with no POST? Can that happen?
    form = EditUserForm(request.POST or None, request.FILES or None)
    if form.is_valid():
        for key in form.cleaned_data:
           setattr(user,key,form.cleaned_data.get(key))
        user.put()
    if user == None:
        raise Http404("User not found")
    expandos = dict([(key, getattr(user, key, None)) for key in user.dynamic_properties()])
    return render_to_response(template_name, {'form':form, }, RequestContext(request))
## Instruction:
Fix typo and move missing import into edit view
## Code After:
from django.shortcuts import render_to_response
from django.contrib.auth.decorators import login_required
from django.template.context import RequestContext
#from google.appengine.ext import db

from july.people.models import Commit
from gae_django.auth.models import User

from django.http import Http404


def user_profile(request, username):
    user = User.all().filter("username", username).get()
    if user == None:
        raise Http404("User not found")
    commits = Commit.all().ancestor(request.user.key())
    expandos = dict([(key, getattr(user, key, None)) for key in user.dynamic_properties()])
    return render_to_response('people/profile.html', {"expandos": expandos, "commits":commits}, RequestContext(request))


@login_required
def edit_profile(request, username, template_name='people/edit.html'):
    from forms import EditUserForm
    user = request.user
    #CONSIDER FILES with no POST? Can that happen?
    form = EditUserForm(request.POST or None, request.FILES or None)
    if form.is_valid():
        for key in form.cleaned_data:
            setattr(user,key,form.cleaned_data.get(key))
        user.put()
    if user == None:
        raise Http404("User not found")
    expandos = dict([(key, getattr(user, key, None)) for key in user.dynamic_properties()])
    return render_to_response(template_name, {'form':form, }, RequestContext(request))
from django.shortcuts import render_to_response
from django.contrib.auth.decorators import login_required
from django.template.context import RequestContext
#from google.appengine.ext import db

from july.people.models import Commit
from gae_django.auth.models import User

from django.http import Http404
- from forms import EditUserForm


def user_profile(request, username):
    user = User.all().filter("username", username).get()
    if user == None:
        raise Http404("User not found")
    commits = Commit.all().ancestor(request.user.key())
    expandos = dict([(key, getattr(user, key, None)) for key in user.dynamic_properties()])
    return render_to_response('people/profile.html', {"expandos": expandos, "commits":commits}, RequestContext(request))


@login_required
def edit_profile(request, username, template_name='people/edit.html'):
+     from forms import EditUserForm
    user = request.user
    #CONSIDER FILES with no POST? Can that happen?
    form = EditUserForm(request.POST or None, request.FILES or None)
    if form.is_valid():
        for key in form.cleaned_data:
-            setattr(user,key,form.cleaned_data.get(key))
+             setattr(user,key,form.cleaned_data.get(key))
?            +
        user.put()
    if user == None:
        raise Http404("User not found")
    expandos = dict([(key, getattr(user, key, None)) for key in user.dynamic_properties()])
    return render_to_response(template_name, {'form':form, }, RequestContext(request))
d731b4172592ef905101868b43817f25f5b04063
virtstrap/exceptions.py
virtstrap/exceptions.py
class CommandConfigError(Exception):
    """Exception for command configuration errors"""
    pass
class CommandConfigError(Exception):
    """Exception for command configuration errors"""
    pass


class RequirementsConfigError(Exception):
    """Exception for command configuration errors"""
    pass
Add a requirements configuration exception
Add a requirements configuration exception
Python
mit
ravenac95/virtstrap-core,ravenac95/testvirtstrapdocs,ravenac95/virtstrap-core
class CommandConfigError(Exception):
    """Exception for command configuration errors"""
    pass

+ class RequirementsConfigError(Exception):
+     """Exception for command configuration errors"""
+     pass
+
Add a requirements configuration exception
## Code Before:
class CommandConfigError(Exception):
    """Exception for command configuration errors"""
    pass
## Instruction:
Add a requirements configuration exception
## Code After:
class CommandConfigError(Exception):
    """Exception for command configuration errors"""
    pass


class RequirementsConfigError(Exception):
    """Exception for command configuration errors"""
    pass
class CommandConfigError(Exception):
    """Exception for command configuration errors"""
    pass
+
+ class RequirementsConfigError(Exception):
+     """Exception for command configuration errors"""
+     pass
8ce6a6144fee1c9ec6a5f1a083eabbb653d8514b
virtool/postgres.py
virtool/postgres.py
import logging
import sys

from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncConnection, create_async_engine

logger = logging.getLogger(__name__)


async def connect(postgres_connection_string: str) -> AsyncConnection:
    """
    Create a connection of Postgres.

    :param postgres_connection_string: the postgres connection string
    :return: an AsyncConnection object

    """
    if not postgres_connection_string.startswith("postgresql+asyncpg://"):
        logger.fatal("Invalid PostgreSQL connection string")
        sys.exit(1)

    try:
        postgres = create_async_engine(postgres_connection_string)

        async with postgres.connect() as connection:
            await check_version(connection)

            return connection
    except ConnectionRefusedError:
        logger.fatal("Could not connect to PostgreSQL: Connection refused")
        sys.exit(1)


async def check_version(connection: AsyncConnection):
    """
    Check and log the Postgres sever version.

    :param connection:an AsyncConnection object

    """
    info = await connection.execute(text('SHOW server_version'))

    version = info.first()[0].split()[0]
    logger.info(f"Found PostgreSQL {version}")
import logging
import sys

from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncConnection, create_async_engine

import virtool.models

logger = logging.getLogger(__name__)


async def connect(postgres_connection_string: str) -> AsyncConnection:
    """
    Create a connection of Postgres.

    :param postgres_connection_string: the postgres connection string
    :return: an AsyncConnection object

    """
    if not postgres_connection_string.startswith("postgresql+asyncpg://"):
        logger.fatal("Invalid PostgreSQL connection string")
        sys.exit(1)

    try:
        postgres = create_async_engine(postgres_connection_string)

        await virtool.models.create_tables(postgres)

        async with postgres.connect() as connection:
            await check_version(connection)
            return connection
    except ConnectionRefusedError:
        logger.fatal("Could not connect to PostgreSQL: Connection refused")
        sys.exit(1)


async def check_version(connection: AsyncConnection):
    """
    Check and log the Postgres sever version.

    :param connection:an AsyncConnection object

    """
    info = await connection.execute(text('SHOW server_version'))

    version = info.first()[0].split()[0]
    logger.info(f"Found PostgreSQL {version}")
Create tables on application start
Create tables on application start
Python
mit
virtool/virtool,virtool/virtool,igboyes/virtool,igboyes/virtool
import logging
import sys

from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncConnection, create_async_engine

+ import virtool.models

logger = logging.getLogger(__name__)


async def connect(postgres_connection_string: str) -> AsyncConnection:
    """
    Create a connection of Postgres.

    :param postgres_connection_string: the postgres connection string
    :return: an AsyncConnection object

    """
    if not postgres_connection_string.startswith("postgresql+asyncpg://"):
        logger.fatal("Invalid PostgreSQL connection string")
        sys.exit(1)

    try:
        postgres = create_async_engine(postgres_connection_string)

+         await virtool.models.create_tables(postgres)
+
        async with postgres.connect() as connection:
            await check_version(connection)
-
            return connection
    except ConnectionRefusedError:
        logger.fatal("Could not connect to PostgreSQL: Connection refused")
        sys.exit(1)


async def check_version(connection: AsyncConnection):
    """
    Check and log the Postgres sever version.

    :param connection:an AsyncConnection object

    """
    info = await connection.execute(text('SHOW server_version'))

    version = info.first()[0].split()[0]
    logger.info(f"Found PostgreSQL {version}")
Create tables on application start
## Code Before:
import logging
import sys

from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncConnection, create_async_engine

logger = logging.getLogger(__name__)


async def connect(postgres_connection_string: str) -> AsyncConnection:
    """
    Create a connection of Postgres.

    :param postgres_connection_string: the postgres connection string
    :return: an AsyncConnection object

    """
    if not postgres_connection_string.startswith("postgresql+asyncpg://"):
        logger.fatal("Invalid PostgreSQL connection string")
        sys.exit(1)

    try:
        postgres = create_async_engine(postgres_connection_string)

        async with postgres.connect() as connection:
            await check_version(connection)

            return connection
    except ConnectionRefusedError:
        logger.fatal("Could not connect to PostgreSQL: Connection refused")
        sys.exit(1)


async def check_version(connection: AsyncConnection):
    """
    Check and log the Postgres sever version.

    :param connection:an AsyncConnection object

    """
    info = await connection.execute(text('SHOW server_version'))

    version = info.first()[0].split()[0]
    logger.info(f"Found PostgreSQL {version}")
## Instruction:
Create tables on application start
## Code After:
import logging
import sys

from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncConnection, create_async_engine

import virtool.models

logger = logging.getLogger(__name__)


async def connect(postgres_connection_string: str) -> AsyncConnection:
    """
    Create a connection of Postgres.

    :param postgres_connection_string: the postgres connection string
    :return: an AsyncConnection object

    """
    if not postgres_connection_string.startswith("postgresql+asyncpg://"):
        logger.fatal("Invalid PostgreSQL connection string")
        sys.exit(1)

    try:
        postgres = create_async_engine(postgres_connection_string)

        await virtool.models.create_tables(postgres)

        async with postgres.connect() as connection:
            await check_version(connection)
            return connection
    except ConnectionRefusedError:
        logger.fatal("Could not connect to PostgreSQL: Connection refused")
        sys.exit(1)


async def check_version(connection: AsyncConnection):
    """
    Check and log the Postgres sever version.

    :param connection:an AsyncConnection object

    """
    info = await connection.execute(text('SHOW server_version'))

    version = info.first()[0].split()[0]
    logger.info(f"Found PostgreSQL {version}")
import logging
import sys

from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncConnection, create_async_engine

+ import virtool.models

logger = logging.getLogger(__name__)


async def connect(postgres_connection_string: str) -> AsyncConnection:
    """
    Create a connection of Postgres.

    :param postgres_connection_string: the postgres connection string
    :return: an AsyncConnection object

    """
    if not postgres_connection_string.startswith("postgresql+asyncpg://"):
        logger.fatal("Invalid PostgreSQL connection string")
        sys.exit(1)

    try:
        postgres = create_async_engine(postgres_connection_string)

+         await virtool.models.create_tables(postgres)
+
        async with postgres.connect() as connection:
            await check_version(connection)
-
            return connection
    except ConnectionRefusedError:
        logger.fatal("Could not connect to PostgreSQL: Connection refused")
        sys.exit(1)


async def check_version(connection: AsyncConnection):
    """
    Check and log the Postgres sever version.

    :param connection:an AsyncConnection object

    """
    info = await connection.execute(text('SHOW server_version'))

    version = info.first()[0].split()[0]
    logger.info(f"Found PostgreSQL {version}")
789ac1de1e94eda1224fb314ccad14c061c58ad4
pact/group.py
pact/group.py
from .base import PactBase
from .utils import GroupWaitPredicate


class PactGroup(PactBase):

    def __init__(self, pacts):
        self._pacts = list(pacts)
        super(PactGroup, self).__init__()

    def __iadd__(self, other):
        self._pacts.append(other)
        return self

    def _is_finished(self):
        return all(p.finished() for p in self._pacts)

    def _build_wait_predicate(self):
        return GroupWaitPredicate(self._pacts)

    def __str__(self):
        return ", ".join(map(str, self._pacts))
from .base import PactBase
from .utils import GroupWaitPredicate


class PactGroup(PactBase):

    def __init__(self, pacts=None):
        self._pacts = [] if pacts is None else list(pacts)
        super(PactGroup, self).__init__()

    def __iadd__(self, other):
        self._pacts.append(other)
        return self

    def _is_finished(self):
        return all(p.finished() for p in self._pacts)

    def _build_wait_predicate(self):
        return GroupWaitPredicate(self._pacts)

    def __str__(self):
        return ", ".join(map(str, self._pacts))
Create empty PactGroup if no arguments given
Create empty PactGroup if no arguments given
Python
bsd-3-clause
vmalloc/pact
from .base import PactBase
from .utils import GroupWaitPredicate


class PactGroup(PactBase):

-     def __init__(self, pacts):
+     def __init__(self, pacts=None):
-         self._pacts = list(pacts)
+         self._pacts = [] if pacts is None else list(pacts)
        super(PactGroup, self).__init__()

    def __iadd__(self, other):
        self._pacts.append(other)
        return self

    def _is_finished(self):
        return all(p.finished() for p in self._pacts)

    def _build_wait_predicate(self):
        return GroupWaitPredicate(self._pacts)

    def __str__(self):
        return ", ".join(map(str, self._pacts))
Create empty PactGroup if no arguments given
## Code Before:
from .base import PactBase
from .utils import GroupWaitPredicate


class PactGroup(PactBase):

    def __init__(self, pacts):
        self._pacts = list(pacts)
        super(PactGroup, self).__init__()

    def __iadd__(self, other):
        self._pacts.append(other)
        return self

    def _is_finished(self):
        return all(p.finished() for p in self._pacts)

    def _build_wait_predicate(self):
        return GroupWaitPredicate(self._pacts)

    def __str__(self):
        return ", ".join(map(str, self._pacts))
## Instruction:
Create empty PactGroup if no arguments given
## Code After:
from .base import PactBase
from .utils import GroupWaitPredicate


class PactGroup(PactBase):

    def __init__(self, pacts=None):
        self._pacts = [] if pacts is None else list(pacts)
        super(PactGroup, self).__init__()

    def __iadd__(self, other):
        self._pacts.append(other)
        return self

    def _is_finished(self):
        return all(p.finished() for p in self._pacts)

    def _build_wait_predicate(self):
        return GroupWaitPredicate(self._pacts)

    def __str__(self):
        return ", ".join(map(str, self._pacts))
from .base import PactBase
from .utils import GroupWaitPredicate


class PactGroup(PactBase):

-     def __init__(self, pacts):
+     def __init__(self, pacts=None):
?                              +++++
-         self._pacts = list(pacts)
+         self._pacts = [] if pacts is None else list(pacts)
        super(PactGroup, self).__init__()

    def __iadd__(self, other):
        self._pacts.append(other)
        return self

    def _is_finished(self):
        return all(p.finished() for p in self._pacts)

    def _build_wait_predicate(self):
        return GroupWaitPredicate(self._pacts)

    def __str__(self):
        return ", ".join(map(str, self._pacts))
c970661c4525e0f3a9c77935ccfbef62742b18d4
csympy/__init__.py
csympy/__init__.py
from .lib.csympy_wrapper import (Symbol, Integer, sympify, SympifyError,
        Add, Mul, Pow, sin, cos, sqrt, function_symbol, I)
from .utilities import var
from .lib.csympy_wrapper import (Symbol, Integer, sympify, SympifyError,
        Add, Mul, Pow, sin, cos, sqrt, function_symbol, I)
from .utilities import var


def test():
    import pytest, os
    return not pytest.cmdline.main(
        [os.path.dirname(os.path.abspath(__file__))])
Add test function so tests can be run from within python terminal
Add test function so tests can be run from within python terminal

import csympy
csympy.test()
Python
mit
symengine/symengine.py,bjodah/symengine.py,bjodah/symengine.py,symengine/symengine.py,symengine/symengine.py,bjodah/symengine.py
from .lib.csympy_wrapper import (Symbol, Integer, sympify, SympifyError,
        Add, Mul, Pow, sin, cos, sqrt, function_symbol, I)
from .utilities import var

+ def test():
+     import pytest, os
+     return not pytest.cmdline.main(
+         [os.path.dirname(os.path.abspath(__file__))])
+
Add test function so tests can be run from within python terminal
## Code Before:
from .lib.csympy_wrapper import (Symbol, Integer, sympify, SympifyError,
        Add, Mul, Pow, sin, cos, sqrt, function_symbol, I)
from .utilities import var
## Instruction:
Add test function so tests can be run from within python terminal
## Code After:
from .lib.csympy_wrapper import (Symbol, Integer, sympify, SympifyError,
        Add, Mul, Pow, sin, cos, sqrt, function_symbol, I)
from .utilities import var


def test():
    import pytest, os
    return not pytest.cmdline.main(
        [os.path.dirname(os.path.abspath(__file__))])
from .lib.csympy_wrapper import (Symbol, Integer, sympify, SympifyError,
        Add, Mul, Pow, sin, cos, sqrt, function_symbol, I)
from .utilities import var
+
+ def test():
+     import pytest, os
+     return not pytest.cmdline.main(
+         [os.path.dirname(os.path.abspath(__file__))])
062a2e41e6e605dad4d8a8dc23abaa50f8348595
start_server.py
start_server.py
from server.tsuserver import TsuServer3


# Idiotproof setup
def check_pyyaml():
    try:
        import yaml
    except ImportError:
        print("Couldn't import PyYAML. Installing it for you...")
        import pip
        pip.main(["install", "--user", "pyyaml"])


def main():
    server = TsuServer3()
    server.start()


if __name__ == '__main__':
    check_pyyaml()
    main()
from server.tsuserver import TsuServer3


# Idiotproof setup
def check_pyyaml():
    try:
        import yaml
    except ModuleNotFoundError:
        print("Couldn't import PyYAML. Installing it for you...")
        import pip
        pip.main(["install", "--user", "pyyaml"])


def main():
    server = TsuServer3()
    server.start()


if __name__ == '__main__':
    check_pyyaml()
    main()
Use ModuleNotFoundError instead of ImportError
Use ModuleNotFoundError instead of ImportError
Python
agpl-3.0
Attorney-Online-Engineering-Task-Force/tsuserver3,Mariomagistr/tsuserver3
from server.tsuserver import TsuServer3


# Idiotproof setup
def check_pyyaml():
    try:
        import yaml
-     except ImportError:
+     except ModuleNotFoundError:
        print("Couldn't import PyYAML. Installing it for you...")
        import pip
        pip.main(["install", "--user", "pyyaml"])


def main():
    server = TsuServer3()
    server.start()


if __name__ == '__main__':
    check_pyyaml()
    main()
Use ModuleNotFoundError instead of ImportError
## Code Before:
from server.tsuserver import TsuServer3


# Idiotproof setup
def check_pyyaml():
    try:
        import yaml
    except ImportError:
        print("Couldn't import PyYAML. Installing it for you...")
        import pip
        pip.main(["install", "--user", "pyyaml"])


def main():
    server = TsuServer3()
    server.start()


if __name__ == '__main__':
    check_pyyaml()
    main()
## Instruction:
Use ModuleNotFoundError instead of ImportError
## Code After:
from server.tsuserver import TsuServer3


# Idiotproof setup
def check_pyyaml():
    try:
        import yaml
    except ModuleNotFoundError:
        print("Couldn't import PyYAML. Installing it for you...")
        import pip
        pip.main(["install", "--user", "pyyaml"])


def main():
    server = TsuServer3()
    server.start()


if __name__ == '__main__':
    check_pyyaml()
    main()
from server.tsuserver import TsuServer3


# Idiotproof setup
def check_pyyaml():
    try:
        import yaml
-     except ImportError:
+     except ModuleNotFoundError:
        print("Couldn't import PyYAML. Installing it for you...")
        import pip
        pip.main(["install", "--user", "pyyaml"])


def main():
    server = TsuServer3()
    server.start()


if __name__ == '__main__':
    check_pyyaml()
    main()